Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Sign in / Register
Toggle navigation
Menu
Open sidebar
陈曦
sub2api
Commits
241023f3
Unverified
Commit
241023f3
authored
Mar 18, 2026
by
Wesley Liddick
Committed by
GitHub
Mar 18, 2026
Browse files
Merge pull request #1097 from Ethan0x0000/pr/upstream-model-tracking
feat(usage): 新增 upstream_model 追踪,支持按模型来源统计与展示
parents
1292c44b
cfaac12a
Changes
42
Hide whitespace changes
Inline
Side-by-side
backend/ent/migrate/schema.go
View file @
241023f3
...
...
@@ -716,6 +716,7 @@ var (
{
Name
:
"id"
,
Type
:
field
.
TypeInt64
,
Increment
:
true
},
{
Name
:
"request_id"
,
Type
:
field
.
TypeString
,
Size
:
64
},
{
Name
:
"model"
,
Type
:
field
.
TypeString
,
Size
:
100
},
{
Name
:
"upstream_model"
,
Type
:
field
.
TypeString
,
Nullable
:
true
,
Size
:
100
},
{
Name
:
"input_tokens"
,
Type
:
field
.
TypeInt
,
Default
:
0
},
{
Name
:
"output_tokens"
,
Type
:
field
.
TypeInt
,
Default
:
0
},
{
Name
:
"cache_creation_tokens"
,
Type
:
field
.
TypeInt
,
Default
:
0
},
...
...
@@ -755,31 +756,31 @@ var (
ForeignKeys
:
[]
*
schema
.
ForeignKey
{
{
Symbol
:
"usage_logs_api_keys_usage_logs"
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
8
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
9
]},
RefColumns
:
[]
*
schema
.
Column
{
APIKeysColumns
[
0
]},
OnDelete
:
schema
.
NoAction
,
},
{
Symbol
:
"usage_logs_accounts_usage_logs"
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
29
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
30
]},
RefColumns
:
[]
*
schema
.
Column
{
AccountsColumns
[
0
]},
OnDelete
:
schema
.
NoAction
,
},
{
Symbol
:
"usage_logs_groups_usage_logs"
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
0
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
1
]},
RefColumns
:
[]
*
schema
.
Column
{
GroupsColumns
[
0
]},
OnDelete
:
schema
.
SetNull
,
},
{
Symbol
:
"usage_logs_users_usage_logs"
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
1
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
2
]},
RefColumns
:
[]
*
schema
.
Column
{
UsersColumns
[
0
]},
OnDelete
:
schema
.
NoAction
,
},
{
Symbol
:
"usage_logs_user_subscriptions_usage_logs"
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
2
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
3
]},
RefColumns
:
[]
*
schema
.
Column
{
UserSubscriptionsColumns
[
0
]},
OnDelete
:
schema
.
SetNull
,
},
...
...
@@ -788,32 +789,32 @@ var (
{
Name
:
"usagelog_user_id"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
1
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
2
]},
},
{
Name
:
"usagelog_api_key_id"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
8
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
9
]},
},
{
Name
:
"usagelog_account_id"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
29
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
30
]},
},
{
Name
:
"usagelog_group_id"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
0
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
1
]},
},
{
Name
:
"usagelog_subscription_id"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
2
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
3
]},
},
{
Name
:
"usagelog_created_at"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
7
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
8
]},
},
{
Name
:
"usagelog_model"
,
...
...
@@ -828,17 +829,17 @@ var (
{
Name
:
"usagelog_user_id_created_at"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
1
],
UsageLogsColumns
[
2
7
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
2
],
UsageLogsColumns
[
2
8
]},
},
{
Name
:
"usagelog_api_key_id_created_at"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
8
],
UsageLogsColumns
[
2
7
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
2
9
],
UsageLogsColumns
[
2
8
]},
},
{
Name
:
"usagelog_group_id_created_at"
,
Unique
:
false
,
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
0
],
UsageLogsColumns
[
2
7
]},
Columns
:
[]
*
schema
.
Column
{
UsageLogsColumns
[
3
1
],
UsageLogsColumns
[
2
8
]},
},
},
}
...
...
backend/ent/mutation.go
View file @
241023f3
...
...
@@ -18239,6 +18239,7 @@ type UsageLogMutation struct {
id *int64
request_id *string
model *string
upstream_model *string
input_tokens *int
addinput_tokens *int
output_tokens *int
...
...
@@ -18576,6 +18577,55 @@ func (m *UsageLogMutation) ResetModel() {
m.model = nil
}
// SetUpstreamModel sets the "upstream_model" field.
func (m *UsageLogMutation) SetUpstreamModel(s string) {
m.upstream_model = &s
}
// UpstreamModel returns the value of the "upstream_model" field in the mutation.
func (m *UsageLogMutation) UpstreamModel() (r string, exists bool) {
v := m.upstream_model
if v == nil {
return
}
return *v, true
}
// OldUpstreamModel returns the old "upstream_model" field's value of the UsageLog entity.
// If the UsageLog object wasn't provided to the builder, the object is fetched from the database.
// An error is returned if the mutation operation is not UpdateOne, or the database query fails.
func (m *UsageLogMutation) OldUpstreamModel(ctx context.Context) (v *string, err error) {
if !m.op.Is(OpUpdateOne) {
return v, errors.New("OldUpstreamModel is only allowed on UpdateOne operations")
}
if m.id == nil || m.oldValue == nil {
return v, errors.New("OldUpstreamModel requires an ID field in the mutation")
}
oldValue, err := m.oldValue(ctx)
if err != nil {
return v, fmt.Errorf("querying old value for OldUpstreamModel: %w", err)
}
return oldValue.UpstreamModel, nil
}
// ClearUpstreamModel clears the value of the "upstream_model" field.
func (m *UsageLogMutation) ClearUpstreamModel() {
m.upstream_model = nil
m.clearedFields[usagelog.FieldUpstreamModel] = struct{}{}
}
// UpstreamModelCleared returns if the "upstream_model" field was cleared in this mutation.
func (m *UsageLogMutation) UpstreamModelCleared() bool {
_, ok := m.clearedFields[usagelog.FieldUpstreamModel]
return ok
}
// ResetUpstreamModel resets all changes to the "upstream_model" field.
func (m *UsageLogMutation) ResetUpstreamModel() {
m.upstream_model = nil
delete(m.clearedFields, usagelog.FieldUpstreamModel)
}
// SetGroupID sets the "group_id" field.
func (m *UsageLogMutation) SetGroupID(i int64) {
m.group = &i
...
...
@@ -20197,7 +20247,7 @@ func (m *UsageLogMutation) Type() string {
// order to get all numeric fields that were incremented/decremented, call
// AddedFields().
func (m *UsageLogMutation) Fields() []string {
fields := make([]string, 0, 3
2
)
fields := make([]string, 0, 3
3
)
if m.user != nil {
fields = append(fields, usagelog.FieldUserID)
}
...
...
@@ -20213,6 +20263,9 @@ func (m *UsageLogMutation) Fields() []string {
if m.model != nil {
fields = append(fields, usagelog.FieldModel)
}
if m.upstream_model != nil {
fields = append(fields, usagelog.FieldUpstreamModel)
}
if m.group != nil {
fields = append(fields, usagelog.FieldGroupID)
}
...
...
@@ -20312,6 +20365,8 @@ func (m *UsageLogMutation) Field(name string) (ent.Value, bool) {
return m.RequestID()
case usagelog.FieldModel:
return m.Model()
case usagelog.FieldUpstreamModel:
return m.UpstreamModel()
case usagelog.FieldGroupID:
return m.GroupID()
case usagelog.FieldSubscriptionID:
...
...
@@ -20385,6 +20440,8 @@ func (m *UsageLogMutation) OldField(ctx context.Context, name string) (ent.Value
return m.OldRequestID(ctx)
case usagelog.FieldModel:
return m.OldModel(ctx)
case usagelog.FieldUpstreamModel:
return m.OldUpstreamModel(ctx)
case usagelog.FieldGroupID:
return m.OldGroupID(ctx)
case usagelog.FieldSubscriptionID:
...
...
@@ -20483,6 +20540,13 @@ func (m *UsageLogMutation) SetField(name string, value ent.Value) error {
}
m.SetModel(v)
return nil
case usagelog.FieldUpstreamModel:
v, ok := value.(string)
if !ok {
return fmt.Errorf("unexpected type %T for field %s", value, name)
}
m.SetUpstreamModel(v)
return nil
case usagelog.FieldGroupID:
v, ok := value.(int64)
if !ok {
...
...
@@ -20921,6 +20985,9 @@ func (m *UsageLogMutation) AddField(name string, value ent.Value) error {
// mutation.
func (m *UsageLogMutation) ClearedFields() []string {
var fields []string
if m.FieldCleared(usagelog.FieldUpstreamModel) {
fields = append(fields, usagelog.FieldUpstreamModel)
}
if m.FieldCleared(usagelog.FieldGroupID) {
fields = append(fields, usagelog.FieldGroupID)
}
...
...
@@ -20962,6 +21029,9 @@ func (m *UsageLogMutation) FieldCleared(name string) bool {
// error if the field is not defined in the schema.
func (m *UsageLogMutation) ClearField(name string) error {
switch name {
case usagelog.FieldUpstreamModel:
m.ClearUpstreamModel()
return nil
case usagelog.FieldGroupID:
m.ClearGroupID()
return nil
...
...
@@ -21012,6 +21082,9 @@ func (m *UsageLogMutation) ResetField(name string) error {
case usagelog.FieldModel:
m.ResetModel()
return nil
case usagelog.FieldUpstreamModel:
m.ResetUpstreamModel()
return nil
case usagelog.FieldGroupID:
m.ResetGroupID()
return nil
...
...
backend/ent/runtime/runtime.go
View file @
241023f3
...
...
@@ -821,92 +821,96 @@ func init() {
return
nil
}
}()
// usagelogDescUpstreamModel is the schema descriptor for upstream_model field.
usagelogDescUpstreamModel
:=
usagelogFields
[
5
]
.
Descriptor
()
// usagelog.UpstreamModelValidator is a validator for the "upstream_model" field. It is called by the builders before save.
usagelog
.
UpstreamModelValidator
=
usagelogDescUpstreamModel
.
Validators
[
0
]
.
(
func
(
string
)
error
)
// usagelogDescInputTokens is the schema descriptor for input_tokens field.
usagelogDescInputTokens
:=
usagelogFields
[
7
]
.
Descriptor
()
usagelogDescInputTokens
:=
usagelogFields
[
8
]
.
Descriptor
()
// usagelog.DefaultInputTokens holds the default value on creation for the input_tokens field.
usagelog
.
DefaultInputTokens
=
usagelogDescInputTokens
.
Default
.
(
int
)
// usagelogDescOutputTokens is the schema descriptor for output_tokens field.
usagelogDescOutputTokens
:=
usagelogFields
[
8
]
.
Descriptor
()
usagelogDescOutputTokens
:=
usagelogFields
[
9
]
.
Descriptor
()
// usagelog.DefaultOutputTokens holds the default value on creation for the output_tokens field.
usagelog
.
DefaultOutputTokens
=
usagelogDescOutputTokens
.
Default
.
(
int
)
// usagelogDescCacheCreationTokens is the schema descriptor for cache_creation_tokens field.
usagelogDescCacheCreationTokens
:=
usagelogFields
[
9
]
.
Descriptor
()
usagelogDescCacheCreationTokens
:=
usagelogFields
[
10
]
.
Descriptor
()
// usagelog.DefaultCacheCreationTokens holds the default value on creation for the cache_creation_tokens field.
usagelog
.
DefaultCacheCreationTokens
=
usagelogDescCacheCreationTokens
.
Default
.
(
int
)
// usagelogDescCacheReadTokens is the schema descriptor for cache_read_tokens field.
usagelogDescCacheReadTokens
:=
usagelogFields
[
1
0
]
.
Descriptor
()
usagelogDescCacheReadTokens
:=
usagelogFields
[
1
1
]
.
Descriptor
()
// usagelog.DefaultCacheReadTokens holds the default value on creation for the cache_read_tokens field.
usagelog
.
DefaultCacheReadTokens
=
usagelogDescCacheReadTokens
.
Default
.
(
int
)
// usagelogDescCacheCreation5mTokens is the schema descriptor for cache_creation_5m_tokens field.
usagelogDescCacheCreation5mTokens
:=
usagelogFields
[
1
1
]
.
Descriptor
()
usagelogDescCacheCreation5mTokens
:=
usagelogFields
[
1
2
]
.
Descriptor
()
// usagelog.DefaultCacheCreation5mTokens holds the default value on creation for the cache_creation_5m_tokens field.
usagelog
.
DefaultCacheCreation5mTokens
=
usagelogDescCacheCreation5mTokens
.
Default
.
(
int
)
// usagelogDescCacheCreation1hTokens is the schema descriptor for cache_creation_1h_tokens field.
usagelogDescCacheCreation1hTokens
:=
usagelogFields
[
1
2
]
.
Descriptor
()
usagelogDescCacheCreation1hTokens
:=
usagelogFields
[
1
3
]
.
Descriptor
()
// usagelog.DefaultCacheCreation1hTokens holds the default value on creation for the cache_creation_1h_tokens field.
usagelog
.
DefaultCacheCreation1hTokens
=
usagelogDescCacheCreation1hTokens
.
Default
.
(
int
)
// usagelogDescInputCost is the schema descriptor for input_cost field.
usagelogDescInputCost
:=
usagelogFields
[
1
3
]
.
Descriptor
()
usagelogDescInputCost
:=
usagelogFields
[
1
4
]
.
Descriptor
()
// usagelog.DefaultInputCost holds the default value on creation for the input_cost field.
usagelog
.
DefaultInputCost
=
usagelogDescInputCost
.
Default
.
(
float64
)
// usagelogDescOutputCost is the schema descriptor for output_cost field.
usagelogDescOutputCost
:=
usagelogFields
[
1
4
]
.
Descriptor
()
usagelogDescOutputCost
:=
usagelogFields
[
1
5
]
.
Descriptor
()
// usagelog.DefaultOutputCost holds the default value on creation for the output_cost field.
usagelog
.
DefaultOutputCost
=
usagelogDescOutputCost
.
Default
.
(
float64
)
// usagelogDescCacheCreationCost is the schema descriptor for cache_creation_cost field.
usagelogDescCacheCreationCost
:=
usagelogFields
[
1
5
]
.
Descriptor
()
usagelogDescCacheCreationCost
:=
usagelogFields
[
1
6
]
.
Descriptor
()
// usagelog.DefaultCacheCreationCost holds the default value on creation for the cache_creation_cost field.
usagelog
.
DefaultCacheCreationCost
=
usagelogDescCacheCreationCost
.
Default
.
(
float64
)
// usagelogDescCacheReadCost is the schema descriptor for cache_read_cost field.
usagelogDescCacheReadCost
:=
usagelogFields
[
1
6
]
.
Descriptor
()
usagelogDescCacheReadCost
:=
usagelogFields
[
1
7
]
.
Descriptor
()
// usagelog.DefaultCacheReadCost holds the default value on creation for the cache_read_cost field.
usagelog
.
DefaultCacheReadCost
=
usagelogDescCacheReadCost
.
Default
.
(
float64
)
// usagelogDescTotalCost is the schema descriptor for total_cost field.
usagelogDescTotalCost
:=
usagelogFields
[
1
7
]
.
Descriptor
()
usagelogDescTotalCost
:=
usagelogFields
[
1
8
]
.
Descriptor
()
// usagelog.DefaultTotalCost holds the default value on creation for the total_cost field.
usagelog
.
DefaultTotalCost
=
usagelogDescTotalCost
.
Default
.
(
float64
)
// usagelogDescActualCost is the schema descriptor for actual_cost field.
usagelogDescActualCost
:=
usagelogFields
[
1
8
]
.
Descriptor
()
usagelogDescActualCost
:=
usagelogFields
[
1
9
]
.
Descriptor
()
// usagelog.DefaultActualCost holds the default value on creation for the actual_cost field.
usagelog
.
DefaultActualCost
=
usagelogDescActualCost
.
Default
.
(
float64
)
// usagelogDescRateMultiplier is the schema descriptor for rate_multiplier field.
usagelogDescRateMultiplier
:=
usagelogFields
[
19
]
.
Descriptor
()
usagelogDescRateMultiplier
:=
usagelogFields
[
20
]
.
Descriptor
()
// usagelog.DefaultRateMultiplier holds the default value on creation for the rate_multiplier field.
usagelog
.
DefaultRateMultiplier
=
usagelogDescRateMultiplier
.
Default
.
(
float64
)
// usagelogDescBillingType is the schema descriptor for billing_type field.
usagelogDescBillingType
:=
usagelogFields
[
2
1
]
.
Descriptor
()
usagelogDescBillingType
:=
usagelogFields
[
2
2
]
.
Descriptor
()
// usagelog.DefaultBillingType holds the default value on creation for the billing_type field.
usagelog
.
DefaultBillingType
=
usagelogDescBillingType
.
Default
.
(
int8
)
// usagelogDescStream is the schema descriptor for stream field.
usagelogDescStream
:=
usagelogFields
[
2
2
]
.
Descriptor
()
usagelogDescStream
:=
usagelogFields
[
2
3
]
.
Descriptor
()
// usagelog.DefaultStream holds the default value on creation for the stream field.
usagelog
.
DefaultStream
=
usagelogDescStream
.
Default
.
(
bool
)
// usagelogDescUserAgent is the schema descriptor for user_agent field.
usagelogDescUserAgent
:=
usagelogFields
[
2
5
]
.
Descriptor
()
usagelogDescUserAgent
:=
usagelogFields
[
2
6
]
.
Descriptor
()
// usagelog.UserAgentValidator is a validator for the "user_agent" field. It is called by the builders before save.
usagelog
.
UserAgentValidator
=
usagelogDescUserAgent
.
Validators
[
0
]
.
(
func
(
string
)
error
)
// usagelogDescIPAddress is the schema descriptor for ip_address field.
usagelogDescIPAddress
:=
usagelogFields
[
2
6
]
.
Descriptor
()
usagelogDescIPAddress
:=
usagelogFields
[
2
7
]
.
Descriptor
()
// usagelog.IPAddressValidator is a validator for the "ip_address" field. It is called by the builders before save.
usagelog
.
IPAddressValidator
=
usagelogDescIPAddress
.
Validators
[
0
]
.
(
func
(
string
)
error
)
// usagelogDescImageCount is the schema descriptor for image_count field.
usagelogDescImageCount
:=
usagelogFields
[
2
7
]
.
Descriptor
()
usagelogDescImageCount
:=
usagelogFields
[
2
8
]
.
Descriptor
()
// usagelog.DefaultImageCount holds the default value on creation for the image_count field.
usagelog
.
DefaultImageCount
=
usagelogDescImageCount
.
Default
.
(
int
)
// usagelogDescImageSize is the schema descriptor for image_size field.
usagelogDescImageSize
:=
usagelogFields
[
2
8
]
.
Descriptor
()
usagelogDescImageSize
:=
usagelogFields
[
2
9
]
.
Descriptor
()
// usagelog.ImageSizeValidator is a validator for the "image_size" field. It is called by the builders before save.
usagelog
.
ImageSizeValidator
=
usagelogDescImageSize
.
Validators
[
0
]
.
(
func
(
string
)
error
)
// usagelogDescMediaType is the schema descriptor for media_type field.
usagelogDescMediaType
:=
usagelogFields
[
29
]
.
Descriptor
()
usagelogDescMediaType
:=
usagelogFields
[
30
]
.
Descriptor
()
// usagelog.MediaTypeValidator is a validator for the "media_type" field. It is called by the builders before save.
usagelog
.
MediaTypeValidator
=
usagelogDescMediaType
.
Validators
[
0
]
.
(
func
(
string
)
error
)
// usagelogDescCacheTTLOverridden is the schema descriptor for cache_ttl_overridden field.
usagelogDescCacheTTLOverridden
:=
usagelogFields
[
3
0
]
.
Descriptor
()
usagelogDescCacheTTLOverridden
:=
usagelogFields
[
3
1
]
.
Descriptor
()
// usagelog.DefaultCacheTTLOverridden holds the default value on creation for the cache_ttl_overridden field.
usagelog
.
DefaultCacheTTLOverridden
=
usagelogDescCacheTTLOverridden
.
Default
.
(
bool
)
// usagelogDescCreatedAt is the schema descriptor for created_at field.
usagelogDescCreatedAt
:=
usagelogFields
[
3
1
]
.
Descriptor
()
usagelogDescCreatedAt
:=
usagelogFields
[
3
2
]
.
Descriptor
()
// usagelog.DefaultCreatedAt holds the default value on creation for the created_at field.
usagelog
.
DefaultCreatedAt
=
usagelogDescCreatedAt
.
Default
.
(
func
()
time
.
Time
)
userMixin
:=
schema
.
User
{}
.
Mixin
()
...
...
backend/ent/schema/usage_log.go
View file @
241023f3
...
...
@@ -41,6 +41,12 @@ func (UsageLog) Fields() []ent.Field {
field
.
String
(
"model"
)
.
MaxLen
(
100
)
.
NotEmpty
(),
// UpstreamModel stores the actual upstream model name when model mapping
// is applied. NULL means no mapping — the requested model was used as-is.
field
.
String
(
"upstream_model"
)
.
MaxLen
(
100
)
.
Optional
()
.
Nillable
(),
field
.
Int64
(
"group_id"
)
.
Optional
()
.
Nillable
(),
...
...
backend/ent/usagelog.go
View file @
241023f3
...
...
@@ -32,6 +32,8 @@ type UsageLog struct {
RequestID
string
`json:"request_id,omitempty"`
// Model holds the value of the "model" field.
Model
string
`json:"model,omitempty"`
// UpstreamModel holds the value of the "upstream_model" field.
UpstreamModel
*
string
`json:"upstream_model,omitempty"`
// GroupID holds the value of the "group_id" field.
GroupID
*
int64
`json:"group_id,omitempty"`
// SubscriptionID holds the value of the "subscription_id" field.
...
...
@@ -175,7 +177,7 @@ func (*UsageLog) scanValues(columns []string) ([]any, error) {
values
[
i
]
=
new
(
sql
.
NullFloat64
)
case
usagelog
.
FieldID
,
usagelog
.
FieldUserID
,
usagelog
.
FieldAPIKeyID
,
usagelog
.
FieldAccountID
,
usagelog
.
FieldGroupID
,
usagelog
.
FieldSubscriptionID
,
usagelog
.
FieldInputTokens
,
usagelog
.
FieldOutputTokens
,
usagelog
.
FieldCacheCreationTokens
,
usagelog
.
FieldCacheReadTokens
,
usagelog
.
FieldCacheCreation5mTokens
,
usagelog
.
FieldCacheCreation1hTokens
,
usagelog
.
FieldBillingType
,
usagelog
.
FieldDurationMs
,
usagelog
.
FieldFirstTokenMs
,
usagelog
.
FieldImageCount
:
values
[
i
]
=
new
(
sql
.
NullInt64
)
case
usagelog
.
FieldRequestID
,
usagelog
.
FieldModel
,
usagelog
.
FieldUserAgent
,
usagelog
.
FieldIPAddress
,
usagelog
.
FieldImageSize
,
usagelog
.
FieldMediaType
:
case
usagelog
.
FieldRequestID
,
usagelog
.
FieldModel
,
usagelog
.
FieldUpstreamModel
,
usagelog
.
FieldUserAgent
,
usagelog
.
FieldIPAddress
,
usagelog
.
FieldImageSize
,
usagelog
.
FieldMediaType
:
values
[
i
]
=
new
(
sql
.
NullString
)
case
usagelog
.
FieldCreatedAt
:
values
[
i
]
=
new
(
sql
.
NullTime
)
...
...
@@ -230,6 +232,13 @@ func (_m *UsageLog) assignValues(columns []string, values []any) error {
}
else
if
value
.
Valid
{
_m
.
Model
=
value
.
String
}
case
usagelog
.
FieldUpstreamModel
:
if
value
,
ok
:=
values
[
i
]
.
(
*
sql
.
NullString
);
!
ok
{
return
fmt
.
Errorf
(
"unexpected type %T for field upstream_model"
,
values
[
i
])
}
else
if
value
.
Valid
{
_m
.
UpstreamModel
=
new
(
string
)
*
_m
.
UpstreamModel
=
value
.
String
}
case
usagelog
.
FieldGroupID
:
if
value
,
ok
:=
values
[
i
]
.
(
*
sql
.
NullInt64
);
!
ok
{
return
fmt
.
Errorf
(
"unexpected type %T for field group_id"
,
values
[
i
])
...
...
@@ -477,6 +486,11 @@ func (_m *UsageLog) String() string {
builder
.
WriteString
(
"model="
)
builder
.
WriteString
(
_m
.
Model
)
builder
.
WriteString
(
", "
)
if
v
:=
_m
.
UpstreamModel
;
v
!=
nil
{
builder
.
WriteString
(
"upstream_model="
)
builder
.
WriteString
(
*
v
)
}
builder
.
WriteString
(
", "
)
if
v
:=
_m
.
GroupID
;
v
!=
nil
{
builder
.
WriteString
(
"group_id="
)
builder
.
WriteString
(
fmt
.
Sprintf
(
"%v"
,
*
v
))
...
...
backend/ent/usagelog/usagelog.go
View file @
241023f3
...
...
@@ -24,6 +24,8 @@ const (
FieldRequestID
=
"request_id"
// FieldModel holds the string denoting the model field in the database.
FieldModel
=
"model"
// FieldUpstreamModel holds the string denoting the upstream_model field in the database.
FieldUpstreamModel
=
"upstream_model"
// FieldGroupID holds the string denoting the group_id field in the database.
FieldGroupID
=
"group_id"
// FieldSubscriptionID holds the string denoting the subscription_id field in the database.
...
...
@@ -135,6 +137,7 @@ var Columns = []string{
FieldAccountID
,
FieldRequestID
,
FieldModel
,
FieldUpstreamModel
,
FieldGroupID
,
FieldSubscriptionID
,
FieldInputTokens
,
...
...
@@ -179,6 +182,8 @@ var (
RequestIDValidator
func
(
string
)
error
// ModelValidator is a validator for the "model" field. It is called by the builders before save.
ModelValidator
func
(
string
)
error
// UpstreamModelValidator is a validator for the "upstream_model" field. It is called by the builders before save.
UpstreamModelValidator
func
(
string
)
error
// DefaultInputTokens holds the default value on creation for the "input_tokens" field.
DefaultInputTokens
int
// DefaultOutputTokens holds the default value on creation for the "output_tokens" field.
...
...
@@ -258,6 +263,11 @@ func ByModel(opts ...sql.OrderTermOption) OrderOption {
return
sql
.
OrderByField
(
FieldModel
,
opts
...
)
.
ToFunc
()
}
// ByUpstreamModel orders the results by the upstream_model field.
func
ByUpstreamModel
(
opts
...
sql
.
OrderTermOption
)
OrderOption
{
return
sql
.
OrderByField
(
FieldUpstreamModel
,
opts
...
)
.
ToFunc
()
}
// ByGroupID orders the results by the group_id field.
func
ByGroupID
(
opts
...
sql
.
OrderTermOption
)
OrderOption
{
return
sql
.
OrderByField
(
FieldGroupID
,
opts
...
)
.
ToFunc
()
...
...
backend/ent/usagelog/where.go
View file @
241023f3
...
...
@@ -80,6 +80,11 @@ func Model(v string) predicate.UsageLog {
return
predicate
.
UsageLog
(
sql
.
FieldEQ
(
FieldModel
,
v
))
}
// UpstreamModel applies equality check predicate on the "upstream_model" field. It's identical to UpstreamModelEQ.
func
UpstreamModel
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldEQ
(
FieldUpstreamModel
,
v
))
}
// GroupID applies equality check predicate on the "group_id" field. It's identical to GroupIDEQ.
func
GroupID
(
v
int64
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldEQ
(
FieldGroupID
,
v
))
...
...
@@ -405,6 +410,81 @@ func ModelContainsFold(v string) predicate.UsageLog {
return
predicate
.
UsageLog
(
sql
.
FieldContainsFold
(
FieldModel
,
v
))
}
// UpstreamModelEQ applies the EQ predicate on the "upstream_model" field.
func
UpstreamModelEQ
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldEQ
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelNEQ applies the NEQ predicate on the "upstream_model" field.
func
UpstreamModelNEQ
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldNEQ
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelIn applies the In predicate on the "upstream_model" field.
func
UpstreamModelIn
(
vs
...
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldIn
(
FieldUpstreamModel
,
vs
...
))
}
// UpstreamModelNotIn applies the NotIn predicate on the "upstream_model" field.
func
UpstreamModelNotIn
(
vs
...
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldNotIn
(
FieldUpstreamModel
,
vs
...
))
}
// UpstreamModelGT applies the GT predicate on the "upstream_model" field.
func
UpstreamModelGT
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldGT
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelGTE applies the GTE predicate on the "upstream_model" field.
func
UpstreamModelGTE
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldGTE
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelLT applies the LT predicate on the "upstream_model" field.
func
UpstreamModelLT
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldLT
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelLTE applies the LTE predicate on the "upstream_model" field.
func
UpstreamModelLTE
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldLTE
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelContains applies the Contains predicate on the "upstream_model" field.
func
UpstreamModelContains
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldContains
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelHasPrefix applies the HasPrefix predicate on the "upstream_model" field.
func
UpstreamModelHasPrefix
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldHasPrefix
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelHasSuffix applies the HasSuffix predicate on the "upstream_model" field.
func
UpstreamModelHasSuffix
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldHasSuffix
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelIsNil applies the IsNil predicate on the "upstream_model" field.
func
UpstreamModelIsNil
()
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldIsNull
(
FieldUpstreamModel
))
}
// UpstreamModelNotNil applies the NotNil predicate on the "upstream_model" field.
func
UpstreamModelNotNil
()
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldNotNull
(
FieldUpstreamModel
))
}
// UpstreamModelEqualFold applies the EqualFold predicate on the "upstream_model" field.
func
UpstreamModelEqualFold
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldEqualFold
(
FieldUpstreamModel
,
v
))
}
// UpstreamModelContainsFold applies the ContainsFold predicate on the "upstream_model" field.
func
UpstreamModelContainsFold
(
v
string
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldContainsFold
(
FieldUpstreamModel
,
v
))
}
// GroupIDEQ applies the EQ predicate on the "group_id" field.
func
GroupIDEQ
(
v
int64
)
predicate
.
UsageLog
{
return
predicate
.
UsageLog
(
sql
.
FieldEQ
(
FieldGroupID
,
v
))
...
...
backend/ent/usagelog_create.go
View file @
241023f3
...
...
@@ -57,6 +57,20 @@ func (_c *UsageLogCreate) SetModel(v string) *UsageLogCreate {
return
_c
}
// SetUpstreamModel sets the "upstream_model" field.
func
(
_c
*
UsageLogCreate
)
SetUpstreamModel
(
v
string
)
*
UsageLogCreate
{
_c
.
mutation
.
SetUpstreamModel
(
v
)
return
_c
}
// SetNillableUpstreamModel sets the "upstream_model" field if the given value is not nil.
func
(
_c
*
UsageLogCreate
)
SetNillableUpstreamModel
(
v
*
string
)
*
UsageLogCreate
{
if
v
!=
nil
{
_c
.
SetUpstreamModel
(
*
v
)
}
return
_c
}
// SetGroupID sets the "group_id" field.
func
(
_c
*
UsageLogCreate
)
SetGroupID
(
v
int64
)
*
UsageLogCreate
{
_c
.
mutation
.
SetGroupID
(
v
)
...
...
@@ -596,6 +610,11 @@ func (_c *UsageLogCreate) check() error {
return
&
ValidationError
{
Name
:
"model"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.model": %w`
,
err
)}
}
}
if
v
,
ok
:=
_c
.
mutation
.
UpstreamModel
();
ok
{
if
err
:=
usagelog
.
UpstreamModelValidator
(
v
);
err
!=
nil
{
return
&
ValidationError
{
Name
:
"upstream_model"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.upstream_model": %w`
,
err
)}
}
}
if
_
,
ok
:=
_c
.
mutation
.
InputTokens
();
!
ok
{
return
&
ValidationError
{
Name
:
"input_tokens"
,
err
:
errors
.
New
(
`ent: missing required field "UsageLog.input_tokens"`
)}
}
...
...
@@ -714,6 +733,10 @@ func (_c *UsageLogCreate) createSpec() (*UsageLog, *sqlgraph.CreateSpec) {
_spec
.
SetField
(
usagelog
.
FieldModel
,
field
.
TypeString
,
value
)
_node
.
Model
=
value
}
if
value
,
ok
:=
_c
.
mutation
.
UpstreamModel
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldUpstreamModel
,
field
.
TypeString
,
value
)
_node
.
UpstreamModel
=
&
value
}
if
value
,
ok
:=
_c
.
mutation
.
InputTokens
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldInputTokens
,
field
.
TypeInt
,
value
)
_node
.
InputTokens
=
value
...
...
@@ -1011,6 +1034,24 @@ func (u *UsageLogUpsert) UpdateModel() *UsageLogUpsert {
return
u
}
// SetUpstreamModel sets the "upstream_model" field.
func
(
u
*
UsageLogUpsert
)
SetUpstreamModel
(
v
string
)
*
UsageLogUpsert
{
u
.
Set
(
usagelog
.
FieldUpstreamModel
,
v
)
return
u
}
// UpdateUpstreamModel sets the "upstream_model" field to the value that was provided on create.
func
(
u
*
UsageLogUpsert
)
UpdateUpstreamModel
()
*
UsageLogUpsert
{
u
.
SetExcluded
(
usagelog
.
FieldUpstreamModel
)
return
u
}
// ClearUpstreamModel clears the value of the "upstream_model" field.
func
(
u
*
UsageLogUpsert
)
ClearUpstreamModel
()
*
UsageLogUpsert
{
u
.
SetNull
(
usagelog
.
FieldUpstreamModel
)
return
u
}
// SetGroupID sets the "group_id" field.
func
(
u
*
UsageLogUpsert
)
SetGroupID
(
v
int64
)
*
UsageLogUpsert
{
u
.
Set
(
usagelog
.
FieldGroupID
,
v
)
...
...
@@ -1600,6 +1641,27 @@ func (u *UsageLogUpsertOne) UpdateModel() *UsageLogUpsertOne {
})
}
// SetUpstreamModel sets the "upstream_model" field.
func
(
u
*
UsageLogUpsertOne
)
SetUpstreamModel
(
v
string
)
*
UsageLogUpsertOne
{
return
u
.
Update
(
func
(
s
*
UsageLogUpsert
)
{
s
.
SetUpstreamModel
(
v
)
})
}
// UpdateUpstreamModel sets the "upstream_model" field to the value that was provided on create.
func
(
u
*
UsageLogUpsertOne
)
UpdateUpstreamModel
()
*
UsageLogUpsertOne
{
return
u
.
Update
(
func
(
s
*
UsageLogUpsert
)
{
s
.
UpdateUpstreamModel
()
})
}
// ClearUpstreamModel clears the value of the "upstream_model" field.
func
(
u
*
UsageLogUpsertOne
)
ClearUpstreamModel
()
*
UsageLogUpsertOne
{
return
u
.
Update
(
func
(
s
*
UsageLogUpsert
)
{
s
.
ClearUpstreamModel
()
})
}
// SetGroupID sets the "group_id" field.
func
(
u
*
UsageLogUpsertOne
)
SetGroupID
(
v
int64
)
*
UsageLogUpsertOne
{
return
u
.
Update
(
func
(
s
*
UsageLogUpsert
)
{
...
...
@@ -2434,6 +2496,27 @@ func (u *UsageLogUpsertBulk) UpdateModel() *UsageLogUpsertBulk {
})
}
// SetUpstreamModel sets the "upstream_model" field.
func (u *UsageLogUpsertBulk) SetUpstreamModel(v string) *UsageLogUpsertBulk {
	// Apply the same setter to every row in the bulk upsert.
	return u.Update(func(s *UsageLogUpsert) {
		s.SetUpstreamModel(v)
	})
}

// UpdateUpstreamModel sets the "upstream_model" field to the value that was provided on create.
func (u *UsageLogUpsertBulk) UpdateUpstreamModel() *UsageLogUpsertBulk {
	return u.Update(func(s *UsageLogUpsert) {
		s.UpdateUpstreamModel()
	})
}

// ClearUpstreamModel clears the value of the "upstream_model" field.
func (u *UsageLogUpsertBulk) ClearUpstreamModel() *UsageLogUpsertBulk {
	return u.Update(func(s *UsageLogUpsert) {
		s.ClearUpstreamModel()
	})
}
// SetGroupID sets the "group_id" field.
func
(
u
*
UsageLogUpsertBulk
)
SetGroupID
(
v
int64
)
*
UsageLogUpsertBulk
{
return
u
.
Update
(
func
(
s
*
UsageLogUpsert
)
{
...
...
backend/ent/usagelog_update.go
View file @
241023f3
...
...
@@ -102,6 +102,26 @@ func (_u *UsageLogUpdate) SetNillableModel(v *string) *UsageLogUpdate {
return
_u
}
// SetUpstreamModel sets the "upstream_model" field.
func (_u *UsageLogUpdate) SetUpstreamModel(v string) *UsageLogUpdate {
	_u.mutation.SetUpstreamModel(v)
	return _u
}

// SetNillableUpstreamModel sets the "upstream_model" field if the given value is not nil.
// A nil pointer leaves the field untouched, so optional request parameters
// do not overwrite the stored value.
func (_u *UsageLogUpdate) SetNillableUpstreamModel(v *string) *UsageLogUpdate {
	if v != nil {
		_u.SetUpstreamModel(*v)
	}
	return _u
}

// ClearUpstreamModel clears the value of the "upstream_model" field.
func (_u *UsageLogUpdate) ClearUpstreamModel() *UsageLogUpdate {
	_u.mutation.ClearUpstreamModel()
	return _u
}
// SetGroupID sets the "group_id" field.
func
(
_u
*
UsageLogUpdate
)
SetGroupID
(
v
int64
)
*
UsageLogUpdate
{
_u
.
mutation
.
SetGroupID
(
v
)
...
...
@@ -745,6 +765,11 @@ func (_u *UsageLogUpdate) check() error {
return
&
ValidationError
{
Name
:
"model"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.model": %w`
,
err
)}
}
}
if
v
,
ok
:=
_u
.
mutation
.
UpstreamModel
();
ok
{
if
err
:=
usagelog
.
UpstreamModelValidator
(
v
);
err
!=
nil
{
return
&
ValidationError
{
Name
:
"upstream_model"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.upstream_model": %w`
,
err
)}
}
}
if
v
,
ok
:=
_u
.
mutation
.
UserAgent
();
ok
{
if
err
:=
usagelog
.
UserAgentValidator
(
v
);
err
!=
nil
{
return
&
ValidationError
{
Name
:
"user_agent"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.user_agent": %w`
,
err
)}
...
...
@@ -795,6 +820,12 @@ func (_u *UsageLogUpdate) sqlSave(ctx context.Context) (_node int, err error) {
if
value
,
ok
:=
_u
.
mutation
.
Model
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldModel
,
field
.
TypeString
,
value
)
}
if
value
,
ok
:=
_u
.
mutation
.
UpstreamModel
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldUpstreamModel
,
field
.
TypeString
,
value
)
}
if
_u
.
mutation
.
UpstreamModelCleared
()
{
_spec
.
ClearField
(
usagelog
.
FieldUpstreamModel
,
field
.
TypeString
)
}
if
value
,
ok
:=
_u
.
mutation
.
InputTokens
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldInputTokens
,
field
.
TypeInt
,
value
)
}
...
...
@@ -1177,6 +1208,26 @@ func (_u *UsageLogUpdateOne) SetNillableModel(v *string) *UsageLogUpdateOne {
return
_u
}
// SetUpstreamModel sets the "upstream_model" field.
func (_u *UsageLogUpdateOne) SetUpstreamModel(v string) *UsageLogUpdateOne {
	_u.mutation.SetUpstreamModel(v)
	return _u
}

// SetNillableUpstreamModel sets the "upstream_model" field if the given value is not nil.
// A nil pointer leaves the field untouched, so optional request parameters
// do not overwrite the stored value.
func (_u *UsageLogUpdateOne) SetNillableUpstreamModel(v *string) *UsageLogUpdateOne {
	if v != nil {
		_u.SetUpstreamModel(*v)
	}
	return _u
}

// ClearUpstreamModel clears the value of the "upstream_model" field.
func (_u *UsageLogUpdateOne) ClearUpstreamModel() *UsageLogUpdateOne {
	_u.mutation.ClearUpstreamModel()
	return _u
}
// SetGroupID sets the "group_id" field.
func
(
_u
*
UsageLogUpdateOne
)
SetGroupID
(
v
int64
)
*
UsageLogUpdateOne
{
_u
.
mutation
.
SetGroupID
(
v
)
...
...
@@ -1833,6 +1884,11 @@ func (_u *UsageLogUpdateOne) check() error {
return
&
ValidationError
{
Name
:
"model"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.model": %w`
,
err
)}
}
}
if
v
,
ok
:=
_u
.
mutation
.
UpstreamModel
();
ok
{
if
err
:=
usagelog
.
UpstreamModelValidator
(
v
);
err
!=
nil
{
return
&
ValidationError
{
Name
:
"upstream_model"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.upstream_model": %w`
,
err
)}
}
}
if
v
,
ok
:=
_u
.
mutation
.
UserAgent
();
ok
{
if
err
:=
usagelog
.
UserAgentValidator
(
v
);
err
!=
nil
{
return
&
ValidationError
{
Name
:
"user_agent"
,
err
:
fmt
.
Errorf
(
`ent: validator failed for field "UsageLog.user_agent": %w`
,
err
)}
...
...
@@ -1900,6 +1956,12 @@ func (_u *UsageLogUpdateOne) sqlSave(ctx context.Context) (_node *UsageLog, err
if
value
,
ok
:=
_u
.
mutation
.
Model
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldModel
,
field
.
TypeString
,
value
)
}
if
value
,
ok
:=
_u
.
mutation
.
UpstreamModel
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldUpstreamModel
,
field
.
TypeString
,
value
)
}
if
_u
.
mutation
.
UpstreamModelCleared
()
{
_spec
.
ClearField
(
usagelog
.
FieldUpstreamModel
,
field
.
TypeString
)
}
if
value
,
ok
:=
_u
.
mutation
.
InputTokens
();
ok
{
_spec
.
SetField
(
usagelog
.
FieldInputTokens
,
field
.
TypeInt
,
value
)
}
...
...
backend/go.sum
View file @
241023f3
...
...
@@ -22,8 +22,6 @@ github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwTo
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/aws/aws-sdk-go-v2 v1.41.2 h1:LuT2rzqNQsauaGkPK/7813XxcZ3o3yePY0Iy891T2ls=
github.com/aws/aws-sdk-go-v2 v1.41.2/go.mod h1:IvvlAZQXvTXznUPfRVfryiG1fbzE2NGK6m9u39YQ+S4=
github.com/aws/aws-sdk-go-v2 v1.41.3 h1:4kQ/fa22KjDt13QCy1+bYADvdgcxpfH18f0zP542kZA=
github.com/aws/aws-sdk-go-v2 v1.41.3/go.mod h1:mwsPRE8ceUUpiTgF7QmQIJ7lgsKUPQOUl3o72QBrE1o=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5 h1:zWFmPmgw4sveAYi1mRqG+E/g0461cJ5M4bJ8/nc6d3Q=
...
...
@@ -60,8 +58,6 @@ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.15 h1:edCcNp9eGIUDUCrzoCu1jWA
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.15/go.mod h1:lyRQKED9xWfgkYC/wmmYfv7iVIM68Z5OQ88ZdcV1QbU=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.7 h1:NITQpgo9A5NrDZ57uOWj+abvXSb83BbyggcUBVksN7c=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.7/go.mod h1:sks5UWBhEuWYDPdwlnRFn1w7xWdH29Jcpe+/PJQefEs=
github.com/aws/smithy-go v1.24.1 h1:VbyeNfmYkWoxMVpGUAbQumkODcYmfMRfZ8yQiH30SK0=
github.com/aws/smithy-go v1.24.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/aws/smithy-go v1.24.2 h1:FzA3bu/nt/vDvmnkg+R8Xl46gmzEDam6mZ1hzmwXFng=
github.com/aws/smithy-go v1.24.2/go.mod h1:YE2RhdIuDbA5E5bTdciG9KrW3+TiEONeUWCqxX9i1Fc=
github.com/bdandy/go-errors v1.2.2 h1:WdFv/oukjTJCLa79UfkGmwX7ZxONAihKu4V0mLIs11Q=
...
...
@@ -98,10 +94,6 @@ github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
github.com/clipperhouse/uax29/v2 v2.5.0 h1:x7T0T4eTHDONxFJsL94uKNKPHrclyFI0lm7+w94cO8U=
github.com/clipperhouse/uax29/v2 v2.5.0/go.mod h1:Wn1g7MK6OoeDT0vL+Q0SQLDz/KpfsVRgg6W7ihQeh4g=
github.com/coder/websocket v1.8.14 h1:9L0p0iKiNOibykf283eHkKUHHrpG7f65OE3BhhO7v9g=
github.com/coder/websocket v1.8.14/go.mod h1:NX3SzP+inril6yawo5CQXx8+fk145lPDC6pumgx0mVg=
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
...
...
@@ -238,8 +230,6 @@ github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovk
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM=
github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
github.com/mdelapenya/tlscert v0.2.0 h1:7H81W6Z/4weDvZBNOfQte5GpIMo0lGYEeWbkGp5LJHI=
...
...
@@ -273,8 +263,6 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
...
...
@@ -326,8 +314,6 @@ github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ=
...
...
backend/internal/handler/admin/dashboard_handler.go
View file @
241023f3
...
...
@@ -273,6 +273,7 @@ func (h *DashboardHandler) GetModelStats(c *gin.Context) {
// Parse optional filter params
var
userID
,
apiKeyID
,
accountID
,
groupID
int64
modelSource
:=
usagestats
.
ModelSourceRequested
var
requestType
*
int16
var
stream
*
bool
var
billingType
*
int8
...
...
@@ -297,6 +298,13 @@ func (h *DashboardHandler) GetModelStats(c *gin.Context) {
groupID
=
id
}
}
if
rawModelSource
:=
strings
.
TrimSpace
(
c
.
Query
(
"model_source"
));
rawModelSource
!=
""
{
if
!
usagestats
.
IsValidModelSource
(
rawModelSource
)
{
response
.
BadRequest
(
c
,
"Invalid model_source, use requested/upstream/mapping"
)
return
}
modelSource
=
rawModelSource
}
if
requestTypeStr
:=
strings
.
TrimSpace
(
c
.
Query
(
"request_type"
));
requestTypeStr
!=
""
{
parsed
,
err
:=
service
.
ParseUsageRequestType
(
requestTypeStr
)
if
err
!=
nil
{
...
...
@@ -323,7 +331,7 @@ func (h *DashboardHandler) GetModelStats(c *gin.Context) {
}
}
stats
,
hit
,
err
:=
h
.
getModelStatsCached
(
c
.
Request
.
Context
(),
startTime
,
endTime
,
userID
,
apiKeyID
,
accountID
,
groupID
,
requestType
,
stream
,
billingType
)
stats
,
hit
,
err
:=
h
.
getModelStatsCached
(
c
.
Request
.
Context
(),
startTime
,
endTime
,
userID
,
apiKeyID
,
accountID
,
groupID
,
modelSource
,
requestType
,
stream
,
billingType
)
if
err
!=
nil
{
response
.
Error
(
c
,
500
,
"Failed to get model statistics"
)
return
...
...
@@ -619,6 +627,12 @@ func (h *DashboardHandler) GetUserBreakdown(c *gin.Context) {
}
}
dim
.
Model
=
c
.
Query
(
"model"
)
rawModelSource
:=
strings
.
TrimSpace
(
c
.
DefaultQuery
(
"model_source"
,
usagestats
.
ModelSourceRequested
))
if
!
usagestats
.
IsValidModelSource
(
rawModelSource
)
{
response
.
BadRequest
(
c
,
"Invalid model_source, use requested/upstream/mapping"
)
return
}
dim
.
ModelType
=
rawModelSource
dim
.
Endpoint
=
c
.
Query
(
"endpoint"
)
dim
.
EndpointType
=
c
.
DefaultQuery
(
"endpoint_type"
,
"inbound"
)
...
...
backend/internal/handler/admin/dashboard_handler_request_type_test.go
View file @
241023f3
...
...
@@ -149,6 +149,28 @@ func TestDashboardModelStatsInvalidStream(t *testing.T) {
require
.
Equal
(
t
,
http
.
StatusBadRequest
,
rec
.
Code
)
}
func
TestDashboardModelStatsInvalidModelSource
(
t
*
testing
.
T
)
{
repo
:=
&
dashboardUsageRepoCapture
{}
router
:=
newDashboardRequestTypeTestRouter
(
repo
)
req
:=
httptest
.
NewRequest
(
http
.
MethodGet
,
"/admin/dashboard/models?model_source=invalid"
,
nil
)
rec
:=
httptest
.
NewRecorder
()
router
.
ServeHTTP
(
rec
,
req
)
require
.
Equal
(
t
,
http
.
StatusBadRequest
,
rec
.
Code
)
}
func
TestDashboardModelStatsValidModelSource
(
t
*
testing
.
T
)
{
repo
:=
&
dashboardUsageRepoCapture
{}
router
:=
newDashboardRequestTypeTestRouter
(
repo
)
req
:=
httptest
.
NewRequest
(
http
.
MethodGet
,
"/admin/dashboard/models?model_source=upstream"
,
nil
)
rec
:=
httptest
.
NewRecorder
()
router
.
ServeHTTP
(
rec
,
req
)
require
.
Equal
(
t
,
http
.
StatusOK
,
rec
.
Code
)
}
func
TestDashboardUsersRankingLimitAndCache
(
t
*
testing
.
T
)
{
dashboardUsersRankingCache
=
newSnapshotCache
(
5
*
time
.
Minute
)
repo
:=
&
dashboardUsageRepoCapture
{
...
...
backend/internal/handler/admin/dashboard_handler_user_breakdown_test.go
View file @
241023f3
...
...
@@ -73,9 +73,35 @@ func TestGetUserBreakdown_ModelFilter(t *testing.T) {
require
.
Equal
(
t
,
http
.
StatusOK
,
w
.
Code
)
require
.
Equal
(
t
,
"claude-opus-4-6"
,
repo
.
capturedDim
.
Model
)
require
.
Equal
(
t
,
usagestats
.
ModelSourceRequested
,
repo
.
capturedDim
.
ModelType
)
require
.
Equal
(
t
,
int64
(
0
),
repo
.
capturedDim
.
GroupID
)
}
func
TestGetUserBreakdown_ModelSourceFilter
(
t
*
testing
.
T
)
{
repo
:=
&
userBreakdownRepoCapture
{}
router
:=
newUserBreakdownRouter
(
repo
)
req
:=
httptest
.
NewRequest
(
http
.
MethodGet
,
"/admin/dashboard/user-breakdown?start_date=2026-03-01&end_date=2026-03-16&model=claude-opus-4-6&model_source=upstream"
,
nil
)
w
:=
httptest
.
NewRecorder
()
router
.
ServeHTTP
(
w
,
req
)
require
.
Equal
(
t
,
http
.
StatusOK
,
w
.
Code
)
require
.
Equal
(
t
,
usagestats
.
ModelSourceUpstream
,
repo
.
capturedDim
.
ModelType
)
}
func
TestGetUserBreakdown_InvalidModelSource
(
t
*
testing
.
T
)
{
repo
:=
&
userBreakdownRepoCapture
{}
router
:=
newUserBreakdownRouter
(
repo
)
req
:=
httptest
.
NewRequest
(
http
.
MethodGet
,
"/admin/dashboard/user-breakdown?start_date=2026-03-01&end_date=2026-03-16&model_source=foobar"
,
nil
)
w
:=
httptest
.
NewRecorder
()
router
.
ServeHTTP
(
w
,
req
)
require
.
Equal
(
t
,
http
.
StatusBadRequest
,
w
.
Code
)
}
func
TestGetUserBreakdown_EndpointFilter
(
t
*
testing
.
T
)
{
repo
:=
&
userBreakdownRepoCapture
{}
router
:=
newUserBreakdownRouter
(
repo
)
...
...
backend/internal/handler/admin/dashboard_query_cache.go
View file @
241023f3
...
...
@@ -38,6 +38,7 @@ type dashboardModelGroupCacheKey struct {
APIKeyID
int64
`json:"api_key_id"`
AccountID
int64
`json:"account_id"`
GroupID
int64
`json:"group_id"`
ModelSource
string
`json:"model_source,omitempty"`
RequestType
*
int16
`json:"request_type"`
Stream
*
bool
`json:"stream"`
BillingType
*
int8
`json:"billing_type"`
...
...
@@ -111,6 +112,7 @@ func (h *DashboardHandler) getModelStatsCached(
ctx
context
.
Context
,
startTime
,
endTime
time
.
Time
,
userID
,
apiKeyID
,
accountID
,
groupID
int64
,
modelSource
string
,
requestType
*
int16
,
stream
*
bool
,
billingType
*
int8
,
...
...
@@ -122,12 +124,13 @@ func (h *DashboardHandler) getModelStatsCached(
APIKeyID
:
apiKeyID
,
AccountID
:
accountID
,
GroupID
:
groupID
,
ModelSource
:
usagestats
.
NormalizeModelSource
(
modelSource
),
RequestType
:
requestType
,
Stream
:
stream
,
BillingType
:
billingType
,
})
entry
,
hit
,
err
:=
dashboardModelStatsCache
.
GetOrLoad
(
key
,
func
()
(
any
,
error
)
{
return
h
.
dashboardService
.
GetModelStatsWithFilters
(
ctx
,
startTime
,
endTime
,
userID
,
apiKeyID
,
accountID
,
groupID
,
requestType
,
stream
,
billingType
)
return
h
.
dashboardService
.
GetModelStatsWithFilters
BySource
(
ctx
,
startTime
,
endTime
,
userID
,
apiKeyID
,
accountID
,
groupID
,
requestType
,
stream
,
billingType
,
modelSource
)
})
if
err
!=
nil
{
return
nil
,
hit
,
err
...
...
backend/internal/handler/admin/dashboard_snapshot_v2_handler.go
View file @
241023f3
...
...
@@ -200,6 +200,7 @@ func (h *DashboardHandler) buildSnapshotV2Response(
filters
.
APIKeyID
,
filters
.
AccountID
,
filters
.
GroupID
,
usagestats
.
ModelSourceRequested
,
filters
.
RequestType
,
filters
.
Stream
,
filters
.
BillingType
,
...
...
backend/internal/handler/dto/mappers.go
View file @
241023f3
...
...
@@ -523,6 +523,7 @@ func usageLogFromServiceUser(l *service.UsageLog) UsageLog {
AccountID
:
l
.
AccountID
,
RequestID
:
l
.
RequestID
,
Model
:
l
.
Model
,
UpstreamModel
:
l
.
UpstreamModel
,
ServiceTier
:
l
.
ServiceTier
,
ReasoningEffort
:
l
.
ReasoningEffort
,
InboundEndpoint
:
l
.
InboundEndpoint
,
...
...
backend/internal/handler/dto/types.go
View file @
241023f3
...
...
@@ -334,6 +334,9 @@ type UsageLog struct {
AccountID
int64
`json:"account_id"`
RequestID
string
`json:"request_id"`
Model
string
`json:"model"`
// UpstreamModel is the actual model sent to the upstream provider after mapping.
// Omitted when no mapping was applied (requested model was used as-is).
UpstreamModel
*
string
`json:"upstream_model,omitempty"`
// ServiceTier records the OpenAI service tier used for billing, e.g. "priority" / "flex".
ServiceTier
*
string
`json:"service_tier,omitempty"`
// ReasoningEffort is the request's reasoning effort level.
...
...
backend/internal/pkg/usagestats/usage_log_types.go
View file @
241023f3
...
...
@@ -3,6 +3,28 @@ package usagestats
import
"time"
// Model source dimensions accepted by usage statistics queries.
const (
	// ModelSourceRequested aggregates by the model name the client requested.
	ModelSourceRequested = "requested"
	// ModelSourceUpstream aggregates by the model actually sent upstream.
	ModelSourceUpstream = "upstream"
	// ModelSourceMapping aggregates by the requested->upstream model pair.
	ModelSourceMapping = "mapping"
)

// IsValidModelSource reports whether source is one of the supported
// model-source dimensions.
func IsValidModelSource(source string) bool {
	return source == ModelSourceRequested ||
		source == ModelSourceUpstream ||
		source == ModelSourceMapping
}

// NormalizeModelSource returns source unchanged when it is a supported
// dimension and falls back to ModelSourceRequested otherwise.
func NormalizeModelSource(source string) string {
	if !IsValidModelSource(source) {
		return ModelSourceRequested
	}
	return source
}
// DashboardStats 仪表盘统计
type
DashboardStats
struct
{
// 用户统计
...
...
@@ -150,6 +172,7 @@ type UserBreakdownItem struct {
// UserBreakdownDimension describes the optional filters applied to the
// per-user breakdown query. Zero values disable the corresponding filter.
type UserBreakdownDimension struct {
	GroupID      int64  // filter by group_id (>0 to enable)
	Model        string // filter by model name (non-empty to enable)
	ModelType    string // "requested", "upstream", or "mapping"
	Endpoint     string // filter by endpoint value (non-empty to enable)
	EndpointType string // "inbound", "upstream", or "path"
}
...
...
backend/internal/pkg/usagestats/usage_log_types_test.go
0 → 100644
View file @
241023f3
package
usagestats
import
"testing"
func
TestIsValidModelSource
(
t
*
testing
.
T
)
{
tests
:=
[]
struct
{
name
string
source
string
want
bool
}{
{
name
:
"requested"
,
source
:
ModelSourceRequested
,
want
:
true
},
{
name
:
"upstream"
,
source
:
ModelSourceUpstream
,
want
:
true
},
{
name
:
"mapping"
,
source
:
ModelSourceMapping
,
want
:
true
},
{
name
:
"invalid"
,
source
:
"foobar"
,
want
:
false
},
{
name
:
"empty"
,
source
:
""
,
want
:
false
},
}
for
_
,
tc
:=
range
tests
{
t
.
Run
(
tc
.
name
,
func
(
t
*
testing
.
T
)
{
if
got
:=
IsValidModelSource
(
tc
.
source
);
got
!=
tc
.
want
{
t
.
Fatalf
(
"IsValidModelSource(%q)=%v want %v"
,
tc
.
source
,
got
,
tc
.
want
)
}
})
}
}
func
TestNormalizeModelSource
(
t
*
testing
.
T
)
{
tests
:=
[]
struct
{
name
string
source
string
want
string
}{
{
name
:
"requested"
,
source
:
ModelSourceRequested
,
want
:
ModelSourceRequested
},
{
name
:
"upstream"
,
source
:
ModelSourceUpstream
,
want
:
ModelSourceUpstream
},
{
name
:
"mapping"
,
source
:
ModelSourceMapping
,
want
:
ModelSourceMapping
},
{
name
:
"invalid falls back"
,
source
:
"foobar"
,
want
:
ModelSourceRequested
},
{
name
:
"empty falls back"
,
source
:
""
,
want
:
ModelSourceRequested
},
}
for
_
,
tc
:=
range
tests
{
t
.
Run
(
tc
.
name
,
func
(
t
*
testing
.
T
)
{
if
got
:=
NormalizeModelSource
(
tc
.
source
);
got
!=
tc
.
want
{
t
.
Fatalf
(
"NormalizeModelSource(%q)=%q want %q"
,
tc
.
source
,
got
,
tc
.
want
)
}
})
}
}
backend/internal/repository/usage_log_repo.go
View file @
241023f3
...
...
@@ -28,7 +28,7 @@ import (
gocache
"github.com/patrickmn/go-cache"
)
const
usageLogSelectColumns
=
"id, user_id, api_key_id, account_id, request_id, model, group_id, subscription_id, input_tokens, output_tokens, cache_creation_tokens, cache_read_tokens, cache_creation_5m_tokens, cache_creation_1h_tokens, input_cost, output_cost, cache_creation_cost, cache_read_cost, total_cost, actual_cost, rate_multiplier, account_rate_multiplier, billing_type, request_type, stream, openai_ws_mode, duration_ms, first_token_ms, user_agent, ip_address, image_count, image_size, media_type, service_tier, reasoning_effort, inbound_endpoint, upstream_endpoint, cache_ttl_overridden, created_at"
const
usageLogSelectColumns
=
"id, user_id, api_key_id, account_id, request_id, model,
upstream_model,
group_id, subscription_id, input_tokens, output_tokens, cache_creation_tokens, cache_read_tokens, cache_creation_5m_tokens, cache_creation_1h_tokens, input_cost, output_cost, cache_creation_cost, cache_read_cost, total_cost, actual_cost, rate_multiplier, account_rate_multiplier, billing_type, request_type, stream, openai_ws_mode, duration_ms, first_token_ms, user_agent, ip_address, image_count, image_size, media_type, service_tier, reasoning_effort, inbound_endpoint, upstream_endpoint, cache_ttl_overridden, created_at"
var
usageLogInsertArgTypes
=
[
...
]
string
{
"bigint"
,
...
...
@@ -36,6 +36,7 @@ var usageLogInsertArgTypes = [...]string{
"bigint"
,
"text"
,
"text"
,
"text"
,
"bigint"
,
"bigint"
,
"integer"
,
...
...
@@ -277,6 +278,7 @@ func (r *usageLogRepository) createSingle(ctx context.Context, sqlq sqlExecutor,
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -311,12 +313,12 @@ func (r *usageLogRepository) createSingle(ctx context.Context, sqlq sqlExecutor,
cache_ttl_overridden,
created_at
) VALUES (
$1, $2, $3, $4, $5,
$
6
, $
7
,
$8,
$9, $10, $11,
$1
2
, $1
3
,
$14,
$15, $16, $17, $18, $19,
$20,
$21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38
$1, $2, $3, $4, $5,
$6,
$
7
, $
8
,
$9, $10, $11,
$12,
$1
3
, $1
4
,
$15, $16, $17, $18, $19,
$20,
$21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38
, $39
)
ON CONFLICT (request_id, api_key_id) DO NOTHING
RETURNING id, created_at
...
...
@@ -707,6 +709,7 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -742,7 +745,7 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
created_at
) AS (VALUES `
)
args
:=
make
([]
any
,
0
,
len
(
keys
)
*
3
8
)
args
:=
make
([]
any
,
0
,
len
(
keys
)
*
3
9
)
argPos
:=
1
for
idx
,
key
:=
range
keys
{
if
idx
>
0
{
...
...
@@ -776,6 +779,7 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -816,6 +820,7 @@ func buildUsageLogBatchInsertQuery(keys []string, preparedByKey map[string]usage
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -896,6 +901,7 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -931,7 +937,7 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
created_at
) AS (VALUES `
)
args
:=
make
([]
any
,
0
,
len
(
preparedList
)
*
3
8
)
args
:=
make
([]
any
,
0
,
len
(
preparedList
)
*
3
9
)
argPos
:=
1
for
idx
,
prepared
:=
range
preparedList
{
if
idx
>
0
{
...
...
@@ -962,6 +968,7 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -1002,6 +1009,7 @@ func buildUsageLogBestEffortInsertQuery(preparedList []usageLogInsertPrepared) (
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -1050,6 +1058,7 @@ func execUsageLogInsertNoResult(ctx context.Context, sqlq sqlExecutor, prepared
account_id,
request_id,
model,
upstream_model,
group_id,
subscription_id,
input_tokens,
...
...
@@ -1084,12 +1093,12 @@ func execUsageLogInsertNoResult(ctx context.Context, sqlq sqlExecutor, prepared
cache_ttl_overridden,
created_at
) VALUES (
$1, $2, $3, $4, $5,
$
6
, $
7
,
$8,
$9, $10, $11,
$1
2
, $1
3
,
$14,
$15, $16, $17, $18, $19,
$20,
$21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38
$1, $2, $3, $4, $5,
$6,
$
7
, $
8
,
$9, $10, $11,
$12,
$1
3
, $1
4
,
$15, $16, $17, $18, $19,
$20,
$21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38
, $39
)
ON CONFLICT (request_id, api_key_id) DO NOTHING
`
,
prepared
.
args
...
)
...
...
@@ -1121,6 +1130,7 @@ func prepareUsageLogInsert(log *service.UsageLog) usageLogInsertPrepared {
reasoningEffort
:=
nullString
(
log
.
ReasoningEffort
)
inboundEndpoint
:=
nullString
(
log
.
InboundEndpoint
)
upstreamEndpoint
:=
nullString
(
log
.
UpstreamEndpoint
)
upstreamModel
:=
nullString
(
log
.
UpstreamModel
)
var
requestIDArg
any
if
requestID
!=
""
{
...
...
@@ -1138,6 +1148,7 @@ func prepareUsageLogInsert(log *service.UsageLog) usageLogInsertPrepared {
log
.
AccountID
,
requestIDArg
,
log
.
Model
,
upstreamModel
,
groupID
,
subscriptionID
,
log
.
InputTokens
,
...
...
@@ -2864,15 +2875,26 @@ func (r *usageLogRepository) getUsageTrendFromAggregates(ctx context.Context, st
// GetModelStatsWithFilters returns model statistics with optional filters.
// It is a backward-compatible wrapper around GetModelStatsWithFiltersBySource
// that aggregates by the client-requested model name (ModelSourceRequested).
func (r *usageLogRepository) GetModelStatsWithFilters(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8) (results []ModelStat, err error) {
	return r.getModelStatsWithFiltersBySource(ctx, startTime, endTime, userID, apiKeyID, accountID, groupID, requestType, stream, billingType, usagestats.ModelSourceRequested)
}
// GetModelStatsWithFiltersBySource returns model statistics with optional filters and model source dimension.
// source: requested | upstream | mapping. Unknown values are normalized to
// "requested" by the underlying query builder, so a bad source never errors.
func (r *usageLogRepository) GetModelStatsWithFiltersBySource(ctx context.Context, startTime, endTime time.Time, userID, apiKeyID, accountID, groupID int64, requestType *int16, stream *bool, billingType *int8, source string) (results []ModelStat, err error) {
	return r.getModelStatsWithFiltersBySource(ctx, startTime, endTime, userID, apiKeyID, accountID, groupID, requestType, stream, billingType, source)
}
func
(
r
*
usageLogRepository
)
getModelStatsWithFiltersBySource
(
ctx
context
.
Context
,
startTime
,
endTime
time
.
Time
,
userID
,
apiKeyID
,
accountID
,
groupID
int64
,
requestType
*
int16
,
stream
*
bool
,
billingType
*
int8
,
source
string
)
(
results
[]
ModelStat
,
err
error
)
{
actualCostExpr
:=
"COALESCE(SUM(actual_cost), 0) as actual_cost"
// 当仅按 account_id 聚合时,实际费用使用账号倍率(total_cost * account_rate_multiplier)。
if
accountID
>
0
&&
userID
==
0
&&
apiKeyID
==
0
{
actualCostExpr
=
"COALESCE(SUM(total_cost * COALESCE(account_rate_multiplier, 1)), 0) as actual_cost"
}
modelExpr
:=
resolveModelDimensionExpression
(
source
)
query
:=
fmt
.
Sprintf
(
`
SELECT
model,
%s as
model,
COUNT(*) as requests,
COALESCE(SUM(input_tokens), 0) as input_tokens,
COALESCE(SUM(output_tokens), 0) as output_tokens,
...
...
@@ -2883,7 +2905,7 @@ func (r *usageLogRepository) GetModelStatsWithFilters(ctx context.Context, start
%s
FROM usage_logs
WHERE created_at >= $1 AND created_at < $2
`
,
actualCostExpr
)
`
,
modelExpr
,
actualCostExpr
)
args
:=
[]
any
{
startTime
,
endTime
}
if
userID
>
0
{
...
...
@@ -2907,7 +2929,7 @@ func (r *usageLogRepository) GetModelStatsWithFilters(ctx context.Context, start
query
+=
fmt
.
Sprintf
(
" AND billing_type = $%d"
,
len
(
args
)
+
1
)
args
=
append
(
args
,
int16
(
*
billingType
))
}
query
+=
" GROUP BY
model
ORDER BY total_tokens DESC"
query
+=
fmt
.
Sprintf
(
" GROUP BY
%s
ORDER BY total_tokens DESC"
,
modelExpr
)
rows
,
err
:=
r
.
sql
.
QueryContext
(
ctx
,
query
,
args
...
)
if
err
!=
nil
{
...
...
@@ -3021,7 +3043,7 @@ func (r *usageLogRepository) GetUserBreakdownStats(ctx context.Context, startTim
args
=
append
(
args
,
dim
.
GroupID
)
}
if
dim
.
Model
!=
""
{
query
+=
fmt
.
Sprintf
(
" AND
ul.model = $%d"
,
len
(
args
)
+
1
)
query
+=
fmt
.
Sprintf
(
" AND
%s = $%d"
,
resolveModelDimensionExpression
(
dim
.
ModelType
)
,
len
(
args
)
+
1
)
args
=
append
(
args
,
dim
.
Model
)
}
if
dim
.
Endpoint
!=
""
{
...
...
@@ -3102,6 +3124,18 @@ func (r *usageLogRepository) GetAllGroupUsageSummary(ctx context.Context, todayS
return
results
,
nil
}
// resolveModelDimensionExpression maps model source type to a safe SQL expression.
func
resolveModelDimensionExpression
(
modelType
string
)
string
{
switch
usagestats
.
NormalizeModelSource
(
modelType
)
{
case
usagestats
.
ModelSourceUpstream
:
return
"COALESCE(NULLIF(TRIM(upstream_model), ''), model)"
case
usagestats
.
ModelSourceMapping
:
return
"(model || ' -> ' || COALESCE(NULLIF(TRIM(upstream_model), ''), model))"
default
:
return
"model"
}
}
// resolveEndpointColumn maps endpoint type to the corresponding DB column name.
func
resolveEndpointColumn
(
endpointType
string
)
string
{
switch
endpointType
{
...
...
@@ -3854,6 +3888,7 @@ func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, e
accountID
int64
requestID
sql
.
NullString
model
string
upstreamModel
sql
.
NullString
groupID
sql
.
NullInt64
subscriptionID
sql
.
NullInt64
inputTokens
int
...
...
@@ -3896,6 +3931,7 @@ func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, e
&
accountID
,
&
requestID
,
&
model
,
&
upstreamModel
,
&
groupID
,
&
subscriptionID
,
&
inputTokens
,
...
...
@@ -4008,6 +4044,9 @@ func scanUsageLog(scanner interface{ Scan(...any) error }) (*service.UsageLog, e
if
upstreamEndpoint
.
Valid
{
log
.
UpstreamEndpoint
=
&
upstreamEndpoint
.
String
}
if
upstreamModel
.
Valid
{
log
.
UpstreamModel
=
&
upstreamModel
.
String
}
return
log
,
nil
}
...
...
Prev
1
2
3
Next
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment