Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions backend/internal/service/openai_codex_transform.go
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,10 @@ func applyCodexOAuthTransform(reqBody map[string]any, isCodexCLI bool, isCompact
}
}

if stripUnsupportedOpenAIOAuthServiceTier(reqBody) {
result.Modified = true
}

// 兼容遗留的 functions 和 function_call,转换为 tools 和 tool_choice
if functionsRaw, ok := reqBody["functions"]; ok {
if functions, k := functionsRaw.([]any); k {
Expand Down
16 changes: 16 additions & 0 deletions backend/internal/service/openai_codex_transform_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,22 @@ func TestApplyCodexOAuthTransform_ExplicitStoreTrueForcedFalse(t *testing.T) {
require.False(t, store)
}

// TestApplyCodexOAuthTransform_StripsUnsupportedFlexServiceTier verifies that
// the Codex OAuth transform deletes an unsupported "flex" service_tier from
// the request body and reports the body as modified.
func TestApplyCodexOAuthTransform_StripsUnsupportedFlexServiceTier(t *testing.T) {
	body := map[string]any{
		"model":        "gpt-5.2",
		"service_tier": "flex",
		"input": []any{
			map[string]any{"type": "message", "role": "user", "content": "hi"},
		},
	}

	res := applyCodexOAuthTransform(body, false, false)

	require.True(t, res.Modified)
	_, stillPresent := body["service_tier"]
	require.False(t, stillPresent)
}

func TestApplyCodexOAuthTransform_CompactForcesNonStreaming(t *testing.T) {
reqBody := map[string]any{
"model": "gpt-5.1-codex",
Expand Down
84 changes: 84 additions & 0 deletions backend/internal/service/openai_compat_model_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,48 @@ func TestForwardAsAnthropic_NormalizesRoutingAndEffortForGpt54XHigh(t *testing.T
t.Logf("response body: %s", rec.Body.String())
}

// TestForwardAsAnthropic_OAuthStripsFlexServiceTier verifies that forwarding
// an Anthropic-style request through an OAuth account removes the unsupported
// "flex" service_tier before the body reaches the upstream, and that the
// returned result carries no service tier.
func TestForwardAsAnthropic_OAuthStripsFlexServiceTier(t *testing.T) {
	t.Parallel()
	gin.SetMode(gin.TestMode)

	recorder := httptest.NewRecorder()
	gc, _ := gin.CreateTestContext(recorder)
	reqBody := []byte(`{"model":"gpt-5.4","max_tokens":16,"messages":[{"role":"user","content":"hello"}],"service_tier":"flex","stream":false}`)
	gc.Request = httptest.NewRequest(http.MethodPost, "/v1/messages", bytes.NewReader(reqBody))
	gc.Request.Header.Set("Content-Type", "application/json")

	// Upstream responds with a minimal completed-response SSE stream.
	sse := `data: {"type":"response.completed","response":{"id":"resp_1","object":"response","model":"gpt-5.4","status":"completed","output":[{"type":"message","id":"msg_1","role":"assistant","status":"completed","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":5,"output_tokens":2,"total_tokens":7}}}` + "\n\ndata: [DONE]\n"
	upstream := &httpUpstreamRecorder{resp: &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"text/event-stream"}, "x-request-id": []string{"rid_flex_anthropic"}},
		Body:       io.NopCloser(strings.NewReader(sse)),
	}}

	account := &Account{
		ID:          1,
		Name:        "openai-oauth",
		Platform:    PlatformOpenAI,
		Type:        AccountTypeOAuth,
		Concurrency: 1,
		Credentials: map[string]any{
			"access_token":       "oauth-token",
			"chatgpt_account_id": "chatgpt-acc",
		},
	}
	svc := &OpenAIGatewayService{httpUpstream: upstream}

	result, err := svc.ForwardAsAnthropic(context.Background(), gc, account, reqBody, "", "gpt-5.1")
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Nil(t, result.ServiceTier)
	require.False(t, gjson.GetBytes(upstream.lastBody, "service_tier").Exists())
}

func TestForwardAsAnthropic_ForcedCodexInstructionsTemplatePrependsRenderedInstructions(t *testing.T) {
t.Parallel()
gin.SetMode(gin.TestMode)
Expand Down Expand Up @@ -228,3 +270,45 @@ func TestForwardAsAnthropic_ForcedCodexInstructionsTemplateUsesCachedTemplateCon
require.NotNil(t, result)
require.Equal(t, "cached-prefix\n\nclient-system", gjson.GetBytes(upstream.lastBody, "instructions").String())
}

// TestForwardAsChatCompletions_OAuthStripsFlexServiceTier verifies that
// forwarding a chat-completions request through an OAuth account removes the
// unsupported "flex" service_tier before the body reaches the upstream, and
// that the returned result carries no service tier.
func TestForwardAsChatCompletions_OAuthStripsFlexServiceTier(t *testing.T) {
	t.Parallel()
	gin.SetMode(gin.TestMode)

	recorder := httptest.NewRecorder()
	gc, _ := gin.CreateTestContext(recorder)
	reqBody := []byte(`{"model":"gpt-5.4","messages":[{"role":"user","content":"hello"}],"service_tier":"flex","stream":false}`)
	gc.Request = httptest.NewRequest(http.MethodPost, "/v1/chat/completions", bytes.NewReader(reqBody))
	gc.Request.Header.Set("Content-Type", "application/json")

	// Upstream responds with a minimal completed-response SSE stream.
	sse := `data: {"type":"response.completed","response":{"id":"resp_1","object":"response","model":"gpt-5.4","status":"completed","output":[{"type":"message","id":"msg_1","role":"assistant","status":"completed","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":5,"output_tokens":2,"total_tokens":7}}}` + "\n\ndata: [DONE]\n"
	upstream := &httpUpstreamRecorder{resp: &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"text/event-stream"}, "x-request-id": []string{"rid_flex_chat"}},
		Body:       io.NopCloser(strings.NewReader(sse)),
	}}

	account := &Account{
		ID:          1,
		Name:        "openai-oauth",
		Platform:    PlatformOpenAI,
		Type:        AccountTypeOAuth,
		Concurrency: 1,
		Credentials: map[string]any{
			"access_token":       "oauth-token",
			"chatgpt_account_id": "chatgpt-acc",
		},
	}
	svc := &OpenAIGatewayService{httpUpstream: upstream}

	result, err := svc.ForwardAsChatCompletions(context.Background(), gc, account, reqBody, "", "gpt-5.1")
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Nil(t, result.ServiceTier)
	require.False(t, gjson.GetBytes(upstream.lastBody, "service_tier").Exists())
}
5 changes: 5 additions & 0 deletions backend/internal/service/openai_gateway_chat_completions.go
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,11 @@ func (s *OpenAIGatewayService) ForwardAsChatCompletions(
} else if promptCacheKey != "" {
reqBody["prompt_cache_key"] = promptCacheKey
}
if serviceTier := extractOpenAIServiceTier(reqBody); serviceTier != nil {
responsesReq.ServiceTier = *serviceTier
} else {
responsesReq.ServiceTier = ""
}
responsesBody, err = json.Marshal(reqBody)
if err != nil {
return nil, fmt.Errorf("remarshal after codex transform: %w", err)
Expand Down
5 changes: 5 additions & 0 deletions backend/internal/service/openai_gateway_messages.go
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,11 @@ func (s *OpenAIGatewayService) ForwardAsAnthropic(
} else if promptCacheKey != "" {
reqBody["prompt_cache_key"] = promptCacheKey
}
if serviceTier := extractOpenAIServiceTier(reqBody); serviceTier != nil {
responsesReq.ServiceTier = *serviceTier
} else {
responsesReq.ServiceTier = ""
}
// OAuth codex transform forces stream=true upstream, so always use
// the streaming response handler regardless of what the client asked.
isStream = true
Expand Down
18 changes: 18 additions & 0 deletions backend/internal/service/openai_gateway_service.go
Original file line number Diff line number Diff line change
Expand Up @@ -4883,6 +4883,15 @@ func normalizeOpenAIPassthroughOAuthBody(body []byte, compact bool) ([]byte, boo
}
}

if serviceTier := normalizeOpenAIServiceTier(gjson.GetBytes(normalized, "service_tier").String()); serviceTier != nil && *serviceTier == "flex" {
next, err := sjson.DeleteBytes(normalized, "service_tier")
if err != nil {
return body, false, fmt.Errorf("normalize passthrough body delete service_tier: %w", err)
}
normalized = next
changed = true
}

return normalized, changed, nil
}

Expand Down Expand Up @@ -4936,6 +4945,15 @@ func extractOpenAIServiceTier(reqBody map[string]any) *string {
return normalizeOpenAIServiceTier(raw)
}

// stripUnsupportedOpenAIOAuthServiceTier removes the "flex" service_tier from
// reqBody, since the OAuth upstream does not accept it. It reports whether the
// body was mutated; any other (or absent) tier is left untouched.
func stripUnsupportedOpenAIOAuthServiceTier(reqBody map[string]any) bool {
	if tier := extractOpenAIServiceTier(reqBody); tier != nil && *tier == "flex" {
		delete(reqBody, "service_tier")
		return true
	}
	return false
}

func extractOpenAIServiceTierFromBody(body []byte) *string {
if len(body) == 0 {
return nil
Expand Down
50 changes: 50 additions & 0 deletions backend/internal/service/openai_oauth_passthrough_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -880,6 +880,56 @@ func TestOpenAIGatewayService_OAuthPassthrough_StreamingSetsFirstTokenMs(t *test
require.Equal(t, "priority", *result.ServiceTier)
}

// TestOpenAIGatewayService_OAuthPassthrough_StripsFlexServiceTierBeforeUpstream
// verifies that OAuth passthrough removes the unsupported "flex" service_tier
// from the request body before it is sent to the Codex upstream.
func TestOpenAIGatewayService_OAuthPassthrough_StripsFlexServiceTierBeforeUpstream(t *testing.T) {
	gin.SetMode(gin.TestMode)

	recorder := httptest.NewRecorder()
	gc, _ := gin.CreateTestContext(recorder)
	gc.Request = httptest.NewRequest(http.MethodPost, "/v1/responses", bytes.NewReader(nil))
	gc.Request.Header.Set("User-Agent", "codex_cli_rs/0.1.0")

	reqBody := []byte(`{"model":"gpt-5.2","stream":false,"service_tier":"flex","input":[{"type":"text","text":"hi"}]}`)

	// Upstream responds with a minimal completed-response SSE stream.
	sse := `data: {"type":"response.completed","response":{"id":"resp_1","object":"response","model":"gpt-5.2","status":"completed","output":[{"type":"message","id":"msg_1","role":"assistant","status":"completed","content":[{"type":"output_text","text":"ok"}]}],"usage":{"input_tokens":5,"output_tokens":2,"input_tokens_details":{"cached_tokens":0}}}}` + "\n\ndata: [DONE]\n"
	upstream := &httpUpstreamRecorder{resp: &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"text/event-stream"}, "x-request-id": []string{"rid-flex"}},
		Body:       io.NopCloser(strings.NewReader(sse)),
	}}

	svc := &OpenAIGatewayService{
		cfg:          &config.Config{Gateway: config.GatewayConfig{ForceCodexCLI: false}},
		httpUpstream: upstream,
	}

	account := &Account{
		ID:             123,
		Name:           "acc",
		Platform:       PlatformOpenAI,
		Type:           AccountTypeOAuth,
		Concurrency:    1,
		Credentials:    map[string]any{"access_token": "oauth-token", "chatgpt_account_id": "chatgpt-acc"},
		Extra:          map[string]any{"openai_passthrough": true},
		Status:         StatusActive,
		Schedulable:    true,
		RateMultiplier: f64p(1),
	}

	result, err := svc.Forward(context.Background(), gc, account, reqBody)
	require.NoError(t, err)
	require.NotNil(t, result)
	require.Nil(t, result.ServiceTier)
	require.NotNil(t, upstream.lastReq)
	require.Equal(t, chatgptCodexURL, upstream.lastReq.URL.String())
	require.False(t, gjson.GetBytes(upstream.lastBody, "service_tier").Exists())
}

func TestOpenAIGatewayService_OAuthPassthrough_StreamClientDisconnectStillCollectsUsage(t *testing.T) {
gin.SetMode(gin.TestMode)

Expand Down