8294a76bc2
Replace the legacy boolean “DisplayInCurrencyEnabled” with an injected, type-safe
configuration `general_setting.quota_display_type`, and wire it through the
backend and frontend.
Backend
- Add `QuotaDisplayType` to `operation_setting.GeneralSetting` with injected
registration via `config.GlobalConfig.Register("general_setting", ...)`.
Helpers: `IsCurrencyDisplay()`, `IsCNYDisplay()`, `GetQuotaDisplayType()`.
- Expose `quota_display_type` in `/api/status` and keep legacy
`display_in_currency` for backward compatibility.
- Logger: update `LogQuota` and `FormatQuota` to support USD/CNY/TOKENS. When
CNY is selected, convert using `operation_setting.USDExchangeRate`.
- Controllers:
- `billing`: compute subscription/usage amounts based on the selected type
(USD: divide by `QuotaPerUnit`; CNY: USD→CNY; TOKENS: keep raw tokens).
- `topup` / `topup_stripe`: treat inputs as “amount” for USD/CNY and as
token-count for TOKENS; adjust min topup and pay money accordingly.
- `misc`: include `quota_display_type` in status payload.
- Compatibility: in `model/option.UpdateOption`, map updates to
`DisplayInCurrencyEnabled` → `general_setting.quota_display_type`
(true→USD, false→TOKENS). Keep exporting the legacy key in `OptionMap`.
Frontend
- Settings: replace the “display in currency” switch with a Select
(`general_setting.quota_display_type`) offering USD / CNY / Tokens.
Provide fallback mapping from legacy `DisplayInCurrencyEnabled`.
- Persist `quota_display_type` to localStorage (keep `display_in_currency`
for legacy components).
- Rendering helpers: base all quota/price rendering on `quota_display_type`;
use `usd_exchange_rate` for CNY symbol/values.
- Pricing page: default view currency follows site display type (USD/CNY),
while TOKENS mode still allows per-view currency toggling when needed.
Notes
- No database migrations required.
- Legacy clients remain functional via compatibility fields.
348 lines
10 KiB
Go
348 lines
10 KiB
Go
package vertex
|
|
|
|
import (
|
|
"encoding/json"
|
|
"errors"
|
|
"fmt"
|
|
"io"
|
|
"net/http"
|
|
"one-api/common"
|
|
"one-api/dto"
|
|
"one-api/relay/channel"
|
|
"one-api/relay/channel/claude"
|
|
"one-api/relay/channel/gemini"
|
|
"one-api/relay/channel/openai"
|
|
relaycommon "one-api/relay/common"
|
|
"one-api/relay/constant"
|
|
"one-api/setting/model_setting"
|
|
"one-api/types"
|
|
"strings"
|
|
|
|
"github.com/gin-gonic/gin"
|
|
)
|
|
|
|
// Request modes select which upstream protocol family this Vertex AI
// adaptor speaks for a given model (chosen by Init from the model name).
const (
	// RequestModeClaude routes through the Anthropic Claude handlers.
	RequestModeClaude = 1
	// RequestModeGemini routes through the Google Gemini handlers.
	RequestModeGemini = 2
	// RequestModeLlama routes through the OpenAI-compatible endpoint.
	RequestModeLlama = 3
)
|
|
|
|
// claudeModelMap translates public Anthropic model names (date-suffixed
// with "-") into the Vertex AI publisher model IDs (date-suffixed with
// "@") expected by the rawPredict endpoints.
var claudeModelMap = map[string]string{
	"claude-3-sonnet-20240229":   "claude-3-sonnet@20240229",
	"claude-3-opus-20240229":     "claude-3-opus@20240229",
	"claude-3-haiku-20240307":    "claude-3-haiku@20240307",
	"claude-3-5-sonnet-20240620": "claude-3-5-sonnet@20240620",
	"claude-3-5-sonnet-20241022": "claude-3-5-sonnet-v2@20241022",
	"claude-3-7-sonnet-20250219": "claude-3-7-sonnet@20250219",
	"claude-sonnet-4-20250514":   "claude-sonnet-4@20250514",
	"claude-opus-4-20250514":     "claude-opus-4@20250514",
	"claude-opus-4-1-20250805":   "claude-opus-4-1@20250805",
}
|
|
|
|
// anthropicVersion is the anthropic_version value Vertex requires in
// Claude request bodies (see copyRequest).
const anthropicVersion = "vertex-2023-10-16"
|
|
|
|
// Adaptor implements the relay channel adaptor for Google Vertex AI.
// It dispatches to Claude, Gemini, or OpenAI-compatible (Llama) request
// and response handling based on RequestMode.
type Adaptor struct {
	// RequestMode is one of the RequestMode* constants, set by Init
	// from the upstream model name.
	RequestMode int
	// AccountCredentials holds the decoded service-account credentials;
	// populated by getRequestUrl when the channel is not API-key based.
	AccountCredentials Credentials
}
|
|
|
|
func (a *Adaptor) ConvertGeminiRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeminiChatRequest) (any, error) {
|
|
geminiAdaptor := gemini.Adaptor{}
|
|
return geminiAdaptor.ConvertGeminiRequest(c, info, request)
|
|
}
|
|
|
|
func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.ClaudeRequest) (any, error) {
|
|
if v, ok := claudeModelMap[info.UpstreamModelName]; ok {
|
|
c.Set("request_model", v)
|
|
} else {
|
|
c.Set("request_model", request.Model)
|
|
}
|
|
vertexClaudeReq := copyRequest(request, anthropicVersion)
|
|
return vertexClaudeReq, nil
|
|
}
|
|
|
|
// ConvertAudioRequest is not supported for Vertex AI channels and always
// returns a "not implemented" error.
func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
	//TODO implement me
	return nil, errors.New("not implemented")
}
|
|
|
|
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
|
|
geminiAdaptor := gemini.Adaptor{}
|
|
return geminiAdaptor.ConvertImageRequest(c, info, request)
|
|
}
|
|
|
|
func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
|
|
if strings.HasPrefix(info.UpstreamModelName, "claude") {
|
|
a.RequestMode = RequestModeClaude
|
|
} else if strings.Contains(info.UpstreamModelName, "llama") {
|
|
a.RequestMode = RequestModeLlama
|
|
} else {
|
|
a.RequestMode = RequestModeGemini
|
|
}
|
|
}
|
|
|
|
func (a *Adaptor) getRequestUrl(info *relaycommon.RelayInfo, modelName, suffix string) (string, error) {
|
|
region := GetModelRegion(info.ApiVersion, info.OriginModelName)
|
|
if info.ChannelOtherSettings.VertexKeyType != dto.VertexKeyTypeAPIKey {
|
|
adc := &Credentials{}
|
|
if err := common.Unmarshal([]byte(info.ApiKey), adc); err != nil {
|
|
return "", fmt.Errorf("failed to decode credentials file: %w", err)
|
|
}
|
|
a.AccountCredentials = *adc
|
|
|
|
if a.RequestMode == RequestModeLlama {
|
|
return fmt.Sprintf(
|
|
"https://%s-aiplatform.googleapis.com/v1beta1/projects/%s/locations/%s/endpoints/openapi/chat/completions",
|
|
region,
|
|
adc.ProjectID,
|
|
region,
|
|
), nil
|
|
}
|
|
|
|
if region == "global" {
|
|
return fmt.Sprintf(
|
|
"https://aiplatform.googleapis.com/v1/projects/%s/locations/global/publishers/google/models/%s:%s",
|
|
adc.ProjectID,
|
|
modelName,
|
|
suffix,
|
|
), nil
|
|
} else {
|
|
return fmt.Sprintf(
|
|
"https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:%s",
|
|
region,
|
|
adc.ProjectID,
|
|
region,
|
|
modelName,
|
|
suffix,
|
|
), nil
|
|
}
|
|
} else {
|
|
if region == "global" {
|
|
return fmt.Sprintf(
|
|
"https://aiplatform.googleapis.com/v1/publishers/google/models/%s:%s?key=%s",
|
|
modelName,
|
|
suffix,
|
|
info.ApiKey,
|
|
), nil
|
|
} else {
|
|
return fmt.Sprintf(
|
|
"https://%s-aiplatform.googleapis.com/v1/publishers/google/models/%s:%s?key=%s",
|
|
region,
|
|
modelName,
|
|
suffix,
|
|
info.ApiKey,
|
|
), nil
|
|
}
|
|
}
|
|
}
|
|
|
|
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
|
|
suffix := ""
|
|
if a.RequestMode == RequestModeGemini {
|
|
if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
|
|
// 新增逻辑:处理 -thinking-<budget> 格式
|
|
if strings.Contains(info.UpstreamModelName, "-thinking-") {
|
|
parts := strings.Split(info.UpstreamModelName, "-thinking-")
|
|
info.UpstreamModelName = parts[0]
|
|
} else if strings.HasSuffix(info.UpstreamModelName, "-thinking") { // 旧的适配
|
|
info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-thinking")
|
|
} else if strings.HasSuffix(info.UpstreamModelName, "-nothinking") {
|
|
info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-nothinking")
|
|
}
|
|
}
|
|
|
|
if info.IsStream {
|
|
suffix = "streamGenerateContent?alt=sse"
|
|
} else {
|
|
suffix = "generateContent"
|
|
}
|
|
|
|
if strings.HasPrefix(info.UpstreamModelName, "imagen") {
|
|
suffix = "predict"
|
|
}
|
|
return a.getRequestUrl(info, info.UpstreamModelName, suffix)
|
|
} else if a.RequestMode == RequestModeClaude {
|
|
if info.IsStream {
|
|
suffix = "streamRawPredict?alt=sse"
|
|
} else {
|
|
suffix = "rawPredict"
|
|
}
|
|
model := info.UpstreamModelName
|
|
if v, ok := claudeModelMap[info.UpstreamModelName]; ok {
|
|
model = v
|
|
}
|
|
return a.getRequestUrl(info, model, suffix)
|
|
} else if a.RequestMode == RequestModeLlama {
|
|
return a.getRequestUrl(info, "", "")
|
|
}
|
|
return "", errors.New("unsupported request mode")
|
|
}
|
|
|
|
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Header, info *relaycommon.RelayInfo) error {
|
|
channel.SetupApiRequestHeader(info, c, req)
|
|
if info.ChannelOtherSettings.VertexKeyType != dto.VertexKeyTypeAPIKey {
|
|
accessToken, err := getAccessToken(a, info)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
req.Set("Authorization", "Bearer "+accessToken)
|
|
}
|
|
if a.AccountCredentials.ProjectID != "" {
|
|
req.Set("x-goog-user-project", a.AccountCredentials.ProjectID)
|
|
}
|
|
return nil
|
|
}
|
|
|
|
func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
|
|
if request == nil {
|
|
return nil, errors.New("request is nil")
|
|
}
|
|
if a.RequestMode == RequestModeGemini && strings.HasPrefix(info.UpstreamModelName, "imagen") {
|
|
prompt := ""
|
|
for _, m := range request.Messages {
|
|
if m.Role == "user" {
|
|
prompt = m.StringContent()
|
|
if prompt != "" {
|
|
break
|
|
}
|
|
}
|
|
}
|
|
if prompt == "" {
|
|
if p, ok := request.Prompt.(string); ok {
|
|
prompt = p
|
|
}
|
|
}
|
|
if prompt == "" {
|
|
return nil, errors.New("prompt is required for image generation")
|
|
}
|
|
|
|
imgReq := dto.ImageRequest{
|
|
Model: request.Model,
|
|
Prompt: prompt,
|
|
N: 1,
|
|
Size: "1024x1024",
|
|
}
|
|
if request.N > 0 {
|
|
imgReq.N = uint(request.N)
|
|
}
|
|
if request.Size != "" {
|
|
imgReq.Size = request.Size
|
|
}
|
|
if len(request.ExtraBody) > 0 {
|
|
var extra map[string]any
|
|
if err := json.Unmarshal(request.ExtraBody, &extra); err == nil {
|
|
if n, ok := extra["n"].(float64); ok && n > 0 {
|
|
imgReq.N = uint(n)
|
|
}
|
|
if size, ok := extra["size"].(string); ok {
|
|
imgReq.Size = size
|
|
}
|
|
// accept aspectRatio in extra body (top-level or under parameters)
|
|
if ar, ok := extra["aspectRatio"].(string); ok && ar != "" {
|
|
imgReq.Size = ar
|
|
}
|
|
if params, ok := extra["parameters"].(map[string]any); ok {
|
|
if ar, ok := params["aspectRatio"].(string); ok && ar != "" {
|
|
imgReq.Size = ar
|
|
}
|
|
}
|
|
}
|
|
}
|
|
c.Set("request_model", request.Model)
|
|
return a.ConvertImageRequest(c, info, imgReq)
|
|
}
|
|
if a.RequestMode == RequestModeClaude {
|
|
claudeReq, err := claude.RequestOpenAI2ClaudeMessage(c, *request)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
vertexClaudeReq := copyRequest(claudeReq, anthropicVersion)
|
|
c.Set("request_model", claudeReq.Model)
|
|
info.UpstreamModelName = claudeReq.Model
|
|
return vertexClaudeReq, nil
|
|
} else if a.RequestMode == RequestModeGemini {
|
|
geminiRequest, err := gemini.CovertGemini2OpenAI(c, *request, info)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
c.Set("request_model", request.Model)
|
|
return geminiRequest, nil
|
|
} else if a.RequestMode == RequestModeLlama {
|
|
return request, nil
|
|
}
|
|
return nil, errors.New("unsupported request mode")
|
|
}
|
|
|
|
// ConvertRerankRequest is a no-op: it returns a nil payload and no error.
// NOTE(review): returning (nil, nil) silently drops rerank requests
// instead of signalling "not implemented" like the audio/embedding
// converters — confirm callers tolerate a nil converted body.
func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
	return nil, nil
}
|
|
|
|
// ConvertEmbeddingRequest is not supported for Vertex AI channels and
// always returns a "not implemented" error.
func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.EmbeddingRequest) (any, error) {
	//TODO implement me
	return nil, errors.New("not implemented")
}
|
|
|
|
// ConvertOpenAIResponsesRequest (the OpenAI Responses API) is not
// supported for Vertex AI channels and always returns a "not
// implemented" error.
func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {
	// TODO implement me
	return nil, errors.New("not implemented")
}
|
|
|
|
// DoRequest sends the converted request body upstream via the shared
// channel helper, which uses this adaptor's URL and header hooks.
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (any, error) {
	return channel.DoApiRequest(a, c, info, requestBody)
}
|
|
|
|
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *types.NewAPIError) {
|
|
if info.IsStream {
|
|
switch a.RequestMode {
|
|
case RequestModeClaude:
|
|
return claude.ClaudeStreamHandler(c, resp, info, claude.RequestModeMessage)
|
|
case RequestModeGemini:
|
|
if info.RelayMode == constant.RelayModeGemini {
|
|
return gemini.GeminiTextGenerationStreamHandler(c, info, resp)
|
|
} else {
|
|
return gemini.GeminiChatStreamHandler(c, info, resp)
|
|
}
|
|
case RequestModeLlama:
|
|
return openai.OaiStreamHandler(c, info, resp)
|
|
}
|
|
} else {
|
|
switch a.RequestMode {
|
|
case RequestModeClaude:
|
|
return claude.ClaudeHandler(c, resp, info, claude.RequestModeMessage)
|
|
case RequestModeGemini:
|
|
if info.RelayMode == constant.RelayModeGemini {
|
|
return gemini.GeminiTextGenerationHandler(c, info, resp)
|
|
} else {
|
|
if strings.HasPrefix(info.UpstreamModelName, "imagen") {
|
|
return gemini.GeminiImageHandler(c, info, resp)
|
|
}
|
|
return gemini.GeminiChatHandler(c, info, resp)
|
|
}
|
|
case RequestModeLlama:
|
|
return openai.OpenaiHandler(c, info, resp)
|
|
}
|
|
}
|
|
return
|
|
}
|
|
|
|
func (a *Adaptor) GetModelList() []string {
|
|
var modelList []string
|
|
for i, s := range ModelList {
|
|
modelList = append(modelList, s)
|
|
ModelList[i] = s
|
|
}
|
|
for i, s := range claude.ModelList {
|
|
modelList = append(modelList, s)
|
|
claude.ModelList[i] = s
|
|
}
|
|
for i, s := range gemini.ModelList {
|
|
modelList = append(modelList, s)
|
|
gemini.ModelList[i] = s
|
|
}
|
|
return modelList
|
|
}
|
|
|
|
// GetChannelName returns this adaptor's channel display name
// (the package-level ChannelName constant).
func (a *Adaptor) GetChannelName() string {
	return ChannelName
}
|