🐛 fix: Use correct dto for non-stream xai
@@ -4,23 +4,23 @@ import "one-api/dto"
 
 // ChatCompletionResponse represents the response from XAI chat completion API
 type ChatCompletionResponse struct {
 	Id                string     `json:"id"`
 	Object            string     `json:"object"`
 	Created           int64      `json:"created"`
 	Model             string     `json:"model"`
-	Choices           []dto.ChatCompletionsStreamResponseChoice
+	Choices           []dto.OpenAITextResponseChoice `json:"choices"`
 	Usage             *dto.Usage `json:"usage"`
 	SystemFingerprint string     `json:"system_fingerprint"`
 }
 
 // quality, size or style are not supported by xAI API at the moment.
 type ImageRequest struct {
 	Model          string `json:"model"`
 	Prompt         string `json:"prompt" binding:"required"`
 	N              int    `json:"n,omitempty"`
 	// Size string `json:"size,omitempty"`
 	// Quality string `json:"quality,omitempty"`
 	ResponseFormat string `json:"response_format,omitempty"`
 	// Style string `json:"style,omitempty"`
 	// User string `json:"user,omitempty"`
 	// ExtraFields json.RawMessage `json:"extra_fields,omitempty"`
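
The dto swap above is the substance of the fix: a non-stream chat completion body carries full message objects under choices, while dto.ChatCompletionsStreamResponseChoice is the streaming choice type shaped for delta chunks, so decoding a non-stream body with it loses the reply. A minimal, self-contained sketch of that failure mode, with stand-in choice types whose fields are assumptions modelled on the OpenAI-compatible schema rather than the actual one-api dtos:

// Sketch only: stand-in types, not the one-api dto definitions.
package main

import (
	"encoding/json"
	"fmt"
)

// Assumed shape of a non-stream choice: a full "message" object.
type textChoice struct {
	Index   int `json:"index"`
	Message struct {
		Role    string `json:"role"`
		Content string `json:"content"`
	} `json:"message"`
	FinishReason string `json:"finish_reason"`
}

// Assumed shape of a stream choice: an incremental "delta" instead.
type streamChoice struct {
	Index int `json:"index"`
	Delta struct {
		Content string `json:"content"`
	} `json:"delta"`
}

func main() {
	body := []byte(`{"choices":[{"index":0,"message":{"role":"assistant","content":"hi"},"finish_reason":"stop"}]}`)

	var asText struct {
		Choices []textChoice `json:"choices"`
	}
	_ = json.Unmarshal(body, &asText)
	fmt.Println(asText.Choices[0].Message.Content) // "hi"

	var asStream struct {
		Choices []streamChoice `json:"choices"`
	}
	_ = json.Unmarshal(body, &asStream)
	fmt.Println(asStream.Choices[0].Delta.Content == "") // true: the content is silently dropped
}
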
@@ -82,21 +82,24 @@ func xAIHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response
 	defer common.CloseResponseBodyGracefully(resp)
 
 	responseBody, err := io.ReadAll(resp.Body)
-	var response *dto.SimpleResponse
-	err = common.Unmarshal(responseBody, &response)
 	if err != nil {
 		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
 	}
-	response.Usage.CompletionTokens = response.Usage.TotalTokens - response.Usage.PromptTokens
-	response.Usage.CompletionTokenDetails.TextTokens = response.Usage.CompletionTokens - response.Usage.CompletionTokenDetails.ReasoningTokens
+	var xaiResponse ChatCompletionResponse
+	err = common.Unmarshal(responseBody, &xaiResponse)
+	if err != nil {
+		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
+	}
+	xaiResponse.Usage.CompletionTokens = xaiResponse.Usage.TotalTokens - xaiResponse.Usage.PromptTokens
+	xaiResponse.Usage.CompletionTokenDetails.TextTokens = xaiResponse.Usage.CompletionTokens - xaiResponse.Usage.CompletionTokenDetails.ReasoningTokens
 
 	// new body
-	encodeJson, err := common.Marshal(response)
+	encodeJson, err := common.Marshal(xaiResponse)
 	if err != nil {
 		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
 	}
 
 	common.IOCopyBytesGracefully(c, resp, encodeJson)
 
-	return &response.Usage, nil
+	return xaiResponse.Usage, nil
 }
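
After unmarshalling into ChatCompletionResponse, the handler derives the completion and text token counts from what xAI reports. A short sketch of that arithmetic with illustrative numbers, using local stand-in types since the exact dto.Usage layout is not shown in this diff:

// Stand-in types only; the real dto.Usage layout in one-api may differ.
package main

import "fmt"

type completionTokenDetails struct {
	TextTokens      int
	ReasoningTokens int
}

type usage struct {
	PromptTokens           int
	CompletionTokens       int
	TotalTokens            int
	CompletionTokenDetails completionTokenDetails
}

func main() {
	// Illustrative numbers for a single non-stream xAI response.
	u := usage{PromptTokens: 100, TotalTokens: 150}
	u.CompletionTokenDetails.ReasoningTokens = 30

	// Same arithmetic as the handler: completion = total - prompt, text = completion - reasoning.
	u.CompletionTokens = u.TotalTokens - u.PromptTokens                                                 // 150 - 100 = 50
	u.CompletionTokenDetails.TextTokens = u.CompletionTokens - u.CompletionTokenDetails.ReasoningTokens // 50 - 30 = 20

	fmt.Println(u.CompletionTokens, u.CompletionTokenDetails.TextTokens) // 50 20
}

Because Usage is declared as *dto.Usage on ChatCompletionResponse, the handler can hand it back directly, which is presumably why the final return drops the address-of that the old *dto.SimpleResponse path needed.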