diff --git a/relay/channel/gemini/dto.go b/relay/channel/gemini/dto.go
index fa9108df..b22e092a 100644
--- a/relay/channel/gemini/dto.go
+++ b/relay/channel/gemini/dto.go
@@ -140,6 +140,7 @@ type GeminiChatGenerationConfig struct {
 	Seed               int64                 `json:"seed,omitempty"`
 	ResponseModalities []string              `json:"responseModalities,omitempty"`
 	ThinkingConfig     *GeminiThinkingConfig `json:"thinkingConfig,omitempty"`
+	SpeechConfig       json.RawMessage       `json:"speechConfig,omitempty"` // RawMessage to allow flexible speech config
 }
 
 type GeminiChatCandidate struct {
diff --git a/relay/relay-gemini.go b/relay/relay-gemini.go
index 21cf5e12..9edbe5c2 100644
--- a/relay/relay-gemini.go
+++ b/relay/relay-gemini.go
@@ -155,6 +155,10 @@ func GeminiHelper(c *gin.Context) (openaiErr *dto.OpenAIErrorWithStatusCode) {
 		return service.OpenAIErrorWrapperLocal(err, "marshal_text_request_failed", http.StatusInternalServerError)
 	}
 
+	if common.DebugEnabled {
+		println("Gemini request body: " + string(requestBody))
+	}
+
 	resp, err := adaptor.DoRequest(c, relayInfo, bytes.NewReader(requestBody))
 	if err != nil {
 		common.LogError(c, "Do gemini request failed: "+err.Error())
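
For context on the SpeechConfig change, here is a minimal standalone sketch (using a trimmed-down stand-in struct rather than the real GeminiChatGenerationConfig in relay/channel/gemini/dto.go) of why json.RawMessage is used: whatever speechConfig object the client sends is preserved byte-for-byte and re-emitted unchanged when the upstream request body is marshalled. The nested voiceConfig payload below is only an illustrative example of a Gemini-style TTS config; any JSON object would survive the round trip.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down stand-in for GeminiChatGenerationConfig, keeping only the new field.
type generationConfig struct {
	SpeechConfig json.RawMessage `json:"speechConfig,omitempty"`
}

func main() {
	// Incoming request: the speech config is captured as raw JSON, not parsed into fields.
	incoming := []byte(`{"speechConfig":{"voiceConfig":{"prebuiltVoiceConfig":{"voiceName":"Kore"}}}}`)

	var cfg generationConfig
	if err := json.Unmarshal(incoming, &cfg); err != nil {
		panic(err)
	}

	// Outgoing request body: the raw bytes are written back out verbatim.
	out, _ := json.Marshal(cfg)
	fmt.Println(string(out))
}
```

Because json.RawMessage defers parsing, new speech-config fields added on the Gemini side pass through the relay without requiring further struct changes.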