diff --git a/relay/channel/gemini/constant.go b/relay/channel/gemini/constant.go
index 90e347c5..73418d43 100644
--- a/relay/channel/gemini/constant.go
+++ b/relay/channel/gemini/constant.go
@@ -5,7 +5,7 @@ const (
 )
 
 var ModelList = []string{
-	"gemini-1.5-pro-latest", "gemini-1.5-flash-latest", "gemini-ultra",
+	"gemini-1.5-pro-latest", "gemini-1.5-flash-latest",
 	"gemini-1.5-pro-exp-0827", "gemini-1.5-flash-exp-0827",
 	"gemini-exp-1114", "gemini-exp-1206", "gemini-2.0-flash-exp",
diff --git a/relay/channel/gemini/dto.go b/relay/channel/gemini/dto.go
index 8c93073f..26189069 100644
--- a/relay/channel/gemini/dto.go
+++ b/relay/channel/gemini/dto.go
@@ -1,10 +1,11 @@
 package gemini
 
 type GeminiChatRequest struct {
-	Contents         []GeminiChatContent        `json:"contents"`
-	SafetySettings   []GeminiChatSafetySettings `json:"safety_settings,omitempty"`
-	GenerationConfig GeminiChatGenerationConfig `json:"generation_config,omitempty"`
-	Tools            []GeminiChatTools          `json:"tools,omitempty"`
+	Contents           []GeminiChatContent        `json:"contents"`
+	SafetySettings     []GeminiChatSafetySettings `json:"safety_settings,omitempty"`
+	GenerationConfig   GeminiChatGenerationConfig `json:"generation_config,omitempty"`
+	Tools              []GeminiChatTools          `json:"tools,omitempty"`
+	SystemInstructions *GeminiPart                `json:"system_instructions,omitempty"`
 }
 
 type GeminiInlineData struct {
diff --git a/relay/channel/gemini/relay-gemini.go b/relay/channel/gemini/relay-gemini.go
index 2e45e44c..03ba46f4 100644
--- a/relay/channel/gemini/relay-gemini.go
+++ b/relay/channel/gemini/relay-gemini.go
@@ -72,21 +72,27 @@ func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest) *GeminiChatReques
 			},
 		}
 	}
-	shouldAddDummyModelMessage := false
+	//shouldAddDummyModelMessage := false
 	for _, message := range textRequest.Messages {
+
+		if message.Role == "system" {
+			geminiRequest.SystemInstructions = &GeminiPart{
+				Text: message.StringContent(),
+			}
+			continue
+		}
 		content := GeminiChatContent{
 			Role: message.Role,
-			Parts: []GeminiPart{
-				{
-					Text: message.StringContent(),
-				},
-			},
+			//Parts: []GeminiPart{
+			//	{
+			//		Text: message.StringContent(),
+			//	},
+			//},
 		}
 		openaiContent := message.ParseContent()
 		var parts []GeminiPart
 		imageNum := 0
 		for _, part := range openaiContent {
-
 			if part.Type == dto.ContentTypeText {
 				parts = append(parts, GeminiPart{
 					Text: part.Text,
@@ -127,24 +133,24 @@ func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest) *GeminiChatReques
 			content.Role = "model"
 		}
 		// Converting system prompt to prompt from user for the same reason
-		if content.Role == "system" {
-			content.Role = "user"
-			shouldAddDummyModelMessage = true
-		}
+		//if content.Role == "system" {
+		//	content.Role = "user"
+		//	shouldAddDummyModelMessage = true
+		//}
 		geminiRequest.Contents = append(geminiRequest.Contents, content)
-
-		// If a system message is the last message, we need to add a dummy model message to make gemini happy
-		if shouldAddDummyModelMessage {
-			geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
-				Role: "model",
-				Parts: []GeminiPart{
-					{
-						Text: "Okay",
-					},
-				},
-			})
-			shouldAddDummyModelMessage = false
-		}
+		//
+		//// If a system message is the last message, we need to add a dummy model message to make gemini happy
+		//if shouldAddDummyModelMessage {
+		//	geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
+		//		Role: "model",
+		//		Parts: []GeminiPart{
+		//			{
+		//				Text: "Okay",
+		//			},
+		//		},
+		//	})
+		//	shouldAddDummyModelMessage = false
+		//}
 	}
 	return &geminiRequest
 }
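
What follows is a minimal, self-contained sketch of the conversion behavior this diff introduces: the system message is lifted into the new SystemInstructions field instead of being rewritten as a user message followed by a dummy "Okay" model reply. The convert helper and openAIMessage type below are illustrative stand-ins, not the project's actual CovertGemini2OpenAI / dto.GeneralOpenAIRequest code, and the structs are trimmed to the fields relevant here.

// Sketch only: simplified stand-ins for the real dto/gemini types.
package main

import (
	"encoding/json"
	"fmt"
)

type GeminiPart struct {
	Text string `json:"text,omitempty"`
}

type GeminiChatContent struct {
	Role  string       `json:"role"`
	Parts []GeminiPart `json:"parts"`
}

type GeminiChatRequest struct {
	Contents           []GeminiChatContent `json:"contents"`
	SystemInstructions *GeminiPart         `json:"system_instructions,omitempty"`
}

// openAIMessage is a hypothetical, trimmed stand-in for an OpenAI-style chat message.
type openAIMessage struct {
	Role    string
	Content string
}

// convert mirrors the new behavior: system prompts become a top-level
// instruction, every other message becomes a Gemini content entry.
func convert(messages []openAIMessage) *GeminiChatRequest {
	req := &GeminiChatRequest{}
	for _, m := range messages {
		if m.Role == "system" {
			req.SystemInstructions = &GeminiPart{Text: m.Content}
			continue
		}
		role := m.Role
		if role == "assistant" {
			// Gemini uses "model" where OpenAI uses "assistant".
			role = "model"
		}
		req.Contents = append(req.Contents, GeminiChatContent{
			Role:  role,
			Parts: []GeminiPart{{Text: m.Content}},
		})
	}
	return req
}

func main() {
	req := convert([]openAIMessage{
		{Role: "system", Content: "You are a terse assistant."},
		{Role: "user", Content: "Hello"},
	})
	out, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(out))
}

Running this prints a request whose top-level system_instructions field carries the prompt, which is why the commented-out dummy-reply workaround in relay-gemini.go is no longer needed.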