fix: make the last stream message send empty content

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
Ettore Di Giacinto 2023-07-16 00:09:28 +02:00
parent 6ec315e540
commit d0e67cce75

View File

@@ -18,8 +18,9 @@ import (
 )

 func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
+	emptyMessage := ""
 	process := func(s string, req *OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
-		emptyMessage := ""
 		initialMessage := OpenAIResponse{
 			Model:   req.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: []Choice{{Delta: &Message{Role: "assistant", Content: &emptyMessage}}},
@@ -222,7 +223,7 @@ func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx)
 			{
 				FinishReason: "stop",
 				Index:        0,
-				Delta:        &Message{},
+				Delta:        &Message{Content: &emptyMessage},
 			}},
 			Object: "chat.completion.chunk",
 		}