package openai

import (
	"encoding/json"
	"fmt"

	"github.com/go-skynet/LocalAI/api/backend"
	config "github.com/go-skynet/LocalAI/api/config"
	"github.com/go-skynet/LocalAI/api/options"
	"github.com/gofiber/fiber/v2"
	"github.com/rs/zerolog/log"
)

// EmbeddingsEndpoint returns a fiber handler implementing the OpenAI
// embeddings API: it reads the request, resolves the model configuration,
// computes one embedding per input (token slices and/or strings) and
// replies with an OpenAI-style "list" response.
// https://platform.openai.com/docs/api-reference/embeddings
func EmbeddingsEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
	return func(c *fiber.Ctx) error {
		model, input, err := readInput(c, o, true)
		if err != nil {
			return fmt.Errorf("failed reading parameters from request: %w", err)
		}

		config, input, err := readConfig(model, input, cm, o.Loader, o.Debug, o.Threads, o.ContextSize, o.F16)
		if err != nil {
			return fmt.Errorf("failed reading parameters from request: %w", err)
		}

		log.Debug().Msgf("Parameter Config: %+v", config)

		// One item per tokenized input plus one per string input.
		items := make([]Item, 0, len(config.InputToken)+len(config.InputStrings))

		for _, tokens := range config.InputToken {
			// Get the model function to call for this tokenized input.
			embedFn, err := backend.ModelEmbedding("", tokens, o.Loader, *config, o)
			if err != nil {
				return err
			}

			embeddings, err := embedFn()
			if err != nil {
				return err
			}
			// len(items) keeps indices globally unique across both input
			// kinds, matching the OpenAI spec (index = position in input).
			items = append(items, Item{Embedding: embeddings, Index: len(items), Object: "embedding"})
		}

		for _, s := range config.InputStrings {
			// Get the model function to call for this string input.
			embedFn, err := backend.ModelEmbedding(s, []int{}, o.Loader, *config, o)
			if err != nil {
				return err
			}

			embeddings, err := embedFn()
			if err != nil {
				return err
			}
			items = append(items, Item{Embedding: embeddings, Index: len(items), Object: "embedding"})
		}

		resp := &OpenAIResponse{
			Model:  input.Model, // we have to return what the user sent here, due to OpenAI spec.
			Data:   items,
			Object: "list",
		}

		// The marshal here exists only for the debug log line; an error
		// must not abort the request, so just skip logging on failure.
		if jsonResult, err := json.Marshal(resp); err == nil {
			log.Debug().Msgf("Response: %s", jsonResult)
		}

		// Return the prediction in the response body
		return c.JSON(resp)
	}
}