package localai

import (
	"github.com/go-skynet/LocalAI/core/backend"
	"github.com/go-skynet/LocalAI/core/config"
	fiberContext "github.com/go-skynet/LocalAI/core/http/ctx"
	"github.com/go-skynet/LocalAI/core/schema"
	"github.com/go-skynet/LocalAI/pkg/model"

	"github.com/gofiber/fiber/v2"
	"github.com/rs/zerolog/log"
)

// TTSEndpoint is the OpenAI Speech API endpoint https://platform.openai.com/docs/api-reference/audio/createSpeech
// @Summary Generates audio from the input text.
// @Param request body schema.TTSRequest true "query params"
// @Success 200 {string} binary "Response"
// @Router /v1/audio/speech [post]
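//
// Example request body (illustrative only; the field names are assumed to match
// the JSON tags on schema.TTSRequest, and the model name depends on what is
// installed locally):
//
//	{"model": "tts-voice-model", "input": "Hello, world!"}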
func TTSEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error {
	return func(c *fiber.Ctx) error {
		input := new(schema.TTSRequest)

		// Get input data from the request body
		if err := c.BodyParser(input); err != nil {
			return err
		}
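
		// Resolve the model to use: prefer the model bound to the request context,
		// falling back to the model named in the request body.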
		modelFile, err := fiberContext.ModelFromContext(c, ml, input.Model, false)
		if err != nil {
			modelFile = input.Model
			log.Warn().Msgf("Model not found in context: %s", input.Model)
		}
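
		// Load the backend configuration for the resolved model, applying the
		// application-wide defaults (debug, threads, context size, f16).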
|
2024-03-13 09:05:30 +00:00
|
|
|
|
|
|
|
cfg, err := cl.LoadBackendConfigFileByName(modelFile, appConfig.ModelPath,
|
|
|
|
config.LoadOptionDebug(appConfig.Debug),
|
|
|
|
config.LoadOptionThreads(appConfig.Threads),
|
|
|
|
config.LoadOptionContextSize(appConfig.ContextSize),
|
|
|
|
config.LoadOptionF16(appConfig.F16),
|
|
|
|
)
|
|
|
|
|
		if err != nil {
			modelFile = input.Model
			log.Warn().Err(err).Msgf("Backend configuration not found for %s, using the model name as-is", input.Model)
		} else {
			modelFile = cfg.Model
		}

		log.Debug().Msgf("Request for model: %s", modelFile)
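
		// A backend requested explicitly in the body overrides the configured one.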
		if input.Backend != "" {
			cfg.Backend = input.Backend
		}
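
		// Synthesize the audio for the input text and send the resulting file
		// back to the client as a download.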
		filePath, _, err := backend.ModelTTS(cfg.Backend, input.Input, modelFile, input.Voice, ml, appConfig, *cfg)
		if err != nil {
			return err
		}
		return c.Download(filePath)
	}
}