mirror of
https://github.com/mudler/LocalAI.git
synced 2024-06-07 19:40:48 +00:00
af9e5a2d05
* Revert "fix(fncall): fix regression introduced in #1963 (#2048)" This reverts commit 6b06d4e0af
. * Revert "fix: action-tmate back to upstream, dead code removal (#2038)" This reverts commit fdec8a9d00
. * Revert "feat(grpc): return consumed token count and update response accordingly (#2035)" This reverts commit e843d7df0e
. * Revert "refactor: backend/service split, channel-based llm flow (#1963)" This reverts commit eed5706994
. * feat(grpc): return consumed token count and update response accordingly Fixes: #1920 Signed-off-by: Ettore Di Giacinto <mudler@localai.io> --------- Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
61 lines
1.8 KiB
Go
61 lines
1.8 KiB
Go
package localai
|
|
|
|
import (
|
|
"github.com/go-skynet/LocalAI/core/backend"
|
|
"github.com/go-skynet/LocalAI/core/config"
|
|
fiberContext "github.com/go-skynet/LocalAI/core/http/ctx"
|
|
"github.com/go-skynet/LocalAI/pkg/model"
|
|
|
|
"github.com/go-skynet/LocalAI/core/schema"
|
|
"github.com/gofiber/fiber/v2"
|
|
"github.com/rs/zerolog/log"
|
|
)
|
|
|
|
// TTSEndpoint is the OpenAI Speech API endpoint https://platform.openai.com/docs/api-reference/audio/createSpeech
|
|
// @Summary Generates audio from the input text.
|
|
// @Param request body schema.TTSRequest true "query params"
|
|
// @Success 200 {string} binary "Response"
|
|
// @Router /v1/audio/speech [post]
|
|
func TTSEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error {
|
|
return func(c *fiber.Ctx) error {
|
|
|
|
input := new(schema.TTSRequest)
|
|
|
|
// Get input data from the request body
|
|
if err := c.BodyParser(input); err != nil {
|
|
return err
|
|
}
|
|
|
|
modelFile, err := fiberContext.ModelFromContext(c, ml, input.Model, false)
|
|
if err != nil {
|
|
modelFile = input.Model
|
|
log.Warn().Msgf("Model not found in context: %s", input.Model)
|
|
}
|
|
|
|
cfg, err := cl.LoadBackendConfigFileByName(modelFile, appConfig.ModelPath,
|
|
config.LoadOptionDebug(appConfig.Debug),
|
|
config.LoadOptionThreads(appConfig.Threads),
|
|
config.LoadOptionContextSize(appConfig.ContextSize),
|
|
config.LoadOptionF16(appConfig.F16),
|
|
)
|
|
|
|
if err != nil {
|
|
modelFile = input.Model
|
|
log.Warn().Msgf("Model not found in context: %s", input.Model)
|
|
} else {
|
|
modelFile = cfg.Model
|
|
}
|
|
log.Debug().Msgf("Request for model: %s", modelFile)
|
|
|
|
if input.Backend != "" {
|
|
cfg.Backend = input.Backend
|
|
}
|
|
|
|
filePath, _, err := backend.ModelTTS(cfg.Backend, input.Input, modelFile, input.Voice, ml, appConfig, *cfg)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
return c.Download(filePath)
|
|
}
|
|
}
|