mirror of
https://github.com/mudler/LocalAI.git
synced 2024-06-07 19:40:48 +00:00
20136ca8b7
* feat(elevenlabs): map elevenlabs API support to TTS This allows elevenlabs Clients to work automatically with LocalAI by supporting the elevenlabs API. The elevenlabs server endpoint is implemented such as it is wired to the TTS endpoints. Fixes: https://github.com/mudler/LocalAI/issues/1809 * feat(openai/tts): compat layer with openai tts Fixes: #1276 * fix: adapt tts CLI
56 lines · 1.5 KiB · Go
package localai
|
|
|
|
import (
|
|
"github.com/go-skynet/LocalAI/core/backend"
|
|
"github.com/go-skynet/LocalAI/core/config"
|
|
fiberContext "github.com/go-skynet/LocalAI/core/http/ctx"
|
|
"github.com/go-skynet/LocalAI/pkg/model"
|
|
|
|
"github.com/go-skynet/LocalAI/core/schema"
|
|
"github.com/gofiber/fiber/v2"
|
|
"github.com/rs/zerolog/log"
|
|
)
|
|
|
|
func TTSEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error {
|
|
return func(c *fiber.Ctx) error {
|
|
|
|
input := new(schema.TTSRequest)
|
|
|
|
// Get input data from the request body
|
|
if err := c.BodyParser(input); err != nil {
|
|
return err
|
|
}
|
|
|
|
modelFile, err := fiberContext.ModelFromContext(c, ml, input.Model, false)
|
|
if err != nil {
|
|
modelFile = input.Model
|
|
log.Warn().Msgf("Model not found in context: %s", input.Model)
|
|
}
|
|
|
|
cfg, err := cl.LoadBackendConfigFileByName(modelFile, appConfig.ModelPath,
|
|
config.LoadOptionDebug(appConfig.Debug),
|
|
config.LoadOptionThreads(appConfig.Threads),
|
|
config.LoadOptionContextSize(appConfig.ContextSize),
|
|
config.LoadOptionF16(appConfig.F16),
|
|
)
|
|
|
|
if err != nil {
|
|
modelFile = input.Model
|
|
log.Warn().Msgf("Model not found in context: %s", input.Model)
|
|
} else {
|
|
modelFile = cfg.Model
|
|
}
|
|
log.Debug().Msgf("Request for model: %s", modelFile)
|
|
|
|
if input.Backend != "" {
|
|
cfg.Backend = input.Backend
|
|
}
|
|
|
|
filePath, _, err := backend.ModelTTS(cfg.Backend, input.Input, modelFile, input.Voice, ml, appConfig, *cfg)
|
|
if err != nil {
|
|
return err
|
|
}
|
|
return c.Download(filePath)
|
|
}
|
|
}
|