2023-03-18 22:59:06 +00:00
package main
import (
2023-10-14 10:27:35 +00:00
"context"
2023-10-12 08:45:34 +00:00
"encoding/json"
2023-10-14 10:27:35 +00:00
"errors"
2023-10-12 08:45:34 +00:00
"fmt"
2023-03-18 22:59:06 +00:00
"os"
2023-07-14 23:19:43 +00:00
"os/signal"
2023-05-03 21:03:31 +00:00
"path/filepath"
2023-07-20 20:10:12 +00:00
"strings"
2023-07-14 23:19:43 +00:00
"syscall"
2023-11-26 17:36:23 +00:00
"time"
2023-05-03 21:03:31 +00:00
2024-02-21 01:21:19 +00:00
"github.com/go-skynet/LocalAI/core/backend"
2024-03-01 15:19:53 +00:00
"github.com/go-skynet/LocalAI/core/config"
"github.com/go-skynet/LocalAI/core/http"
"github.com/go-skynet/LocalAI/core/startup"
2023-06-26 13:12:43 +00:00
"github.com/go-skynet/LocalAI/internal"
2023-10-12 08:45:34 +00:00
"github.com/go-skynet/LocalAI/pkg/gallery"
2024-01-05 17:04:46 +00:00
model "github.com/go-skynet/LocalAI/pkg/model"
2023-04-20 16:33:02 +00:00
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
2023-10-12 08:45:34 +00:00
progressbar "github.com/schollz/progressbar/v3"
2023-03-18 22:59:06 +00:00
"github.com/urfave/cli/v2"
)
2024-01-27 23:14:16 +00:00
// remoteLibraryURL is the default remote model-library definition used when
// the --remote-library flag / REMOTE_LIBRARY env var is not set.
const remoteLibraryURL = "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/model_library.yaml"
2023-04-19 16:43:10 +00:00
func main ( ) {
2023-04-20 16:33:02 +00:00
log . Logger = log . Output ( zerolog . ConsoleWriter { Out : os . Stderr } )
2023-07-14 23:19:43 +00:00
// clean up process
go func ( ) {
c := make ( chan os . Signal , 1 ) // we need to reserve to buffer size 1, so the notifier are not blocked
signal . Notify ( c , os . Interrupt , syscall . SIGTERM )
<- c
os . Exit ( 1 )
} ( )
2023-04-20 16:33:02 +00:00
2023-04-19 16:43:10 +00:00
path , err := os . Getwd ( )
2023-03-18 22:59:06 +00:00
if err != nil {
2023-04-20 16:33:02 +00:00
log . Error ( ) . Msgf ( "error: %s" , err . Error ( ) )
2023-04-19 16:43:10 +00:00
os . Exit ( 1 )
2023-03-18 22:59:06 +00:00
}
app := & cli . App {
2023-06-26 13:12:43 +00:00
Name : "LocalAI" ,
Version : internal . PrintableVersion ( ) ,
2024-03-14 22:08:34 +00:00
Usage : "OpenAI, OSS alternative. Drop-in compatible API for running LLM, GPT and genAI models locally on CPU, GPUs with consumer grade hardware. Supported server endpoints: OpenAI, Elevenlabs" ,
2023-04-19 16:43:10 +00:00
Flags : [ ] cli . Flag {
& cli . BoolFlag {
Name : "f16" ,
EnvVars : [ ] string { "F16" } ,
} ,
2023-07-20 20:10:12 +00:00
& cli . BoolFlag {
Name : "autoload-galleries" ,
EnvVars : [ ] string { "AUTOLOAD_GALLERIES" } ,
} ,
2023-04-20 16:33:02 +00:00
& cli . BoolFlag {
Name : "debug" ,
EnvVars : [ ] string { "DEBUG" } ,
} ,
2023-08-18 23:49:33 +00:00
& cli . BoolFlag {
Name : "single-active-backend" ,
EnvVars : [ ] string { "SINGLE_ACTIVE_BACKEND" } ,
Usage : "Allow only one backend to be running." ,
} ,
2023-11-16 07:20:05 +00:00
& cli . BoolFlag {
Name : "parallel-requests" ,
EnvVars : [ ] string { "PARALLEL_REQUESTS" } ,
Usage : "Enable backends to handle multiple requests in parallel. This is for backends that supports multiple requests in parallel, like llama.cpp or vllm" ,
} ,
2023-05-21 12:38:25 +00:00
& cli . BoolFlag {
Name : "cors" ,
EnvVars : [ ] string { "CORS" } ,
} ,
& cli . StringFlag {
Name : "cors-allow-origins" ,
EnvVars : [ ] string { "CORS_ALLOW_ORIGINS" } ,
} ,
2023-04-19 16:43:10 +00:00
& cli . IntFlag {
2023-06-03 12:25:30 +00:00
Name : "threads" ,
Usage : "Number of threads used for parallel computation. Usage of the number of physical cores in the system is suggested." ,
EnvVars : [ ] string { "THREADS" } ,
Value : 4 ,
2023-03-18 22:59:06 +00:00
} ,
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "models-path" ,
Usage : "Path containing models used for inferencing" ,
EnvVars : [ ] string { "MODELS_PATH" } ,
Value : filepath . Join ( path , "models" ) ,
2023-03-18 22:59:06 +00:00
} ,
2023-06-24 06:18:17 +00:00
& cli . StringFlag {
Name : "galleries" ,
Usage : "JSON list of galleries" ,
EnvVars : [ ] string { "GALLERIES" } ,
} ,
2024-01-27 23:14:16 +00:00
& cli . StringFlag {
Name : "remote-library" ,
Usage : "A LocalAI remote library URL" ,
EnvVars : [ ] string { "REMOTE_LIBRARY" } ,
Value : remoteLibraryURL ,
} ,
2023-05-27 07:26:33 +00:00
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "preload-models" ,
Usage : "A List of models to apply in JSON at start" ,
EnvVars : [ ] string { "PRELOAD_MODELS" } ,
2023-05-27 07:26:33 +00:00
} ,
2024-01-18 18:41:08 +00:00
& cli . StringSliceFlag {
2024-01-01 09:31:03 +00:00
Name : "models" ,
Usage : "A List of models URLs configurations." ,
EnvVars : [ ] string { "MODELS" } ,
} ,
2023-05-27 07:26:33 +00:00
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "preload-models-config" ,
Usage : "A List of models to apply at startup. Path to a YAML config file" ,
EnvVars : [ ] string { "PRELOAD_MODELS_CONFIG" } ,
2023-05-27 07:26:33 +00:00
} ,
2023-04-27 04:18:18 +00:00
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "config-file" ,
Usage : "Config file" ,
EnvVars : [ ] string { "CONFIG_FILE" } ,
2023-04-27 04:18:18 +00:00
} ,
2023-03-18 22:59:06 +00:00
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "address" ,
Usage : "Bind address for the API server." ,
EnvVars : [ ] string { "ADDRESS" } ,
Value : ":8080" ,
2023-04-19 16:43:10 +00:00
} ,
2023-05-16 17:32:53 +00:00
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "image-path" ,
Usage : "Image directory" ,
EnvVars : [ ] string { "IMAGE_PATH" } ,
2023-06-22 15:53:10 +00:00
Value : "/tmp/generated/images" ,
} ,
& cli . StringFlag {
Name : "audio-path" ,
Usage : "audio directory" ,
EnvVars : [ ] string { "AUDIO_PATH" } ,
Value : "/tmp/generated/audio" ,
2023-05-16 17:32:53 +00:00
} ,
2024-02-18 10:12:02 +00:00
& cli . StringFlag {
Name : "upload-path" ,
Usage : "Path to store uploads from files api" ,
EnvVars : [ ] string { "UPLOAD_PATH" } ,
Value : "/tmp/localai/upload" ,
} ,
2024-03-26 17:54:35 +00:00
& cli . StringFlag {
Name : "config-path" ,
Usage : "Path to store uploads from files api" ,
EnvVars : [ ] string { "CONFIG_PATH" } ,
Value : "/tmp/localai/config" ,
} ,
2023-06-01 21:38:52 +00:00
& cli . StringFlag {
2023-06-03 12:25:30 +00:00
Name : "backend-assets-path" ,
Usage : "Path used to extract libraries that are required by some of the backends in runtime." ,
EnvVars : [ ] string { "BACKEND_ASSETS_PATH" } ,
Value : "/tmp/localai/backend_data" ,
2023-06-01 21:38:52 +00:00
} ,
2023-07-20 20:10:12 +00:00
& cli . StringSliceFlag {
Name : "external-grpc-backends" ,
Usage : "A list of external grpc backends" ,
EnvVars : [ ] string { "EXTERNAL_GRPC_BACKENDS" } ,
} ,
2023-04-19 16:43:10 +00:00
& cli . IntFlag {
2023-06-03 12:25:30 +00:00
Name : "context-size" ,
Usage : "Default context size of the model" ,
EnvVars : [ ] string { "CONTEXT_SIZE" } ,
Value : 512 ,
2023-04-19 16:43:10 +00:00
} ,
2023-05-12 08:04:20 +00:00
& cli . IntFlag {
2023-06-03 12:25:30 +00:00
Name : "upload-limit" ,
Usage : "Default upload-limit. MB" ,
EnvVars : [ ] string { "UPLOAD_LIMIT" } ,
Value : 15 ,
2023-05-12 08:04:20 +00:00
} ,
2023-08-09 22:06:21 +00:00
& cli . StringSliceFlag {
Name : "api-keys" ,
Usage : "List of API Keys to enable API authentication. When this is set, all the requests must be authenticated with one of these API keys." ,
EnvVars : [ ] string { "API_KEY" } ,
} ,
2023-11-26 17:36:23 +00:00
& cli . BoolFlag {
Name : "enable-watchdog-idle" ,
Usage : "Enable watchdog for stopping idle backends. This will stop the backends if are in idle state for too long." ,
EnvVars : [ ] string { "WATCHDOG_IDLE" } ,
Value : false ,
} ,
& cli . BoolFlag {
Name : "enable-watchdog-busy" ,
Usage : "Enable watchdog for stopping busy backends that exceed a defined threshold." ,
EnvVars : [ ] string { "WATCHDOG_BUSY" } ,
Value : false ,
} ,
& cli . StringFlag {
Name : "watchdog-busy-timeout" ,
Usage : "Watchdog timeout. This will restart the backend if it crashes." ,
EnvVars : [ ] string { "WATCHDOG_BUSY_TIMEOUT" } ,
Value : "5m" ,
} ,
& cli . StringFlag {
Name : "watchdog-idle-timeout" ,
Usage : "Watchdog idle timeout. This will restart the backend if it crashes." ,
EnvVars : [ ] string { "WATCHDOG_IDLE_TIMEOUT" } ,
Value : "15m" ,
} ,
2023-08-18 19:23:14 +00:00
& cli . BoolFlag {
Name : "preload-backend-only" ,
Usage : "If set, the api is NOT launched, and only the preloaded models / backends are started. This is intended for multi-node setups." ,
EnvVars : [ ] string { "PRELOAD_BACKEND_ONLY" } ,
Value : false ,
} ,
2024-03-01 15:19:53 +00:00
& cli . StringFlag {
Name : "localai-config-dir" ,
Usage : "Directory to use for the configuration files of LocalAI itself. This is NOT where model files should be placed." ,
EnvVars : [ ] string { "LOCALAI_CONFIG_DIR" } ,
Value : "./configuration" ,
} ,
2023-04-19 16:43:10 +00:00
} ,
Description : `
LocalAI is a drop - in replacement OpenAI API which runs inference locally .
2023-03-18 22:59:06 +00:00
2023-04-19 16:43:10 +00:00
Some of the models compatible are :
- Vicuna
- Koala
- GPT4ALL
- GPT4ALL - J
2023-04-20 22:06:55 +00:00
- Cerebras
2023-04-19 16:43:10 +00:00
- Alpaca
2023-04-20 22:06:55 +00:00
- StableLM ( ggml quantized )
2023-03-18 22:59:06 +00:00
2023-07-02 09:15:05 +00:00
For a list of compatible model , check out : https : //localai.io/model-compatibility/index.html
2023-03-18 22:59:06 +00:00
` ,
2023-04-19 16:43:10 +00:00
UsageText : ` local-ai [options] ` ,
2023-07-02 09:15:05 +00:00
Copyright : "Ettore Di Giacinto" ,
2023-03-18 22:59:06 +00:00
Action : func ( ctx * cli . Context ) error {
2024-03-01 15:19:53 +00:00
opts := [ ] config . AppOption {
config . WithConfigFile ( ctx . String ( "config-file" ) ) ,
config . WithJSONStringPreload ( ctx . String ( "preload-models" ) ) ,
config . WithYAMLConfigPreload ( ctx . String ( "preload-models-config" ) ) ,
config . WithModelPath ( ctx . String ( "models-path" ) ) ,
config . WithContextSize ( ctx . Int ( "context-size" ) ) ,
config . WithDebug ( ctx . Bool ( "debug" ) ) ,
config . WithImageDir ( ctx . String ( "image-path" ) ) ,
config . WithAudioDir ( ctx . String ( "audio-path" ) ) ,
config . WithUploadDir ( ctx . String ( "upload-path" ) ) ,
2024-03-26 17:54:35 +00:00
config . WithConfigsDir ( ctx . String ( "config-path" ) ) ,
2024-03-01 15:19:53 +00:00
config . WithF16 ( ctx . Bool ( "f16" ) ) ,
config . WithStringGalleries ( ctx . String ( "galleries" ) ) ,
config . WithModelLibraryURL ( ctx . String ( "remote-library" ) ) ,
config . WithDisableMessage ( false ) ,
config . WithCors ( ctx . Bool ( "cors" ) ) ,
config . WithCorsAllowOrigins ( ctx . String ( "cors-allow-origins" ) ) ,
config . WithThreads ( ctx . Int ( "threads" ) ) ,
config . WithBackendAssets ( backendAssets ) ,
config . WithBackendAssetsOutput ( ctx . String ( "backend-assets-path" ) ) ,
config . WithUploadLimitMB ( ctx . Int ( "upload-limit" ) ) ,
config . WithApiKeys ( ctx . StringSlice ( "api-keys" ) ) ,
config . WithModelsURL ( append ( ctx . StringSlice ( "models" ) , ctx . Args ( ) . Slice ( ) ... ) ... ) ,
2023-07-20 20:10:12 +00:00
}
2023-11-26 17:36:23 +00:00
idleWatchDog := ctx . Bool ( "enable-watchdog-idle" )
busyWatchDog := ctx . Bool ( "enable-watchdog-busy" )
if idleWatchDog || busyWatchDog {
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . EnableWatchDog )
2023-11-26 17:36:23 +00:00
if idleWatchDog {
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . EnableWatchDogIdleCheck )
2023-11-26 17:36:23 +00:00
dur , err := time . ParseDuration ( ctx . String ( "watchdog-idle-timeout" ) )
if err != nil {
return err
}
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . SetWatchDogIdleTimeout ( dur ) )
2023-11-26 17:36:23 +00:00
}
if busyWatchDog {
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . EnableWatchDogBusyCheck )
2023-11-26 17:36:23 +00:00
dur , err := time . ParseDuration ( ctx . String ( "watchdog-busy-timeout" ) )
if err != nil {
return err
}
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . SetWatchDogBusyTimeout ( dur ) )
2023-11-26 17:36:23 +00:00
}
}
2023-11-16 07:20:05 +00:00
if ctx . Bool ( "parallel-requests" ) {
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . EnableParallelBackendRequests )
2023-11-16 07:20:05 +00:00
}
2023-08-18 23:49:33 +00:00
if ctx . Bool ( "single-active-backend" ) {
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . EnableSingleBackend )
2023-08-18 23:49:33 +00:00
}
2023-07-20 20:10:12 +00:00
externalgRPC := ctx . StringSlice ( "external-grpc-backends" )
// split ":" to get backend name and the uri
for _ , v := range externalgRPC {
backend := v [ : strings . IndexByte ( v , ':' ) ]
uri := v [ strings . IndexByte ( v , ':' ) + 1 : ]
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . WithExternalBackend ( backend , uri ) )
2023-07-20 20:10:12 +00:00
}
if ctx . Bool ( "autoload-galleries" ) {
2024-03-01 15:19:53 +00:00
opts = append ( opts , config . EnableGalleriesAutoload )
2023-07-20 20:10:12 +00:00
}
2023-08-18 19:23:14 +00:00
if ctx . Bool ( "preload-backend-only" ) {
2024-03-01 15:19:53 +00:00
_ , _ , _ , err := startup . Startup ( opts ... )
2023-08-18 19:23:14 +00:00
return err
}
2024-03-01 15:19:53 +00:00
cl , ml , options , err := startup . Startup ( opts ... )
if err != nil {
return fmt . Errorf ( "failed basic startup tasks with error %s" , err . Error ( ) )
}
2024-03-18 18:14:48 +00:00
configdir := ctx . String ( "localai-config-dir" )
// Watch the configuration directory
// If the directory does not exist, we don't watch it
if _ , err := os . Stat ( configdir ) ; err == nil {
closeConfigWatcherFn , err := startup . WatchConfigDirectory ( ctx . String ( "localai-config-dir" ) , options )
defer closeConfigWatcherFn ( )
2024-03-01 15:19:53 +00:00
2024-03-18 18:14:48 +00:00
if err != nil {
return fmt . Errorf ( "failed while watching configuration directory %s" , ctx . String ( "localai-config-dir" ) )
}
2023-10-17 16:22:53 +00:00
}
2024-03-01 15:19:53 +00:00
appHTTP , err := http . App ( cl , ml , options )
2023-05-30 10:00:30 +00:00
if err != nil {
2024-03-01 15:19:53 +00:00
log . Error ( ) . Msg ( "Error during HTTP App constructor" )
2023-05-30 10:00:30 +00:00
return err
}
2024-03-01 15:19:53 +00:00
return appHTTP . Listen ( ctx . String ( "address" ) )
2023-03-18 22:59:06 +00:00
} ,
2023-10-12 08:45:34 +00:00
Commands : [ ] * cli . Command {
{
Name : "models" ,
Usage : "List or install models" ,
Subcommands : [ ] * cli . Command {
{
Name : "list" ,
Usage : "List the models avaiable in your galleries" ,
Action : func ( ctx * cli . Context ) error {
var galleries [ ] gallery . Gallery
if err := json . Unmarshal ( [ ] byte ( ctx . String ( "galleries" ) ) , & galleries ) ; err != nil {
log . Error ( ) . Msgf ( "unable to load galleries: %s" , err . Error ( ) )
}
models , err := gallery . AvailableGalleryModels ( galleries , ctx . String ( "models-path" ) )
if err != nil {
return err
}
for _ , model := range models {
if model . Installed {
fmt . Printf ( " * %s@%s (installed)\n" , model . Gallery . Name , model . Name )
} else {
fmt . Printf ( " - %s@%s\n" , model . Gallery . Name , model . Name )
}
}
return nil
} ,
} ,
{
Name : "install" ,
Usage : "Install a model from the gallery" ,
Action : func ( ctx * cli . Context ) error {
modelName := ctx . Args ( ) . First ( )
var galleries [ ] gallery . Gallery
if err := json . Unmarshal ( [ ] byte ( ctx . String ( "galleries" ) ) , & galleries ) ; err != nil {
log . Error ( ) . Msgf ( "unable to load galleries: %s" , err . Error ( ) )
}
progressBar := progressbar . NewOptions (
1000 ,
progressbar . OptionSetDescription ( fmt . Sprintf ( "downloading model %s" , modelName ) ) ,
progressbar . OptionShowBytes ( false ) ,
progressbar . OptionClearOnFinish ( ) ,
)
progressCallback := func ( fileName string , current string , total string , percentage float64 ) {
progressBar . Set ( int ( percentage * 10 ) )
}
err = gallery . InstallModelFromGallery ( galleries , modelName , ctx . String ( "models-path" ) , gallery . GalleryModel { } , progressCallback )
if err != nil {
return err
}
return nil
} ,
} ,
} ,
} ,
2023-10-14 10:27:35 +00:00
{
Name : "tts" ,
Usage : "Convert text to speech" ,
Flags : [ ] cli . Flag {
& cli . StringFlag {
Name : "backend" ,
Value : "piper" ,
Aliases : [ ] string { "b" } ,
Usage : "Backend to run the TTS model" ,
} ,
& cli . StringFlag {
Name : "model" ,
Aliases : [ ] string { "m" } ,
Usage : "Model name to run the TTS" ,
Required : true ,
} ,
2024-03-14 22:08:34 +00:00
& cli . StringFlag {
Name : "voice" ,
Aliases : [ ] string { "v" } ,
Usage : "Voice name to run the TTS (optional)" ,
Required : true ,
} ,
2023-10-14 10:27:35 +00:00
& cli . StringFlag {
Name : "output-file" ,
Aliases : [ ] string { "o" } ,
Usage : "The path to write the output wav file" ,
} ,
} ,
Action : func ( ctx * cli . Context ) error {
modelOption := ctx . String ( "model" )
if modelOption == "" {
return errors . New ( "--model parameter is required" )
}
backendOption := ctx . String ( "backend" )
if backendOption == "" {
backendOption = "piper"
}
outputFile := ctx . String ( "output-file" )
outputDir := ctx . String ( "backend-assets-path" )
if outputFile != "" {
outputDir = filepath . Dir ( outputFile )
}
text := strings . Join ( ctx . Args ( ) . Slice ( ) , " " )
2024-03-01 15:19:53 +00:00
opts := & config . ApplicationConfig {
ModelPath : ctx . String ( "models-path" ) ,
2023-10-14 10:27:35 +00:00
Context : context . Background ( ) ,
AudioDir : outputDir ,
AssetsDestination : ctx . String ( "backend-assets-path" ) ,
}
2024-03-01 15:19:53 +00:00
ml := model . NewModelLoader ( opts . ModelPath )
2023-10-14 10:27:35 +00:00
2024-03-01 15:19:53 +00:00
defer ml . StopAllGRPC ( )
2023-10-14 10:27:35 +00:00
2024-03-14 22:08:34 +00:00
filePath , _ , err := backend . ModelTTS ( backendOption , text , modelOption , ctx . String ( "voice" ) , ml , opts , config . BackendConfig { } )
2023-10-14 10:27:35 +00:00
if err != nil {
return err
}
if outputFile != "" {
if err := os . Rename ( filePath , outputFile ) ; err != nil {
return err
}
fmt . Printf ( "Generate file %s\n" , outputFile )
} else {
fmt . Printf ( "Generate file %s\n" , filePath )
}
return nil
} ,
} ,
2023-10-15 07:17:41 +00:00
{
Name : "transcript" ,
Usage : "Convert audio to text" ,
Flags : [ ] cli . Flag {
& cli . StringFlag {
Name : "backend" ,
Value : "whisper" ,
Aliases : [ ] string { "b" } ,
Usage : "Backend to run the transcription model" ,
} ,
& cli . StringFlag {
Name : "model" ,
Aliases : [ ] string { "m" } ,
Usage : "Model name to run the transcription" ,
} ,
& cli . StringFlag {
Name : "language" ,
Aliases : [ ] string { "l" } ,
Usage : "Language of the audio file" ,
} ,
& cli . IntFlag {
Name : "threads" ,
Aliases : [ ] string { "t" } ,
Usage : "Threads to use" ,
Value : 1 ,
} ,
& cli . StringFlag {
Name : "output-file" ,
Aliases : [ ] string { "o" } ,
Usage : "The path to write the output wav file" ,
} ,
} ,
Action : func ( ctx * cli . Context ) error {
modelOption := ctx . String ( "model" )
filename := ctx . Args ( ) . First ( )
language := ctx . String ( "language" )
threads := ctx . Int ( "threads" )
2024-03-01 15:19:53 +00:00
opts := & config . ApplicationConfig {
ModelPath : ctx . String ( "models-path" ) ,
2023-10-15 07:17:41 +00:00
Context : context . Background ( ) ,
AssetsDestination : ctx . String ( "backend-assets-path" ) ,
}
2024-03-01 15:19:53 +00:00
cl := config . NewBackendConfigLoader ( )
ml := model . NewModelLoader ( opts . ModelPath )
if err := cl . LoadBackendConfigsFromPath ( ctx . String ( "models-path" ) ) ; err != nil {
2023-10-15 07:17:41 +00:00
return err
}
2024-03-01 15:19:53 +00:00
c , exists := cl . GetBackendConfig ( modelOption )
2023-10-15 07:17:41 +00:00
if ! exists {
return errors . New ( "model not found" )
}
2024-03-13 09:05:30 +00:00
c . Threads = & threads
2023-10-15 07:17:41 +00:00
2024-03-01 15:19:53 +00:00
defer ml . StopAllGRPC ( )
2023-10-15 07:17:41 +00:00
2024-03-01 15:19:53 +00:00
tr , err := backend . ModelTranscription ( filename , language , ml , c , opts )
2023-10-15 07:17:41 +00:00
if err != nil {
return err
}
for _ , segment := range tr . Segments {
fmt . Println ( segment . Start . String ( ) , "-" , segment . Text )
}
return nil
} ,
} ,
2023-10-12 08:45:34 +00:00
} ,
2023-03-18 22:59:06 +00:00
}
2023-04-19 16:43:10 +00:00
err = app . Run ( os . Args )
2023-03-18 22:59:06 +00:00
if err != nil {
2023-04-20 16:33:02 +00:00
log . Error ( ) . Msgf ( "error: %s" , err . Error ( ) )
2023-03-18 22:59:06 +00:00
os . Exit ( 1 )
}
}