package main

import (
	"embed" // required for the embedded backend assets declared below
	"fmt"
	"os"
	"path/filepath"

	api "github.com/go-skynet/LocalAI/api"
	model "github.com/go-skynet/LocalAI/pkg/model"
	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
	"github.com/urfave/cli/v2"
)
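
// backendAssets is referenced by api.WithBackendAssets below but is not
// declared anywhere in this file. The embed pattern used here is an
// assumption about the project layout; adjust it to the actual
// backend-assets directory, or drop this block if the package already
// declares the variable in another file.
//
//go:embed backend-assets/*
var backendAssets embed.FS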
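
// main configures logging, defines the CLI flags (each with an environment
// variable counterpart), and starts the LocalAI HTTP API server.
//
// An illustrative invocation (flag values here are examples only):
//
//	local-ai --models-path ./models --threads 8 --context-size 1024 --address ":8080"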
func main() {
	log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})

	path, err := os.Getwd()
	if err != nil {
		log.Error().Msgf("error: %s", err.Error())
		os.Exit(1)
	}

	app := &cli.App{
		Name:  "LocalAI",
		Usage: "OpenAI-compatible API for running LLaMA/GPT models locally on CPU with consumer-grade hardware.",
		Flags: []cli.Flag{
			&cli.BoolFlag{
				Name:    "f16",
				EnvVars: []string{"F16"},
			},
			&cli.BoolFlag{
				Name:    "debug",
				EnvVars: []string{"DEBUG"},
			},
			&cli.BoolFlag{
				Name:    "cors",
				EnvVars: []string{"CORS"},
			},
			&cli.StringFlag{
				Name:    "cors-allow-origins",
				EnvVars: []string{"CORS_ALLOW_ORIGINS"},
			},
			&cli.IntFlag{
				Name:    "threads",
				Usage:   "Number of threads used for parallel computation. Using the number of physical cores in the system is suggested.",
				EnvVars: []string{"THREADS"},
				Value:   4,
			},
			&cli.StringFlag{
				Name:    "models-path",
				Usage:   "Path containing models used for inferencing",
				EnvVars: []string{"MODELS_PATH"},
				Value:   filepath.Join(path, "models"),
			},
			&cli.StringFlag{
				Name:    "preload-models",
				Usage:   "A list of models to apply at startup, expressed as JSON",
				EnvVars: []string{"PRELOAD_MODELS"},
			},
			&cli.StringFlag{
				Name:    "preload-models-config",
				Usage:   "Path to a YAML config file listing models to apply at startup",
				EnvVars: []string{"PRELOAD_MODELS_CONFIG"},
			},
			&cli.StringFlag{
				Name:    "config-file",
				Usage:   "Path to the config file",
				EnvVars: []string{"CONFIG_FILE"},
			},
			&cli.StringFlag{
				Name:    "address",
				Usage:   "Bind address for the API server.",
				EnvVars: []string{"ADDRESS"},
				Value:   ":8080",
			},
			&cli.StringFlag{
				Name:    "image-path",
				Usage:   "Image directory",
				EnvVars: []string{"IMAGE_PATH"},
				Value:   "",
			},
			&cli.StringFlag{
				Name:    "backend-assets-path",
				Usage:   "Path used to extract libraries that are required by some backends at runtime.",
				EnvVars: []string{"BACKEND_ASSETS_PATH"},
				Value:   "/tmp/localai/backend_data",
			},
			&cli.IntFlag{
				Name:    "context-size",
				Usage:   "Default context size of the model",
				EnvVars: []string{"CONTEXT_SIZE"},
				Value:   512,
			},
			&cli.IntFlag{
				Name:    "upload-limit",
				Usage:   "Default upload limit, in MB",
				EnvVars: []string{"UPLOAD_LIMIT"},
				Value:   15,
			},
		},
		Description: `
LocalAI is a drop-in replacement for the OpenAI API that runs inference locally.

Some of the compatible models are:
- Vicuna
- Koala
- GPT4ALL
- GPT4ALL-J
- Cerebras
- Alpaca
- StableLM (ggml quantized)

It uses llama.cpp, ggml and gpt4all as backends, with Go C bindings.
`,
		UsageText: `local-ai [options]`,
		Copyright: "go-skynet authors",
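		// Action translates the parsed flags into API options and starts the HTTP server.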
		Action: func(ctx *cli.Context) error {
			fmt.Printf("Starting LocalAI using %d threads, with models path: %s\n", ctx.Int("threads"), ctx.String("models-path"))
			app, err := api.App(
				api.WithConfigFile(ctx.String("config-file")),
				api.WithJSONStringPreload(ctx.String("preload-models")),
				api.WithYAMLConfigPreload(ctx.String("preload-models-config")),
				api.WithModelLoader(model.NewModelLoader(ctx.String("models-path"))),
				api.WithContextSize(ctx.Int("context-size")),
				api.WithDebug(ctx.Bool("debug")),
				api.WithImageDir(ctx.String("image-path")),
				api.WithF16(ctx.Bool("f16")),
				api.WithDisableMessage(false),
				api.WithCors(ctx.Bool("cors")),
				api.WithCorsAllowOrigins(ctx.String("cors-allow-origins")),
				api.WithThreads(ctx.Int("threads")),
				api.WithBackendAssets(backendAssets),
				api.WithBackendAssetsOutput(ctx.String("backend-assets-path")),
				api.WithUploadLimitMB(ctx.Int("upload-limit")))
			if err != nil {
				return err
			}

			return app.Listen(ctx.String("address"))
		},
	}

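	// Run parses os.Args, resolves flag and environment-variable values, and invokes Action.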
	err = app.Run(os.Args)
	if err != nil {
		log.Error().Msgf("error: %s", err.Error())
		os.Exit(1)
	}
}