// Command local-ai starts the LocalAI server: an OpenAI-compatible API
// for running LLaMA/GPT models locally on CPU.
package main
import (
	"os"
	"runtime"

	api "github.com/go-skynet/LocalAI/api"
	model "github.com/go-skynet/LocalAI/pkg/model"
	"github.com/rs/zerolog"
	"github.com/rs/zerolog/log"
	"github.com/urfave/cli/v2"
)
func main() {
2023-04-20 16:33:02 +00:00
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
path, err := os.Getwd()
2023-03-18 22:59:06 +00:00
if err != nil {
2023-04-20 16:33:02 +00:00
log.Error().Msgf("error: %s", err.Error())
os.Exit(1)
2023-03-18 22:59:06 +00:00
}
app := &cli.App{
Name: "LocalAI",
Usage: "OpenAI compatible API for running LLaMA/GPT models locally on CPU with consumer grade hardware.",
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "f16",
EnvVars: []string{"F16"},
},
2023-04-20 16:33:02 +00:00
&cli.BoolFlag{
Name: "debug",
EnvVars: []string{"DEBUG"},
},
&cli.IntFlag{
Name: "threads",
DefaultText: "Number of threads used for parallel computation. Usage of the number of physical cores in the system is suggested.",
EnvVars: []string{"THREADS"},
Value: runtime.NumCPU(),
2023-03-18 22:59:06 +00:00
},
&cli.StringFlag{
Name: "models-path",
DefaultText: "Path containing models used for inferencing",
EnvVars: []string{"MODELS_PATH"},
Value: path,
2023-03-18 22:59:06 +00:00
},
&cli.StringFlag{
Name: "address",
DefaultText: "Bind address for the API server.",
EnvVars: []string{"ADDRESS"},
Value: ":8080",
},
&cli.IntFlag{
Name: "context-size",
DefaultText: "Default context size of the model",
EnvVars: []string{"CONTEXT_SIZE"},
Value: 512,
},
},
Description: `
LocalAI is a drop-in replacement OpenAI API which runs inference locally.
2023-03-18 22:59:06 +00:00
Some of the models compatible are:
- Vicuna
- Koala
- GPT4ALL
- GPT4ALL-J
- Alpaca
2023-03-18 22:59:06 +00:00
It uses llama.cpp and gpt4all as backend, supporting all the models supported by both.
2023-03-18 22:59:06 +00:00
`,
UsageText: `local-ai [options]`,
2023-03-18 22:59:06 +00:00
Copyright: "go-skynet authors",
Action: func(ctx *cli.Context) error {
2023-04-20 16:33:02 +00:00
zerolog.SetGlobalLevel(zerolog.InfoLevel)
if ctx.Bool("debug") {
zerolog.SetGlobalLevel(zerolog.DebugLevel)
}
return api.Start(model.NewModelLoader(ctx.String("models-path")), ctx.String("address"), ctx.Int("threads"), ctx.Int("context-size"), ctx.Bool("f16"))
2023-03-18 22:59:06 +00:00
},
}
err = app.Run(os.Args)
2023-03-18 22:59:06 +00:00
if err != nil {
2023-04-20 16:33:02 +00:00
log.Error().Msgf("error: %s", err.Error())
2023-03-18 22:59:06 +00:00
os.Exit(1)
}
}