diff --git a/api/api.go b/api/api.go
index e4aac2fb..543e7566 100644
--- a/api/api.go
+++ b/api/api.go
@@ -51,6 +51,9 @@ func App(opts ...AppOption) (*fiber.App, error) {
 		}))
 	}
 
+	log.Info().Msgf("Starting LocalAI using %d threads, with models path: %s", options.threads, options.loader.ModelPath)
+	log.Info().Msgf("LocalAI version: %s", internal.PrintableVersion())
+
 	cm := NewConfigMerger()
 	if err := cm.LoadConfigs(options.loader.ModelPath); err != nil {
 		log.Error().Msgf("error loading config files: %s", err.Error())
diff --git a/api/openai.go b/api/openai.go
index f361b72e..3826a52f 100644
--- a/api/openai.go
+++ b/api/openai.go
@@ -214,7 +214,7 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 
 		if input.Stream {
 			if len(config.PromptStrings) > 1 {
-				return errors.New("cannot handle more than 1 `PromptStrings` when `Stream`ing")
+				return errors.New("cannot handle more than 1 `PromptStrings` when Streaming")
 			}
 
 			predInput := config.PromptStrings[0]
@@ -222,7 +222,9 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 			templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
 				Input string
-			}{Input: predInput})
+			}{
+				Input: predInput,
+			})
 			if err == nil {
 				predInput = templatedInput
 				log.Debug().Msgf("Template found, input modified to: %s", predInput)
@@ -268,7 +270,9 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 			// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
 			templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
 				Input string
-			}{Input: i})
+			}{
+				Input: i,
+			})
 			if err == nil {
 				i = templatedInput
 				log.Debug().Msgf("Template found, input modified to: %s", i)
diff --git a/api/options.go b/api/options.go
index b4669bcf..923288ac 100644
--- a/api/options.go
+++ b/api/options.go
@@ -3,9 +3,11 @@ package api
 import (
 	"context"
 	"embed"
+	"encoding/json"
 
 	"github.com/go-skynet/LocalAI/pkg/gallery"
 	model "github.com/go-skynet/LocalAI/pkg/model"
+	"github.com/rs/zerolog/log"
 )
 
 type Option struct {
@@ -69,6 +71,20 @@ func WithBackendAssets(f embed.FS) AppOption {
 	}
 }
 
+func WithStringGalleries(galls string) AppOption {
+	return func(o *Option) {
+		if galls == "" {
+			log.Debug().Msgf("no galleries to load")
+			return
+		}
+		var galleries []gallery.Gallery
+		if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
+			log.Error().Msgf("failed loading galleries: %s", err.Error())
+		}
+		o.galleries = append(o.galleries, galleries...)
+	}
+}
+
 func WithGalleries(galleries []gallery.Gallery) AppOption {
 	return func(o *Option) {
 		o.galleries = append(o.galleries, galleries...)
diff --git a/internal/version.go b/internal/version.go
index 12246c24..86588b40 100644
--- a/internal/version.go
+++ b/internal/version.go
@@ -6,5 +6,5 @@ var Version = ""
 var Commit = ""
 
 func PrintableVersion() string {
-	return fmt.Sprintf("LocalAI %s (%s)", Version, Commit)
+	return fmt.Sprintf("%s (%s)", Version, Commit)
 }
diff --git a/main.go b/main.go
index 12b129c1..fc1dea09 100644
--- a/main.go
+++ b/main.go
@@ -1,14 +1,11 @@
 package main
 
 import (
-	"encoding/json"
-	"fmt"
 	"os"
 	"path/filepath"
 
 	api "github.com/go-skynet/LocalAI/api"
 	"github.com/go-skynet/LocalAI/internal"
-	"github.com/go-skynet/LocalAI/pkg/gallery"
 	model "github.com/go-skynet/LocalAI/pkg/model"
 	"github.com/rs/zerolog"
 	"github.com/rs/zerolog/log"
@@ -126,19 +123,13 @@ Some of the models compatible are:
 - Alpaca
 - StableLM (ggml quantized)
 
-It uses llama.cpp, ggml and gpt4all as backend with golang c bindings.
+For a list of compatible model, check out: https://localai.io/model-compatibility/index.html
 `,
 		UsageText: `local-ai [options]`,
-		Copyright: "go-skynet authors",
+		Copyright: "Ettore Di Giacinto",
 		Action: func(ctx *cli.Context) error {
-			fmt.Printf("Starting LocalAI using %d threads, with models path: %s\n", ctx.Int("threads"), ctx.String("models-path"))
-			galls := ctx.String("galleries")
-			var galleries []gallery.Gallery
-			err := json.Unmarshal([]byte(galls), &galleries)
-			fmt.Println(err)
 			app, err := api.App(
 				api.WithConfigFile(ctx.String("config-file")),
-				api.WithGalleries(galleries),
 				api.WithJSONStringPreload(ctx.String("preload-models")),
 				api.WithYAMLConfigPreload(ctx.String("preload-models-config")),
 				api.WithModelLoader(model.NewModelLoader(ctx.String("models-path"))),
@@ -147,6 +138,7 @@ It uses llama.cpp, ggml and gpt4all as backend with golang c bindings.
 				api.WithImageDir(ctx.String("image-path")),
 				api.WithAudioDir(ctx.String("audio-path")),
 				api.WithF16(ctx.Bool("f16")),
+				api.WithStringGalleries(ctx.String("galleries")),
 				api.WithDisableMessage(false),
 				api.WithCors(ctx.Bool("cors")),
 				api.WithCorsAllowOrigins(ctx.String("cors-allow-origins")),
diff --git a/pkg/gallery/gallery.go b/pkg/gallery/gallery.go
index aed52516..8e085929 100644
--- a/pkg/gallery/gallery.go
+++ b/pkg/gallery/gallery.go
@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"os"
 	"path/filepath"
+	"strings"
 
 	"github.com/go-skynet/LocalAI/pkg/utils"
 	"github.com/imdario/mergo"
@@ -17,6 +18,10 @@ type Gallery struct {
 
 // Installs a model from the gallery (galleryname@modelname)
 func InstallModelFromGallery(galleries []Gallery, name string, basePath string, req GalleryModel, downloadStatus func(string, string, string, float64)) error {
+
+	// os.PathSeparator is not allowed in model names. Replace them with "__" to avoid conflicts with file paths.
+	name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
+
 	models, err := AvailableGalleryModels(galleries, basePath)
 	if err != nil {
 		return err
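
For context, a minimal sketch of how the new `api.WithStringGalleries` option added in api/options.go could be wired up programmatically, outside the CLI path that main.go now uses. The JSON keys ("name", "url"), the gallery URL, the models path, and the listen address are illustrative assumptions only; in practice the string comes straight from the `--galleries` flag and its shape is dictated by the `gallery.Gallery` struct tags.

```go
package main

import (
	api "github.com/go-skynet/LocalAI/api"
	model "github.com/go-skynet/LocalAI/pkg/model"
)

func main() {
	// Same JSON string shape the --galleries flag forwards to the API layer.
	// The "name"/"url" keys and the gallery URL here are assumptions for illustration.
	galleries := `[{"name":"model-gallery","url":"github:go-skynet/model-gallery/index.yaml"}]`

	app, err := api.App(
		// Placeholder models path.
		api.WithModelLoader(model.NewModelLoader("./models")),
		// An empty or malformed string is only logged inside WithStringGalleries,
		// so startup continues even with a bad --galleries value.
		api.WithStringGalleries(galleries),
	)
	if err != nil {
		panic(err)
	}

	// Placeholder listen address.
	_ = app.Listen(":8080")
}
```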