diff --git a/core/http/elements/gallery.go b/core/http/elements/gallery.go index a9e3bf49..16a74553 100644 --- a/core/http/elements/gallery.go +++ b/core/http/elements/gallery.go @@ -6,6 +6,7 @@ import ( "github.com/chasefleming/elem-go" "github.com/chasefleming/elem-go/attrs" + "github.com/go-skynet/LocalAI/core/services" "github.com/go-skynet/LocalAI/pkg/gallery" "github.com/go-skynet/LocalAI/pkg/xsync" ) @@ -72,12 +73,13 @@ func StartProgressBar(uid, progress, text string) string { if progress == "" { progress = "0" } - return elem.Div(attrs.Props{ - "hx-trigger": "done", - "hx-get": "/browse/job/" + uid, - "hx-swap": "innerHTML", - "hx-target": "this", - }, + return elem.Div( + attrs.Props{ + "hx-trigger": "done", + "hx-get": "/browse/job/" + uid, + "hx-swap": "innerHTML", + "hx-target": "this", + }, elem.H3( attrs.Props{ "role": "status", @@ -223,7 +225,7 @@ func deleteButton(modelName string) elem.Node { ) } -func ListModels(models []*gallery.GalleryModel, installing *xsync.SyncedMap[string, string]) string { +func ListModels(models []*gallery.GalleryModel, processing *xsync.SyncedMap[string, string], galleryService *services.GalleryService) string { //StartProgressBar(uid, "0") modelsElements := []elem.Node{} // span := func(s string) elem.Node { @@ -258,7 +260,15 @@ func ListModels(models []*gallery.GalleryModel, installing *xsync.SyncedMap[stri actionDiv := func(m *gallery.GalleryModel) elem.Node { galleryID := fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name) - currentlyInstalling := installing.Exists(galleryID) + currentlyProcessing := processing.Exists(galleryID) + isDeletionOp := false + if currentlyProcessing { + status := galleryService.GetStatus(galleryID) + if status != nil && status.Deletion { + isDeletionOp = true + } + // if status == nil : "Waiting" + } nodes := []elem.Node{ cardSpan("Repository: "+m.Gallery.Name, "fa-brands fa-git-alt"), @@ -292,6 +302,11 @@ func ListModels(models []*gallery.GalleryModel, installing *xsync.SyncedMap[stri ) } + progressMessage := "Installation" + if isDeletionOp { + progressMessage = "Deletion" + } + return elem.Div( attrs.Props{ "class": "px-6 pt-4 pb-2", @@ -303,9 +318,9 @@ func ListModels(models []*gallery.GalleryModel, installing *xsync.SyncedMap[stri nodes..., ), elem.If( - currentlyInstalling, + currentlyProcessing, elem.Node( // If currently installing, show progress bar - elem.Raw(StartProgressBar(installing.Get(galleryID), "0", "Installing")), + elem.Raw(StartProgressBar(processing.Get(galleryID), "0", progressMessage)), ), // Otherwise, show install button (if not installed) or display "Installed" elem.If(m.Installed, elem.Node(elem.Div( @@ -331,12 +346,6 @@ func ListModels(models []*gallery.GalleryModel, installing *xsync.SyncedMap[stri "class": "flex justify-center items-center", } - _, trustRemoteCodeExists := m.Overrides["trust_remote_code"] - if trustRemoteCodeExists { - // should this be checking for trust_remote_code: false? I don't think we ever use that value. 
- divProperties["class"] = divProperties["class"] + " remote-code" - } - elems = append(elems, elem.Div(divProperties, @@ -352,6 +361,19 @@ func ListModels(models []*gallery.GalleryModel, installing *xsync.SyncedMap[stri ), )) + _, trustRemoteCodeExists := m.Overrides["trust_remote_code"] + if trustRemoteCodeExists { + elems = append(elems, elem.Div( + attrs.Props{ + "class": "flex justify-center items-center bg-red-500 text-white p-2 rounded-lg mt-2", + }, + elem.I(attrs.Props{ + "class": "fa-solid fa-circle-exclamation pr-2", + }), + elem.Text("Attention: Trust Remote Code is required for this model"), + )) + } + elems = append(elems, descriptionDiv(m), actionDiv(m)) modelsElements = append(modelsElements, elem.Div( diff --git a/core/http/endpoints/localai/welcome.go b/core/http/endpoints/localai/welcome.go index 3b36aaf6..9ad0ab5e 100644 --- a/core/http/endpoints/localai/welcome.go +++ b/core/http/endpoints/localai/welcome.go @@ -9,7 +9,7 @@ import ( ) func WelcomeEndpoint(appConfig *config.ApplicationConfig, - cl *config.BackendConfigLoader, ml *model.ModelLoader) func(*fiber.Ctx) error { + cl *config.BackendConfigLoader, ml *model.ModelLoader, modelStatus func() (map[string]string, map[string]string)) func(*fiber.Ctx) error { return func(c *fiber.Ctx) error { models, _ := ml.ListModels() backendConfigs := cl.GetAllBackendConfigs() @@ -24,6 +24,9 @@ func WelcomeEndpoint(appConfig *config.ApplicationConfig, galleryConfigs[m.Name] = cfg } + // Get model statuses to display in the UI the operation in progress + processingModels, taskTypes := modelStatus() + summary := fiber.Map{ "Title": "LocalAI API - " + internal.PrintableVersion(), "Version": internal.PrintableVersion(), @@ -31,6 +34,8 @@ func WelcomeEndpoint(appConfig *config.ApplicationConfig, "ModelsConfig": backendConfigs, "GalleryConfig": galleryConfigs, "ApplicationConfig": appConfig, + "ProcessingModels": processingModels, + "TaskTypes": taskTypes, } if string(c.Context().Request.Header.ContentType()) == "application/json" || len(c.Accepts("html")) == 0 { diff --git a/core/http/endpoints/openai/request.go b/core/http/endpoints/openai/request.go index 9a107bab..d25e05b5 100644 --- a/core/http/endpoints/openai/request.go +++ b/core/http/endpoints/openai/request.go @@ -63,10 +63,14 @@ func getBase64Image(s string) (string, error) { return encoded, nil } - // if the string instead is prefixed with "data:image/jpeg;base64,", drop it - if strings.HasPrefix(s, "data:image/jpeg;base64,") { - return strings.ReplaceAll(s, "data:image/jpeg;base64,", ""), nil + // if the string instead is prefixed with "data:image/...;base64,", drop it + dropPrefix := []string{"data:image/jpeg;base64,", "data:image/png;base64,"} + for _, prefix := range dropPrefix { + if strings.HasPrefix(s, prefix) { + return strings.ReplaceAll(s, prefix, ""), nil + } } + return "", fmt.Errorf("not valid string") } @@ -181,7 +185,7 @@ func updateRequestConfig(config *config.BackendConfig, input *schema.OpenAIReque input.Messages[i].StringContent = fmt.Sprintf("[img-%d]", index) + input.Messages[i].StringContent index++ } else { - fmt.Print("Failed encoding image", err) + log.Error().Msgf("Failed encoding image: %s", err) } } } diff --git a/core/http/routes/ui.go b/core/http/routes/ui.go index 455647e4..d376d10e 100644 --- a/core/http/routes/ui.go +++ b/core/http/routes/ui.go @@ -26,13 +26,35 @@ func RegisterUIRoutes(app *fiber.App, galleryService *services.GalleryService, auth func(*fiber.Ctx) error) { - app.Get("/", auth, localai.WelcomeEndpoint(appConfig, cl, ml)) - // 
keeps the state of models that are being installed from the UI - var installingModels = xsync.NewSyncedMap[string, string]() + var processingModels = xsync.NewSyncedMap[string, string]() + + // modelStatus returns the current status of the models being processed (installation or deletion) + // it is called asynchonously from the UI + modelStatus := func() (map[string]string, map[string]string) { + processingModelsData := processingModels.Map() + + taskTypes := map[string]string{} + + for k, v := range processingModelsData { + status := galleryService.GetStatus(v) + taskTypes[k] = "Installation" + if status != nil && status.Deletion { + taskTypes[k] = "Deletion" + } else if status == nil { + taskTypes[k] = "Waiting" + } + } + + return processingModelsData, taskTypes + } + + app.Get("/", auth, localai.WelcomeEndpoint(appConfig, cl, ml, modelStatus)) // Show the Models page (all models) app.Get("/browse", auth, func(c *fiber.Ctx) error { + term := c.Query("term") + models, _ := gallery.AvailableGalleryModels(appConfig.Galleries, appConfig.ModelPath) // Get all available tags @@ -47,12 +69,22 @@ func RegisterUIRoutes(app *fiber.App, tags = append(tags, t) } sort.Strings(tags) + + if term != "" { + models = gallery.GalleryModels(models).Search(term) + } + + // Get model statuses + processingModelsData, taskTypes := modelStatus() + summary := fiber.Map{ - "Title": "LocalAI - Models", - "Version": internal.PrintableVersion(), - "Models": template.HTML(elements.ListModels(models, installingModels)), - "Repositories": appConfig.Galleries, - "AllTags": tags, + "Title": "LocalAI - Models", + "Version": internal.PrintableVersion(), + "Models": template.HTML(elements.ListModels(models, processingModels, galleryService)), + "Repositories": appConfig.Galleries, + "AllTags": tags, + "ProcessingModels": processingModelsData, + "TaskTypes": taskTypes, // "ApplicationConfig": appConfig, } @@ -72,17 +104,7 @@ func RegisterUIRoutes(app *fiber.App, models, _ := gallery.AvailableGalleryModels(appConfig.Galleries, appConfig.ModelPath) - filteredModels := []*gallery.GalleryModel{} - for _, m := range models { - if strings.Contains(m.Name, form.Search) || - strings.Contains(m.Description, form.Search) || - strings.Contains(m.Gallery.Name, form.Search) || - strings.Contains(strings.Join(m.Tags, ","), form.Search) { - filteredModels = append(filteredModels, m) - } - } - - return c.SendString(elements.ListModels(filteredModels, installingModels)) + return c.SendString(elements.ListModels(gallery.GalleryModels(models).Search(form.Search), processingModels, galleryService)) }) /* @@ -103,7 +125,7 @@ func RegisterUIRoutes(app *fiber.App, uid := id.String() - installingModels.Set(galleryID, uid) + processingModels.Set(galleryID, uid) op := gallery.GalleryOp{ Id: uid, @@ -129,7 +151,7 @@ func RegisterUIRoutes(app *fiber.App, uid := id.String() - installingModels.Set(galleryID, uid) + processingModels.Set(galleryID, uid) op := gallery.GalleryOp{ Id: uid, @@ -174,10 +196,10 @@ func RegisterUIRoutes(app *fiber.App, status := galleryService.GetStatus(c.Params("uid")) galleryID := "" - for _, k := range installingModels.Keys() { - if installingModels.Get(k) == c.Params("uid") { + for _, k := range processingModels.Keys() { + if processingModels.Get(k) == c.Params("uid") { galleryID = k - installingModels.Delete(k) + processingModels.Delete(k) } } diff --git a/core/http/static/chat.js b/core/http/static/chat.js index db7e7856..7427ddee 100644 --- a/core/http/static/chat.js +++ b/core/http/static/chat.js @@ -26,25 +26,48 @@ OUT 
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ + function submitKey(event) { event.preventDefault(); localStorage.setItem("key", document.getElementById("apiKey").value); document.getElementById("apiKey").blur(); - } +} + +function submitSystemPrompt(event) { + event.preventDefault(); + localStorage.setItem("system_prompt", document.getElementById("systemPrompt").value); + document.getElementById("systemPrompt").blur(); +} +var image = ""; + function submitPrompt(event) { event.preventDefault(); const input = document.getElementById("input").value; - Alpine.store("chat").add("user", input); + Alpine.store("chat").add("user", input, image); document.getElementById("input").value = ""; const key = localStorage.getItem("key"); + const systemPrompt = localStorage.getItem("system_prompt"); - promptGPT(key, input); + promptGPT(systemPrompt, key, input); +} + +function readInputImage() { + + if (!this.files || !this.files[0]) return; + + const FR = new FileReader(); + + FR.addEventListener("load", function(evt) { + image = evt.target.result; + }); + + FR.readAsDataURL(this.files[0]); } - async function promptGPT(key, input) { + async function promptGPT(systemPrompt, key, input) { const model = document.getElementById("chat-model").value; // Set class "loader" to the element with "loader" id //document.getElementById("loader").classList.add("loader"); @@ -53,6 +76,72 @@ function submitPrompt(event) { document.getElementById("input").disabled = true; document.getElementById('messages').scrollIntoView(false) + messages = Alpine.store("chat").messages(); + + // if systemPrompt isn't empty, push it at the start of messages + if (systemPrompt) { + messages.unshift({ + role: "system", + content: systemPrompt + }); + } + + // loop all messages, and check if there are images. 
If there are, we need to change the content field + messages.forEach((message) => { + if (message.image) { + // The content field now becomes an array + message.content = [ + { + "type": "text", + "text": message.content + } + ] + message.content.push( + { + "type": "image_url", + "image_url": { + "url": message.image, + } + } + ); + + // remove the image field + delete message.image; + } + }); + + // reset the form and the image + image = ""; + document.getElementById("input_image").value = null; + document.getElementById("fileName").innerHTML = ""; + + // if (image) { + // // take the last element content's and add the image + // last_message = messages[messages.length - 1] + // // The content field now becomes an array + // last_message.content = [ + // { + // "type": "text", + // "text": last_message.content + // } + // ] + // last_message.content.push( + // { + // "type": "image_url", + // "image_url": { + // "url": image, + // } + // } + // ); + // // and we replace it in the messages array + // messages[messages.length - 1] = last_message + + // // reset the form and the image + // image = ""; + // document.getElementById("input_image").value = null; + // document.getElementById("fileName").innerHTML = ""; + // } + // Source: https://stackoverflow.com/a/75751803/11386095 const response = await fetch("/v1/chat/completions", { method: "POST", @@ -62,7 +151,7 @@ function submitPrompt(event) { }, body: JSON.stringify({ model: model, - messages: Alpine.store("chat").messages(), + messages: messages, stream: true, }), }); @@ -122,12 +211,24 @@ function submitPrompt(event) { } document.getElementById("key").addEventListener("submit", submitKey); + document.getElementById("system_prompt").addEventListener("submit", submitSystemPrompt); + document.getElementById("prompt").addEventListener("submit", submitPrompt); document.getElementById("input").focus(); + document.getElementById("input_image").addEventListener("change", readInputImage); - const storeKey = localStorage.getItem("key"); + storeKey = localStorage.getItem("key"); if (storeKey) { document.getElementById("apiKey").value = storeKey; + } else { + document.getElementById("apiKey").value = null; + } + + storesystemPrompt = localStorage.getItem("system_prompt"); + if (storesystemPrompt) { + document.getElementById("systemPrompt").value = storesystemPrompt; + } else { + document.getElementById("systemPrompt").value = null; } marked.setOptions({ diff --git a/core/http/static/general.css b/core/http/static/general.css index 09f6dc7e..fd1161e8 100644 --- a/core/http/static/general.css +++ b/core/http/static/general.css @@ -72,16 +72,6 @@ body { margin: 0.5rem; } -.remote-code { /* Attempt to make this stand out */ - outline-style: solid; - outline-color: red; - outline-width: 0.33rem; -} - -.remote-code::after { - content: "\0026A0 Trust Remote Code Required \0026A0" -} - ul { list-style-type: disc; /* Adds bullet points */ padding-left: 1.25rem; /* Indents the list from the left margin */ diff --git a/core/http/views/chat.html b/core/http/views/chat.html index eebf9083..190cb877 100644 --- a/core/http/views/chat.html +++ b/core/http/views/chat.html @@ -62,17 +62,34 @@ SOFTWARE. +
+
+
+ +
@@ -111,15 +128,19 @@ SOFTWARE. + -
+
+
- + + +
@@ -146,7 +170,7 @@ SOFTWARE. clear() { this.history.length = 0; }, - add(role, content) { + add(role, content, image) { const N = this.history.length - 1; if (this.history.length && this.history[N].role === role) { this.history[N].content += content; @@ -167,6 +191,7 @@ SOFTWARE. role: role, content: content, html: c, + image: image, }); } @@ -191,6 +216,7 @@ SOFTWARE. return { role: message.role, content: message.content, + image: message.image, }; }); }, diff --git a/core/http/views/index.html b/core/http/views/index.html index f8cae175..66de37fa 100644 --- a/core/http/views/index.html +++ b/core/http/views/index.html @@ -10,38 +10,76 @@

Welcome to your LocalAI instance!

-
- -

The FOSS alternative to OpenAI, Claude, ...

Documentation
-
+
+ + + {{ if .ProcessingModels }} +

Operations in progress

+ {{end}} + {{$taskType:=.TaskTypes}} + {{ range $key,$value:=.ProcessingModels }} + {{ $op := index $taskType $key}} + {{$parts := split "@" $key}} +
+
+ {{$parts._1}} (from the '{{$parts._0}}' repository) +
+
+

{{$op}} +

+
+
+ {{ end }} + + {{ if eq (len .ModelsConfig) 0 }}

Ouch! seems you don't have any models installed!

..install something from the 🖼️ Gallery or check the Getting started documentation

{{ else }}

Installed models

We have {{len .ModelsConfig}} pre-loaded models available.

-
    + + + + + + + + + + {{$galleryConfig:=.GalleryConfig}} + {{$noicon:="https://upload.wikimedia.org/wikipedia/commons/6/65/No-Image-Placeholder.svg"}} {{ range .ModelsConfig }} {{ $cfg:= index $galleryConfig .Name}} -
  • -
    - +
  • + + + + {{ end }} - + +
    Model Name | Backend | Actions
    + {{ with $cfg }} - -

    {{.Name}}

    + {{ else}} + + {{ end }} +
    +

    {{.Name}}

    +
    {{ if .Backend }} @@ -52,16 +90,20 @@ auto {{ end }} + - - - +
    {{ end }} + + +
diff --git a/core/http/views/models.html b/core/http/views/models.html index 17561594..fe5af2d5 100644 --- a/core/http/views/models.html +++ b/core/http/views/models.html @@ -63,8 +63,33 @@ {{ end }}
+ - + {{ if .ProcessingModels }} +

Operations in progress

+ {{end}} + {{$taskType:=.TaskTypes}} + {{ range $key,$value:=.ProcessingModels }} + {{ $op := index $taskType $key}} + {{$parts := split "@" $key}} +
+
+ {{$parts._1}} (from the '{{$parts._0}}' repository) +
+
+

{{$op}} +

+
+
+ {{ end }} + + +
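
Notes with illustrative sketches follow; none of the code below is part of the patch.

Both the modelStatus closure in core/http/routes/ui.go and actionDiv in core/http/elements/gallery.go classify an in-flight gallery job as "Installation", "Deletion", or "Waiting" (the last when GetStatus still returns nil). The sketch below mirrors that classification; opStatus and the plain maps are simplified stand-ins for the gallery service status and xsync.SyncedMap, not the real LocalAI types.

package main

import "fmt"

// opStatus stands in for the status object returned by galleryService.GetStatus;
// only the field the UI inspects is kept.
type opStatus struct {
	Deletion bool
}

// classify mirrors the modelStatus closure added in ui.go: for every
// "gallery@model" key currently being processed it reports the kind of
// operation, falling back to "Waiting" when the job has not started yet.
func classify(processing map[string]string, getStatus func(uid string) *opStatus) map[string]string {
	taskTypes := map[string]string{}
	for galleryID, uid := range processing {
		taskTypes[galleryID] = "Installation"
		if status := getStatus(uid); status == nil {
			taskTypes[galleryID] = "Waiting"
		} else if status.Deletion {
			taskTypes[galleryID] = "Deletion"
		}
	}
	return taskTypes
}

func main() {
	processing := map[string]string{
		"localai@model-a": "uid-1", // deletion in progress
		"localai@model-b": "uid-2", // queued, no status yet
	}
	statuses := map[string]*opStatus{"uid-1": {Deletion: true}}
	fmt.Println(classify(processing, func(uid string) *opStatus { return statuses[uid] }))
	// map[localai@model-a:Deletion localai@model-b:Waiting]
}

WelcomeEndpoint receives this classifier as a plain func() (map[string]string, map[string]string), so the welcome page can render ProcessingModels and TaskTypes without taking a direct dependency on the gallery service.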
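
getBase64Image in core/http/endpoints/openai/request.go now strips both the jpeg and the png data-URL prefixes before treating the rest of the string as base64. Below is a minimal sketch of the same idea written against the generic data:image/<subtype>;base64, form; the helper name stripDataURLPrefix and the example inputs are assumptions for illustration only.

package main

import (
	"fmt"
	"strings"
)

// stripDataURLPrefix returns the raw base64 payload of a
// "data:image/<subtype>;base64,<payload>" string, or ok=false when the
// input is not a base64 image data URL.
func stripDataURLPrefix(s string) (payload string, ok bool) {
	if !strings.HasPrefix(s, "data:image/") {
		return s, false
	}
	parts := strings.SplitN(s, ";base64,", 2)
	if len(parts) != 2 {
		return s, false
	}
	return parts[1], true
}

func main() {
	for _, in := range []string{
		"data:image/jpeg;base64,/9j/4AAQSkZJRg==",
		"data:image/png;base64,iVBORw0KGgo=",
		"https://example.com/cat.png",
	} {
		payload, ok := stripDataURLPrefix(in)
		fmt.Println(ok, payload)
	}
}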
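
core/http/routes/ui.go replaces the inline substring filter with gallery.GalleryModels(models).Search(term), and the /browse page now applies the same search when a ?term= query parameter is present. The sketch below reproduces the matching behaviour of the removed loop as a method on a named slice type; it is an assumption about what the new helper does, not a copy of it (GalleryName stands in for m.Gallery.Name).

package main

import (
	"fmt"
	"strings"
)

type GalleryModel struct {
	Name        string
	Description string
	GalleryName string
	Tags        []string
}

type GalleryModels []*GalleryModel

// Search keeps the models whose name, description, gallery name or tags
// contain the term, exactly like the loop removed from ui.go.
func (gm GalleryModels) Search(term string) GalleryModels {
	out := GalleryModels{}
	for _, m := range gm {
		if strings.Contains(m.Name, term) ||
			strings.Contains(m.Description, term) ||
			strings.Contains(m.GalleryName, term) ||
			strings.Contains(strings.Join(m.Tags, ","), term) {
			out = append(out, m)
		}
	}
	return out
}

func main() {
	models := GalleryModels{
		{Name: "phi-2", Tags: []string{"llm", "gguf"}},
		{Name: "stablediffusion", Tags: []string{"image"}},
	}
	fmt.Println(len(models.Search("gguf"))) // 1
}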
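
The renamed processingModels map ties the UI routes together: the install and delete handlers Set(galleryID, uid) before queueing a gallery.GalleryOp, the welcome and /browse pages read the map to decide which model cards show a progress bar, and the /browse/job handler drops the entry once the HTMX "done" trigger polls the job's uid. A compressed sketch of that lifecycle, with a plain map standing in for xsync.SyncedMap.

package main

import "fmt"

func main() {
	// galleryID -> job uid, as kept by processingModels in ui.go.
	processing := map[string]string{}

	// 1. User clicks install/delete: remember the job under its gallery ID.
	processing["localai@model-a"] = "uid-1"

	// 2. Pages render a progress bar for every entry still in the map.
	for galleryID, uid := range processing {
		fmt.Printf("showing progress bar for %s (job %s)\n", galleryID, uid)
	}

	// 3. The job endpoint is polled with the uid once the operation is done;
	//    the matching entry is removed so the card falls back to its normal
	//    install/delete buttons on the next render.
	done := "uid-1"
	for galleryID, uid := range processing {
		if uid == done {
			delete(processing, galleryID)
		}
	}
	fmt.Println(len(processing)) // 0
}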
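
With the chat.js changes, a message that has an attached image is sent to /v1/chat/completions with its content converted from a plain string into an array of text and image_url parts, which is the shape getBase64Image then consumes on the server side. Here is a Go rendering of that payload; the struct names are illustrative, only the JSON field names come from the patch.

package main

import (
	"encoding/json"
	"fmt"
)

type imageURL struct {
	URL string `json:"url"`
}

type contentPart struct {
	Type     string    `json:"type"`
	Text     string    `json:"text,omitempty"`
	ImageURL *imageURL `json:"image_url,omitempty"`
}

type chatMessage struct {
	Role    string        `json:"role"`
	Content []contentPart `json:"content"`
}

func main() {
	msg := chatMessage{
		Role: "user",
		Content: []contentPart{
			{Type: "text", Text: "What is in this picture?"},
			{Type: "image_url", ImageURL: &imageURL{URL: "data:image/png;base64,iVBORw0KGgo="}},
		},
	}
	out, _ := json.MarshalIndent(msg, "", "  ")
	fmt.Println(string(out))
}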