package transformers

// This is a wrapper to satisfy the gRPC service interface.
// It is meant to be used by the main executable that is the server for the
// specific backend type (falcon, gpt3, etc.); see the sketch after this file.

import (
	"fmt"

	"github.com/go-skynet/LocalAI/pkg/grpc/base"
	pb "github.com/go-skynet/LocalAI/pkg/grpc/proto"

	transformers "github.com/go-skynet/go-ggml-transformers.cpp"
)
// MPT wraps the go-ggml-transformers MPT model behind the backend interface.
type MPT struct {
	base.SingleThread

	mpt *transformers.MPT
}
// Load instantiates the MPT model from the file referenced in the model options.
func (llm *MPT) Load(opts *pb.ModelOptions) error {
	model, err := transformers.NewMPT(opts.ModelFile)
	llm.mpt = model
	return err
}
// Predict runs a single blocking prediction for the given prompt.
// buildPredictOptions (defined elsewhere in this package) translates the
// protobuf options into go-ggml-transformers predict options.
func (llm *MPT) Predict(opts *pb.PredictOptions) (string, error) {
	return llm.mpt.Predict(opts.Prompt, buildPredictOptions(opts)...)
}
// PredictStream falls back to Predict: the whole result is computed in a
// goroutine and delivered as a single message on the results channel, which
// is closed afterwards.
func (llm *MPT) PredictStream(opts *pb.PredictOptions, results chan string) error {
	go func() {
		res, err := llm.mpt.Predict(opts.Prompt, buildPredictOptions(opts)...)
		if err != nil {
			fmt.Println("err: ", err)
		}
		results <- res
		close(results)
	}()

	return nil
}
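Because PredictStream closes the results channel once its single fallback result has been sent, a caller only needs to range over the channel. The helper below is a hypothetical usage sketch, not code from the repository; it assumes it lives in the same package so it can reuse the fmt and pb imports above.

// consumeStream is a hypothetical illustration of draining a PredictStream call.
func consumeStream(llm *MPT, opts *pb.PredictOptions) error {
	results := make(chan string)
	if err := llm.PredictStream(opts, results); err != nil {
		return err
	}
	// With this fallback implementation exactly one chunk arrives before the
	// channel is closed, so the loop ends after a single iteration.
	for chunk := range results {
		fmt.Print(chunk)
	}
	return nil
}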
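The header comment refers to a per-backend main executable that serves this wrapper over gRPC. The entrypoint below is a minimal sketch only: the import paths, the grpc.StartServer helper, and the -addr flag are assumptions made for illustration and may not match the actual LocalAI sources.

package main

import (
	"flag"

	// Both import paths below are assumptions for this sketch.
	grpc "github.com/go-skynet/LocalAI/pkg/grpc"
	transformers "github.com/go-skynet/LocalAI/pkg/grpc/llm/transformers"
)

// addr is the address the backend gRPC server listens on (hypothetical flag).
var addr = flag.String("addr", "localhost:50051", "address to listen on")

func main() {
	flag.Parse()

	// Serve the MPT wrapper as a gRPC backend; grpc.StartServer is assumed here.
	if err := grpc.StartServer(*addr, &transformers.MPT{}); err != nil {
		panic(err)
	}
}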