package langchain

import (
	"context"
	"fmt"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/huggingface"
)

// HuggingFace wraps the LangChainGo Hugging Face client with the model
// repository ID and API token needed to call the Inference API.
type HuggingFace struct {
	modelPath string
	token     string
}

// NewHuggingFace returns a HuggingFace client for the given model repository
// ID. It fails if no API token is provided.
func NewHuggingFace(repoId, token string) (*HuggingFace, error) {
	if token == "" {
		return nil, fmt.Errorf("no huggingface token provided")
	}
	return &HuggingFace{
		modelPath: repoId,
		token:     token,
	}, nil
}

// PredictHuggingFace sends a completion request for the given prompt to the
// Hugging Face Inference API through LangChainGo.
func (s *HuggingFace) PredictHuggingFace(text string, opts ...PredictOption) (*Predict, error) {
	po := NewPredictOptions(opts...)

	// Init client
	llm, err := huggingface.New(huggingface.WithToken(s.token))
	if err != nil {
		return nil, err
	}

	// Convert from LocalAI to LangChainGo format of options
	co := []llms.CallOption{
		llms.WithModel(po.Model),
		llms.WithMaxTokens(po.MaxTokens),
		llms.WithTemperature(po.Temperature),
		llms.WithStopWords(po.StopWords),
	}

	// Call Inference API
	ctx := context.Background()
	completion, err := llm.Call(ctx, text, co...)
	if err != nil {
		return nil, err
	}

	return &Predict{
		Completion: completion,
	}, nil
}
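
// Minimal usage sketch, assuming a valid Hugging Face API token (for example
// read from an environment variable) and a model repository ID such as "gpt2";
// PredictOption helpers from this package (model, temperature, and similar
// settings) are omitted here for brevity:
//
//	hf, err := NewHuggingFace("gpt2", token)
//	if err != nil {
//		// handle missing token
//	}
//	res, err := hf.PredictHuggingFace("The capital of France is")
//	if err != nil {
//		// handle inference error
//	}
//	fmt.Println(res.Completion)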