fix: cut prompt from AutoGPTQ answers
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
commit 219751bb21
parent bb7772a364
@@ -61,7 +61,12 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
             top_p=top_p,
             repetition_penalty=penalty,
         )
-        return backend_pb2.Result(message=bytes(pipeline(request.Prompt)[0]["generated_text"], encoding='utf-8'))
+        t = pipeline(request.Prompt)[0]["generated_text"]
+        # Remove prompt from response if present
+        if request.Prompt in t:
+            t = t.replace(request.Prompt, "")
+
+        return backend_pb2.Result(message=bytes(t, encoding='utf-8'))
 
     def PredictStream(self, request, context):
         # Implement PredictStream RPC
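For context: HuggingFace-style text-generation pipelines, including the AutoGPTQ pipeline used here, typically echo the input prompt at the start of "generated_text", so without this change the client receives its own prompt back as part of the answer. Below is a minimal standalone sketch of the stripping logic; FakeRequest and fake_pipeline are hypothetical stand-ins for the real protobuf request message and the AutoGPTQ pipeline, not part of the actual backend.

# Sketch of the prompt-stripping fix, with assumed stand-in names
# (FakeRequest, fake_pipeline) replacing the real gRPC request and
# AutoGPTQ pipeline objects.
from dataclasses import dataclass

@dataclass
class FakeRequest:
    Prompt: str

def fake_pipeline(prompt):
    # Text-generation pipelines typically echo the prompt before the completion.
    return [{"generated_text": prompt + " Paris."}]

def predict(request):
    t = fake_pipeline(request.Prompt)[0]["generated_text"]
    # Remove the echoed prompt from the response if present,
    # so the client only receives the newly generated text.
    if request.Prompt in t:
        t = t.replace(request.Prompt, "")
    return bytes(t, encoding='utf-8')

print(predict(FakeRequest(Prompt="What is the capital of France?")))
# b' Paris.'

One design note: str.replace removes every occurrence of the prompt anywhere in the output, not just the leading echo. A stricter alternative would strip only a leading prefix, e.g. t = t[len(request.Prompt):] if t.startswith(request.Prompt) else t.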