# Source: LocalAI/gallery/vicuna-chat.yaml
---
# LocalAI model-gallery entry for the Vicuna chat model.
name: "vicuna-chat"
description: |
  Vicuna chat
# License is a plain label string; quoted so tooling never retypes it.
license: "LLaMA"
# config_file is a literal block scalar (|): its content is the model's
# LocalAI configuration, embedded verbatim as a nested YAML document.
# Content must be indented under the key or the block scalar is invalid.
config_file: |
  backend: llama-cpp
  context_size: 4096
  roles:
    user: "User: "
    system: "System: "
    assistant: "Assistant: "
  f16: true
  template:
    completion: |
      Complete the following sentence: {{.Input}}
    chat: |
      {{.Input}}
      ASSISTANT: