LocalAI/gallery/moondream.yaml

---
name: "moondream2"
config_file: |
  backend: llama-cpp
  context_size: 2046
  roles:
    user: "\nQuestion: "
    system: "\nSystem: "
    assistant: "\nAnswer: "
  stopwords:
  - "Question:"
  f16: true
  template:
    completion: |
      Complete the following sentence: {{.Input}}
    chat: "{{.Input}}\nAnswer: "