LocalAI/examples/configurations/phi-2.yaml

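# LocalAI model configuration for Phi-2 (GGUF, Q8_0 quantization from TheBloke on Hugging Face).
# f16, gpu_layers and mmap control how the backend loads the weights (16-bit floats,
# up to 90 layers offloaded to the GPU, memory-mapped model file); trimsuffix strips
# trailing newlines from generations, and the template block reuses the same
# "Instruct/Output" prompt for both chat and completion via a YAML anchor.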
name: phi-2
context_size: 2048
f16: true
gpu_layers: 90
mmap: true
trimsuffix:
- "\n"
parameters:
  model: huggingface://TheBloke/phi-2-GGUF/phi-2.Q8_0.gguf
  temperature: 0.2
  top_k: 40
  top_p: 0.95
template:
  chat: &template |
    Instruct: {{.Input}}
    Output:
  completion: *template
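
Once LocalAI has loaded this file from its models directory, the model can be queried by the name given above. Below is a minimal sketch of a chat request against LocalAI's OpenAI-compatible API, assuming the server runs on its default port 8080; the host, prompt, and timeout are placeholders.

# query_phi2.py -- sketch of a chat completion request to a local LocalAI server
import requests

response = requests.post(
    "http://localhost:8080/v1/chat/completions",
    json={
        "model": "phi-2",  # must match the `name:` field in phi-2.yaml
        "messages": [{"role": "user", "content": "Explain what mmap does."}],
        "temperature": 0.2,
    },
    timeout=120,
)
response.raise_for_status()
# OpenAI-compatible response shape: first choice, assistant message content
print(response.json()["choices"][0]["message"]["content"])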