# LocalAI/embedded/models/codellama-7b.yaml
---
# LocalAI embedded model definition for CodeLlama-7B.
# `name` is the model id clients pass in API requests; `backend`
# selects the inference backend that serves it.
name: codellama-7b
backend: transformers

# Inference parameters forwarded to the backend.
parameters:
  # Hugging Face repository id of the weights to load.
  model: codellama/CodeLlama-7b-hf
  # Low temperature — code completion favors deterministic output.
  temperature: 0.2
  top_k: 40
  # -1 = random seed on every request.
  seed: -1
  top_p: 0.95

# Example request shown to users; literal block scalar preserves
# the multi-line curl command verbatim.
usage: |
  curl http://localhost:8080/v1/completions -H "Content-Type: application/json" -d '{
    "model": "codellama-7b",
    "prompt": "import socket\n\ndef ping_exponential_backoff(host: str):"
  }'