Mirror of https://github.com/mudler/LocalAI.git, synced 2024-06-07 19:40:48 +00:00.
939411300a
* Bump vLLM version to 0.3.2 * Add vLLM model loading options * Remove transformers-exllama * Fix install exllama
12 lines
180 B
Makefile
12 lines
180 B
Makefile
# Conda environment spec consumed by install.sh.
# Unquoted on purpose: make keeps quote characters as part of a variable's
# value, so a quoted value would export CONDA_ENV_PATH='"exllama.yml"'
# (literal quotes) into the environment of recipe sub-shells.
export CONDA_ENV_PATH = exllama.yml
# Build the exllama backend: create/update the conda environment described
# by $(CONDA_ENV_PATH). Declared .PHONY because no file named "exllama" is
# produced. (Recipe lines must be indented with a tab.)
.PHONY: exllama
exllama:
	bash install.sh ${CONDA_ENV_PATH}
# Start the exllama backend via run.sh, with progress messages before and
# after. Declared .PHONY because no file named "run" is produced.
# (Recipe lines must be indented with a tab.)
.PHONY: run
run:
	@echo "Running exllama..."
	bash run.sh
	@echo "exllama run."