mirror of
https://github.com/mudler/LocalAI.git
synced 2024-06-07 19:40:48 +00:00
939411300a
* Bump vLLM version to 0.3.2 * Add vLLM model loading options * Remove transformers-exllama * Fix install exllama
16 lines
330 B
Bash
Executable File
16 lines
330 B
Bash
Executable File
#!/bin/bash
##
## A bash script wrapper that runs the exllama server with conda.
## All command-line arguments are forwarded to exllama.py.
##
## Note: `set -u` is deliberately omitted — conda activation scripts
## commonly reference unset variables and would abort under it.

# Make the conda binaries reachable
export PATH=$PATH:/opt/conda/bin

# Activate conda environment
source activate exllama

# Get the directory where this bash script is located
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Quote the path and fail loudly if the cd cannot succeed,
# rather than launching python from the wrong working directory.
cd "$DIR" || exit 1

# exec replaces this shell with the python process so signals
# (SIGTERM from a supervisor, etc.) reach the server directly.
# "$@" preserves each argument as a separate word.
exec python "$DIR/exllama.py" "$@"