mirror of
https://github.com/mudler/LocalAI.git
synced 2024-06-07 19:40:48 +00:00
939411300a
* Bump vLLM version to 0.3.2 * Add vLLM model loading options * Remove transformers-exllama * Fix install exllama
27 lines
650 B
Bash
Executable File
27 lines
650 B
Bash
Executable File
#!/bin/bash

# Abort on the first failing command and echo each command as it runs,
# so container build logs show exactly where an install step broke.
set -ex

# Put the conda tooling on PATH for everything that follows.
export PATH="$PATH:/opt/conda/bin"
# Check whether a conda environment exists.
# NOTE(review): the return value is inverted w.r.t. the name — this
# succeeds (0) when the environment does NOT exist, because `conda list
# --name` fails for unknown environments and the result is negated.
# Callers below rely on that inversion, so it is preserved as-is.
conda_env_exists() {
  ! conda list --name "$@" > /dev/null 2>&1
}
|
|
|
|
# Create the exllama conda environment from the environment file passed
# as $1, unless an environment with that name already exists.
if conda_env_exists "exllama" ; then
    echo "Creating virtual environment..."
    # Quote "$1" so an environment-file path containing spaces (or glob
    # characters) is passed to conda as a single argument.
    conda env create --name exllama --file "$1"
    echo "Virtual environment created."
else
    echo "Virtual environment already exists."
fi
|
|
|
|
# Enter the environment so pip installs into it.
source activate exllama

# Fetch the exllama sources into the environment prefix and install their
# Python dependencies. "$CONDA_PREFIX" is quoted so a prefix containing
# spaces does not word-split the clone/pushd targets.
git clone https://github.com/turboderp/exllama "$CONDA_PREFIX/exllama" && pushd "$CONDA_PREFIX/exllama" && pip install -r requirements.txt && popd

# Vendor the checkout into the current directory (the backend's working
# dir); the glob stays outside the quotes so it still expands.
cp -rfv "$CONDA_PREFIX/exllama/"* ./
|
|
|
|
# Optionally reclaim image space; opt in by exporting PIP_CACHE_PURGE=true.
if [[ "$PIP_CACHE_PURGE" == true ]]; then
    pip cache purge
fi