fix(vall-e-x): correctly install reqs in environment (#1377)

This commit is contained in:
Ettore Di Giacinto 2023-12-03 21:16:36 +01:00 committed by GitHub
parent 3d71bc9b64
commit 238fec244a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 26 additions and 7 deletions

View File

@ -185,12 +185,6 @@ RUN if [ "${IMAGE_TYPE}" = "extras" ]; then \
PATH=$PATH:/opt/conda/bin make -C backend/python/petals \
; fi
# Copy VALLE-X as it's not a real "lib"
# TODO: this is wrong - we should copy the lib into the conda env path
RUN if [ -d /usr/lib/vall-e-x ]; then \
cp -rfv /usr/lib/vall-e-x/* ./ ; \
fi
# we also copy exllama libs over to resolve exllama import error
# TODO: check if this is still needed
RUN if [ -d /usr/local/lib/python3.9/dist-packages/exllama ]; then \

View File

@ -3,6 +3,7 @@ ttsvalle:
@echo "Creating virtual environment..."
@conda env create --name ttsvalle --file ttsvalle.yml
@echo "Virtual environment created."
bash install.sh
.PHONY: run
run:

View File

@ -0,0 +1,14 @@
#!/bin/bash
##
## A bash script that installs the required dependencies of VALL-E-X and prepares the environment.
# Abort on the first failing command so a broken clone/install cannot be
# silently papered over by the final copy step.
# (Deliberately not using -u: conda's activate scripts reference unset vars.)
set -e

export PATH=$PATH:/opt/conda/bin

# Activate conda environment created by the Makefile target.
source activate ttsvalle

echo "$CONDA_PREFIX"

# Clone VALL-E-X into the environment prefix and install its Python deps.
git clone https://github.com/Plachtaa/VALL-E-X.git "$CONDA_PREFIX/vall-e-x"
pushd "$CONDA_PREFIX/vall-e-x"
pip install -r requirements.txt
popd

# VALL-E-X is not a real installable package: copy its sources into the
# backend directory so they can be imported at runtime.
cp -rfv "$CONDA_PREFIX/vall-e-x"/* ./

View File

@ -10,4 +10,4 @@ source activate ttsvalle
# get the directory where the bash script is located
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
python $DIR/ttvalle.py $@
python $DIR/ttsvalle.py $@

View File

@ -354,3 +354,13 @@ docker run --env REBUILD=true localai
# Option 2: set within an env file
docker run --env-file .env localai
```
### Build only a single backend
You can control the backends that are built by setting the `GRPC_BACKENDS` environment variable. For instance, to build only the `llama-cpp` backend:
```bash
make GRPC_BACKENDS=backend-assets/grpc/llama-cpp build
```
By default, all the backends are built.