Mirror of https://github.com/mudler/LocalAI.git, synced 2024-06-07 19:40:48 +00:00
Commit a1aa6cb7c2: Certain backends, such as vall-e-x, are not meant to be used as a library, so we want to start the process in the same folder where the backend and all of its assets are located. Fixes #1394
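The rationale can be illustrated with a minimal, hypothetical sketch (the paths and asset names below are invented for illustration, not taken from the repository): a backend that loads its model assets via paths relative to the current working directory only works when the process is started from the backend's own folder, which is exactly what the wrapper below guarantees.

# Hypothetical illustration: relative asset paths resolve against the current
# working directory, so where the process is started from decides success or failure.
cd /tmp && python backend.py            # looks for ./assets/model.npz under /tmp -> fails
cd /opt/backend && python backend.py    # finds ./assets/model.npz next to backend.py -> works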
#!/bin/bash

##
## A bash script wrapper that runs the exllama2 server with conda
##

export PATH=$PATH:/opt/conda/bin

# Activate conda environment
source activate exllama2

# get the directory where the bash script is located
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# run from the backend's own folder so relative asset lookups resolve correctly
cd "$DIR"

# start the backend, forwarding any arguments passed to this wrapper
python "$DIR/exllama2_backend.py" "$@"
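How the wrapper might be invoked, as a hedged sketch: any arguments are forwarded verbatim to exllama2_backend.py through "$@". The run.sh file name, the --addr flag, and the address below are assumptions for illustration, not confirmed by this file.

# Hypothetical invocation: the argument (a gRPC listen address) passes straight
# through the wrapper to exllama2_backend.py.
./run.sh --addr 127.0.0.1:50051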