deps(conda): use transformers-env with vllm,exllama(2) (#1554)

* deps(conda): use transformers with vllm

* join vllm, exllama, exllama2, split petals
Author: Ettore Di Giacinto, 2024-01-06 13:32:28 +01:00, committed by GitHub
Parent: ce724a7e55
Commit: 949da7792d
15 changed files with 40 additions and 131 deletions
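
The commit message above summarizes the change: the vllm, exllama and exllama2 backends are joined into the shared conda environment (their Makefiles now delegate to ../common-env/transformers and their run scripts activate the "transformers" environment), while petals is split back out into a dedicated "petals" environment. Below is a minimal bash sketch of the activation pattern that results, assembled from the scripts touched in the diffs that follow; the standalone script itself and the CONDA_ENV default are illustrative, not part of the commit.

#!/bin/bash
set -e

# Assumes conda lives under /opt/conda, as in the scripts changed below.
export PATH=$PATH:/opt/conda/bin

# The joined backends (vllm, exllama, exllama2) activate the shared
# "transformers" environment; the petals scripts set CONDA_ENV=petals instead.
CONDA_ENV=${CONDA_ENV:-transformers}

# Same fallback used by the petals run script in this commit: prefer the
# legacy `source activate` helper when it exists, otherwise go through the
# conda shell hook and `conda activate`.
if [ -f /opt/conda/bin/activate ]; then
    source activate $CONDA_ENV
else
    eval "$(conda shell.bash hook)"
    conda activate $CONDA_ENV
fi

echo "Active environment: $CONDA_PREFIX"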

View File

@@ -45,7 +45,7 @@ dependencies:
  - fsspec==2023.6.0
  - funcy==2.0
  - grpcio==1.59.0
- - huggingface-hub==0.16.4
+ - huggingface-hub
  - idna==3.4
  - jinja2==3.1.2
  - jmespath==1.0.1
@@ -70,7 +70,6 @@ dependencies:
  - packaging==23.2
  - pandas
  - peft==0.5.0
- - git+https://github.com/bigscience-workshop/petals
  - protobuf==4.24.4
  - psutil==5.9.5
  - pyarrow==13.0.0
@@ -85,12 +84,10 @@ dependencies:
  - scipy==1.11.3
  - six==1.16.0
  - sympy==1.12
- - tokenizers==0.14.0
- - torch==2.1.0
- - torchaudio==2.1.0
+ - tokenizers
+ - torch==2.1.2
+ - torchaudio==2.1.2
  - tqdm==4.66.1
- - transformers==4.34.0
  - TTS==0.22.0
  - triton==2.1.0
  - typing-extensions==4.8.0
  - tzdata==2023.3
@@ -114,4 +111,7 @@ dependencies:
  - sudachipy
  - sudachidict_core
  - vocos
+ - vllm==0.2.7
+ - transformers>=4.36.0 # Required for Mixtral.
+ - xformers==0.0.23.post1
  prefix: /opt/conda/envs/transformers

View File

@@ -46,7 +46,7 @@ dependencies:
  - fsspec==2023.6.0
  - funcy==2.0
  - grpcio==1.59.0
- - huggingface-hub==0.16.4
+ - huggingface-hub
  - idna==3.4
  - jinja2==3.1.2
  - jmespath==1.0.1
@@ -59,7 +59,6 @@ dependencies:
  - packaging==23.2
  - pandas
  - peft==0.5.0
- - git+https://github.com/bigscience-workshop/petals
  - protobuf==4.24.4
  - psutil==5.9.5
  - pyarrow==13.0.0
@@ -74,11 +73,10 @@ dependencies:
  - scipy==1.11.3
  - six==1.16.0
  - sympy==1.12
- - tokenizers==0.14.0
- - torch==2.1.0
- - torchaudio==2.1.0
+ - tokenizers
+ - torch==2.1.2
+ - torchaudio==2.1.2
  - tqdm==4.66.1
- - transformers==4.34.0
  - triton==2.1.0
  - typing-extensions==4.8.0
  - tzdata==2023.3
@@ -102,4 +100,7 @@ dependencies:
  - sudachipy
  - sudachidict_core
  - vocos
+ - vllm==0.2.7
+ - transformers>=4.36.0 # Required for Mixtral.
+ - xformers==0.0.23.post1
  prefix: /opt/conda/envs/transformers

View File

@@ -1,8 +1,6 @@
  .PHONY: exllama
  exllama:
- @echo "Creating virtual environment..."
- @conda env create --name exllama --file exllama.yml
- @echo "Virtual environment created."
+ $(MAKE) -C ../common-env/transformers
  bash install.sh
  .PHONY: run

View File

@@ -5,7 +5,7 @@
  export PATH=$PATH:/opt/conda/bin
  # Activate conda environment
- source activate exllama
+ source activate transformers
  echo $CONDA_PREFIX

View File

@@ -6,7 +6,7 @@
  export PATH=$PATH:/opt/conda/bin
  # Activate conda environment
- source activate exllama
+ source activate transformers
  # get the directory where the bash script is located
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

View File

@@ -1,8 +1,6 @@
  .PHONY: exllama2
  exllama2:
- @echo "Creating virtual environment..."
- @conda env create --name exllama2 --file exllama2.yml
- @echo "Virtual environment created."
+ $(MAKE) -C ../common-env/transformers
  bash install.sh
  .PHONY: run

View File

@@ -5,7 +5,7 @@
  export PATH=$PATH:/opt/conda/bin
  # Activate conda environment
- source activate exllama2
+ source activate transformers
  echo $CONDA_PREFIX

View File

@@ -6,7 +6,7 @@
  export PATH=$PATH:/opt/conda/bin
  # Activate conda environment
- source activate exllama2
+ source activate transformers
  # get the directory where the bash script is located
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

View File

@@ -1,6 +1,8 @@
  .PHONY: petals
  petals:
- $(MAKE) -C ../common-env/transformers
+ @echo "Creating virtual environment..."
+ @conda env create --name petals --file petals.yml
+ @echo "Virtual environment created."
  .PHONY: run
  run:

View File

@@ -5,14 +5,16 @@
  export PATH=$PATH:/opt/conda/bin
+ CONDA_ENV=petals
  # Activate conda environment
  # if source is available use it, or use conda
  #
  if [ -f /opt/conda/bin/activate ]; then
- source activate transformers
+ source activate $CONDA_ENV
  else
  eval "$(conda shell.bash hook)"
- conda activate transformers
+ conda activate $CONDA_ENV
  fi
  # get the directory where the bash script is located

View File

@@ -3,7 +3,16 @@
  ## A bash script wrapper that runs the transformers server with conda
  # Activate conda environment
- source activate transformers
+ CONDA_ENV=petals
+ # Activate conda environment
+ # if source is available use it, or use conda
+ #
+ if [ -f /opt/conda/bin/activate ]; then
+ source activate $CONDA_ENV
+ else
+ eval "$(conda shell.bash hook)"
+ conda activate $CONDA_ENV
+ fi
  # get the directory where the bash script is located
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

View File

@@ -1,8 +1,6 @@
  .PHONY: vllm
  vllm:
- @echo "Creating virtual environment..."
- @conda env create --name vllm --file vllm.yml
- @echo "Virtual environment created."
+ $(MAKE) -C ../common-env/transformers
  .PHONY: run
  run:

View File

@@ -6,7 +6,7 @@
  export PATH=$PATH:/opt/conda/bin
  # Activate conda environment
- source activate vllm
+ source activate transformers
  # get the directory where the bash script is located
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

View File

@@ -3,7 +3,7 @@
  ## A bash script wrapper that runs the transformers server with conda
  # Activate conda environment
- source activate vllm
+ source activate transformers
  # get the directory where the bash script is located
  DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

View File

@@ -1,99 +0,0 @@
name: vllm
channels:
- defaults
dependencies:
- _libgcc_mutex=0.1=main
- _openmp_mutex=5.1=1_gnu
- bzip2=1.0.8=h7b6447c_0
- ca-certificates=2023.08.22=h06a4308_0
- ld_impl_linux-64=2.38=h1181459_1
- libffi=3.4.4=h6a678d5_0
- libgcc-ng=11.2.0=h1234567_1
- libgomp=11.2.0=h1234567_1
- libstdcxx-ng=11.2.0=h1234567_1
- libuuid=1.41.5=h5eee18b_0
- ncurses=6.4=h6a678d5_0
- openssl=3.0.11=h7f8727e_2
- pip=23.2.1=py311h06a4308_0
- python=3.11.5=h955ad1f_0
- readline=8.2=h5eee18b_0
- setuptools=68.0.0=py311h06a4308_0
- sqlite=3.41.2=h5eee18b_0
- tk=8.6.12=h1ccaba5_0
- wheel=0.41.2=py311h06a4308_0
- xz=5.4.2=h5eee18b_0
- zlib=1.2.13=h5eee18b_0
- pip:
- aiosignal==1.3.1
- anyio==3.7.1
- attrs==23.1.0
- certifi==2023.7.22
- charset-normalizer==3.3.0
- click==8.1.7
- cmake==3.27.6
- fastapi==0.103.2
- filelock==3.12.4
- frozenlist==1.4.0
- fsspec==2023.9.2
- grpcio==1.59.0
- h11==0.14.0
- httptools==0.6.0
- huggingface-hub==0.17.3
- idna==3.4
- jinja2==3.1.2
- jsonschema==4.19.1
- jsonschema-specifications==2023.7.1
- lit==17.0.2
- markupsafe==2.1.3
- mpmath==1.3.0
- msgpack==1.0.7
- networkx==3.1
- ninja==1.11.1
- numpy==1.26.0
- nvidia-cublas-cu11==11.10.3.66
- nvidia-cuda-cupti-cu11==11.7.101
- nvidia-cuda-nvrtc-cu11==11.7.99
- nvidia-cuda-runtime-cu11==11.7.99
- nvidia-cudnn-cu11==8.5.0.96
- nvidia-cufft-cu11==10.9.0.58
- nvidia-curand-cu11==10.2.10.91
- nvidia-cusolver-cu11==11.4.0.1
- nvidia-cusparse-cu11==11.7.4.91
- nvidia-nccl-cu11==2.14.3
- nvidia-nvtx-cu11==11.7.91
- packaging==23.2
- pandas==2.1.1
- protobuf==4.24.4
- psutil==5.9.5
- pyarrow==13.0.0
- pydantic==1.10.13
- python-dateutil==2.8.2
- python-dotenv==1.0.0
- pytz==2023.3.post1
- pyyaml==6.0.1
- ray==2.7.0
- referencing==0.30.2
- regex==2023.10.3
- requests==2.31.0
- rpds-py==0.10.4
- safetensors==0.4.0
- sentencepiece==0.1.99
- six==1.16.0
- sniffio==1.3.0
- starlette==0.27.0
- sympy==1.12
- tokenizers==0.14.1
- torch==2.0.1
- tqdm==4.66.1
- transformers==4.34.0
- triton==2.0.0
- typing-extensions==4.8.0
- tzdata==2023.3
- urllib3==2.0.6
- uvicorn==0.23.2
- uvloop==0.17.0
- vllm==0.2.0
- watchfiles==0.20.0
- websockets==11.0.3
- xformers==0.0.22
prefix: /opt/conda/envs/vllm