vila_setup.sh
#!/usr/bin/env bash
# This is required to activate the conda environment from within this script
eval "$(conda shell.bash hook)"
CONDA_ENV=${1:-""}
if [ -n "$CONDA_ENV" ]; then
conda create -n "$CONDA_ENV" python=3.10 -y
conda activate "$CONDA_ENV"
else
echo "Skipping conda environment creation. Make sure you have the correct environment activated."
fi
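# Usage examples (assumption: the script is run from the repository root):
#   bash vila_setup.sh            # install into the currently activated environment
#   bash vila_setup.sh vila       # create and activate a fresh conda env named "vila" first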
# This is required to enable PEP 660 support
python -m pip install --upgrade pip
# Optional: install the CUDA toolkit via conda (skip this if you prefer the system's built-in nvcc)
# conda install -c nvidia cuda-toolkit -y
# Install FlashAttention2
python -m pip install https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
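# Note: the wheel above targets CUDA 12.2, PyTorch 2.3, and Python 3.10 (cp310).
# If your environment differs, pick a matching wheel from the flash-attention releases page,
# or build from source with `python -m pip install flash-attn` (assumption: a compatible
# nvcc is available for the source build).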
cd deps/VILA || exit 1
# Install VILA
python -m pip install -e .
python -m pip install -e ".[train]"
python -m pip install -e ".[eval]"
# Install HF's Transformers
python -m pip install git+https://github.com/huggingface/[email protected]
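# The Transformers revision is pinned because the transformers_replace files copied below
# are written against this exact version; a different version may not accept the
# overwritten modules cleanly (assumption based on the copy step that follows).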
site_pkg_path=$(python -c 'import site; print(site.getsitepackages()[0])')
cp -rv ./llava/train/transformers_replace/* "$site_pkg_path"/transformers/
cp -rv ./llava/train/deepspeed_replace/* "$site_pkg_path"/deepspeed/
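# Optional sanity check (a minimal sketch; assumes the installs above succeeded and that
# VILA exposes the `llava` package, as the paths above suggest):
# python -c "import transformers, flash_attn, llava; print('transformers', transformers.__version__)"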