Use all custom wheels.

Intel's stable wheels, which were released a day or so prior, have a VRAM leak or some other general VRAM usage issue.
Also adds CCL, the stable wheels for which seem fine (?)
a-One-Fan 2023-05-01 23:19:10 +03:00
parent bd5e0c1d56
commit 7261ec2b1f
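
A quick way to confirm that the custom wheels (rather than Intel's stable ones) actually ended up in the venv after running this setup is a check along the following lines. This is only a sketch: it assumes the venv the script creates at ./venv and an IPEX XPU build that exposes torch.xpu.

source ./venv/bin/activate
pip list | grep -Ei "torch|intel-extension"   # versions should match the custom wheel filenames in the diff below
python -c "import torch, intel_extension_for_pytorch as ipex; print(torch.__version__, ipex.__version__, torch.xpu.is_available())"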


@@ -6,7 +6,8 @@ if [[ ! -e /opt/intel/oneapi/setvars.sh ]]; then
 echo "You don't have the intel oneAPI base toolkit installed."
 doguide=1
 else
-read -p "Would you like to review the links for resources for setting up oneAPI/your drivers?" yn
+echo "Would you like to review the links for resources for setting up oneAPI/your drivers?"
+read -p "Y/n: " yn
 if [[ $yn == [yY] ]]; then
 doguide=1
 fi
@@ -39,27 +40,33 @@ source ./venv/bin/activate
 python3 -m pip install --upgrade pip # just to be safe
 # IPEX
-python -m pip install torch==1.13.0a0+git6c9b55e torchvision==0.14.1a0 intel_extension_for_pytorch==1.13.120+xpu -f https://developer.intel.com/ipex-whl-stable-xpu
-# Non-mandatory torchaudio install
-pip list | grep torchaudio
-if [[ $? == 1 ]]; then
-echo "torchaudio is not installed. You may install one yourself (venv location is ./venv/bin/activate)"
-read -p "Would you like the setup to install a custom built wheel for you, also installing/using megatools? (No exits setup) Y/N:" yn
-if [[ $yn == [yY] ]]; then
-sudo apt install megatools
-mkdir temp_torchaudio_wheel
-cd ./temp_torchaudio_wheel
-megadl https://mega.nz/file/jc4SnCgS#_7vProAZGAw1l0vojLk7Reo7UZEUCeeHT0ye12Haow8
-pip install --force-reinstall --no-deps torchaudio-0.13.1+b90d798-cp310-cp310-linux_x86_64.whl
-cd ..
-rm -rf ./temp_torchaudio_wheel
-else
-echo "Exiting..."
-exit 1
-fi
-fi
+echo ""
+echo "Currently, Intel does not provide appropriate wheels: no torchaudio wheels whatsoever, and the latest stable IPEX wheels use too much VRAM."
+echo "Building the current master from source, or acquiring wheels from unofficial sources, is necessary."
+echo ""
+echo "Would you like the setup to install known working, custom built, home grown, corn fed wheels for you, also installing/using megatools?"
+read -p "(No exits setup) Y/n: " yn
+if [[ $yn == [yY] ]]; then
+sudo apt install megatools
+mkdir temp_wheels
+cd ./temp_wheels
+megadl https://mega.nz/folder/LBgQSTyS#BPjGq8WEpjoZ-uQF7deqTg
+python -m pip install requests pillow expecttest hypothesis
+python -m pip install --force-reinstall torch-1.13.0a0+git49444c3-cp310-cp310-linux_x86_64.whl
+python -m pip install --force-reinstall --no-deps torchvision-0.14.1a0+5e8e2f1-cp310-cp310-linux_x86_64.whl
+python -m pip install --force-reinstall --no-deps torchaudio-0.13.1+b90d798-cp310-cp310-linux_x86_64.whl
+python -m pip install --force-reinstall intel_extension_for_pytorch-1.13.120+git947ab23-cp310-cp310-linux_x86_64.whl
+cd ..
+rm -rf ./temp_wheels
+else
+echo "Exiting..."
+exit 1
+fi
+# Luckily, it seems that Intel's CCL is sufficiently good
+python -m pip install oneccl_bind_pt -f https://developer.intel.com/ipex-whl-stable-xpu
 # install requirements
 python3 -m pip install -r ./modules/tortoise-tts/requirements.txt # install TorToiSe requirements
 python3 -m pip install -e ./modules/tortoise-tts/ # install TorToiSe
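
Since the commit also pulls in oneccl_bind_pt, a minimal way to check that the CCL backend actually loads is sketched below. It is not part of the setup script, and it assumes the wheel exposes the oneccl_bindings_for_pytorch module and that a single local process is enough for the check.

source /opt/intel/oneapi/setvars.sh   # oneAPI runtime env; the script already verifies this file exists
source ./venv/bin/activate
MASTER_ADDR=127.0.0.1 MASTER_PORT=29500 python - <<'EOF'
import torch.distributed as dist
import oneccl_bindings_for_pytorch  # registers the "ccl" backend with torch.distributed
dist.init_process_group(backend="ccl", rank=0, world_size=1)
print("ccl initialized:", dist.is_initialized())
dist.destroy_process_group()
EOF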