Changed CUDA_INSTALL variable to BNB_CUDA_INSTALL.

Tim Dettmers 2023-07-14 18:16:45 -07:00
parent 8cdec888b1
commit 6ec4f0c374
4 changed files with 16 additions and 17 deletions

CHANGELOG.md

@@ -264,12 +264,13 @@ Deprecated:
 Features:
  - Added precompiled CUDA 11.8 binaries to support H100 GPUs without compilation #571
  - CUDA SETUP now no longer looks for libcuda and libcudart and relies on PyTorch CUDA libraries. To manually override this behavior see: how_to_use_nonpytorch_cuda.md.
+ - CUDA SETUP now no longer looks for libcuda and libcudart and relies on PyTorch CUDA libraries. To manually override this behavior see: how_to_use_nonpytorch_cuda.md. Thank you @rapsealk
 
 Bug fixes:
  - Fixed a bug where the default type of absmax was undefined, which leads to errors if the default type is different than torch.float32. #553
  - Fixed a missing scipy dependency in requirements.txt. #544
  - Fixed a bug where a view operation could cause an error in 8-bit layers.
  - Fixed a bug where CPU bitsandbytes would fail during the import. #593 Thank you @bilelomrani
 
 Documentation:
  - Improved documentation for GPUs that do not support 8-bit matmul. #529

bitsandbytes/cuda_setup/main.py

@@ -101,17 +101,17 @@ class CUDASetup:
 
     def manual_override(self):
         if torch.cuda.is_available():
-            if 'CUDA_VERSION' in os.environ:
-                if len(os.environ['CUDA_VERSION']) > 0:
+            if 'BNB_CUDA_VERSION' in os.environ:
+                if len(os.environ['BNB_CUDA_VERSION']) > 0:
                     warn((f'\n\n{"="*80}\n'
-                          'WARNING: Manual override via CUDA_VERSION env variable detected!\n'
-                          'CUDA_VERSION=XXX can be used to load a bitsandbytes version that is different from the PyTorch CUDA version.\n'
-                          'If this was unintended set the CUDA_VERSION variable to an empty string: export CUDA_VERSION=\n'
+                          'WARNING: Manual override via BNB_CUDA_VERSION env variable detected!\n'
+                          'BNB_CUDA_VERSION=XXX can be used to load a bitsandbytes version that is different from the PyTorch CUDA version.\n'
+                          'If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=\n'
                           'If you use the manual override make sure the right libcudart.so is in your LD_LIBRARY_PATH\n'
                           'For example by adding the following to your .bashrc: export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:<path_to_cuda_dir/lib64\n'
-                          f'Loading CUDA version: CUDA_VERSION={os.environ["CUDA_VERSION"]}'
+                          f'Loading CUDA version: BNB_CUDA_VERSION={os.environ["BNB_CUDA_VERSION"]}'
                           f'\n{"="*80}\n\n'))
-                    self.binary_name = self.binary_name[:-6] + f'{os.environ["CUDA_VERSION"]}.so'
+                    self.binary_name = self.binary_name[:-6] + f'{os.environ["BNB_CUDA_VERSION"]}.so'
 
     def run_cuda_setup(self):
         self.initialized = True
@@ -237,10 +237,10 @@ def warn_in_case_of_duplicates(results_paths: Set[Path]) -> None:
             f"Found duplicate {CUDA_RUNTIME_LIBS} files: {results_paths}.. "
             "We select the PyTorch default libcudart.so, which is {torch.version.cuda},"
             "but this might missmatch with the CUDA version that is needed for bitsandbytes."
-            "To override this behavior set the CUDA_VERSION=<version string, e.g. 122> environmental variable"
+            "To override this behavior set the BNB_CUDA_VERSION=<version string, e.g. 122> environmental variable"
             "For example, if you want to use the CUDA version 122"
-            "CUDA_VERSION=122 python ..."
-            "OR set the environmental variable in your .bashrc: export CUDA_VERSION=122"
+            "BNB_CUDA_VERSION=122 python ..."
+            "OR set the environmental variable in your .bashrc: export BNB_CUDA_VERSION=122"
             "In the case of a manual override, make sure you set the LD_LIBRARY_PATH, e.g."
             "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda-11.2")
         CUDASetup.get_instance().add_log_entry(warning_msg, is_warning=True)

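The renamed override in `manual_override` boils down to a small string substitution on the precompiled binary name. Below is a minimal, self-contained sketch of that behavior for readers who don't want to trace the setup module; the helper name `resolve_binary_name` is invented for illustration and is not part of the bitsandbytes API, and it assumes the default name ends in a three-character CUDA version plus `.so` (e.g. `libbitsandbytes_cuda117.so`), which is what the `[:-6]` slice in the diff relies on.

```python
import os


def resolve_binary_name(default_name: str) -> str:
    """Illustrative stand-in (not the library's API) for the BNB_CUDA_VERSION override.

    If BNB_CUDA_VERSION is set and non-empty, strip the trailing '<ver>.so'
    suffix (6 characters, e.g. '117.so') and append the override version.
    """
    override = os.environ.get('BNB_CUDA_VERSION', '')
    if override:
        return default_name[:-6] + f'{override}.so'
    return default_name


# Example: with the override set, the CUDA 11.7 default resolves to the 12.2 binary.
os.environ['BNB_CUDA_VERSION'] = '122'
print(resolve_binary_name('libbitsandbytes_cuda117.so'))  # libbitsandbytes_cuda122.so
```

As the warning text in the diff notes, the override only switches which precompiled `libbitsandbytes_cudaXXX.so` is loaded; the matching `libcudart.so` still has to be reachable via `LD_LIBRARY_PATH`.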
how_to_use_nonpytorch_cuda.md

@@ -24,21 +24,19 @@ wget https://raw.githubusercontent.com/TimDettmers/bitsandbytes/main/cuda_instal
 bash cuda_install.sh 117 ~/local 1
 ```
 
-## Setting the environmental variables CUDA_HOME, CUDA_VERSION, and LD_LIBRARY_PATH
+## Setting the environmental variables BNB_CUDA_VERSION and LD_LIBRARY_PATH
 
 To manually override the PyTorch installed CUDA version you need to set two variables, like so:
 
 ```bash
-export CUDA_HOME=<PATH>
-export CUDA_VERSION=<VERSION>
+export BNB_CUDA_VERSION=<VERSION>
 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:<PATH>
 ```
 
 For example, to use the local install path from above:
 
 ```bash
-export CUDA_HOME=/home/tim/local/cuda-11.7
-export CUDA_VERSION=117
+export BNB_CUDA_VERSION=117
 export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/home/tim/local/cuda-11.7
 ```

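As a quick sanity check after exporting the variables above, the short Python snippet below (not part of the documented workflow, just an illustrative check) confirms that the override is visible to the interpreter that will import bitsandbytes, using the CUDA 11.7 example values.

```python
import os

# Illustrative check: confirm the exported override from the example above is
# visible to this Python process before bitsandbytes is imported.
version = os.environ.get('BNB_CUDA_VERSION', '')
ld_path = os.environ.get('LD_LIBRARY_PATH', '')

print(f'BNB_CUDA_VERSION = {version or "<unset>"}')
print(f'LD_LIBRARY_PATH includes cuda-11.7: {"cuda-11.7" in ld_path}')

if not version:
    print('No override set; bitsandbytes will follow the PyTorch CUDA version.')
```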
setup.py

@@ -18,7 +18,7 @@ def read(fname):
 
 setup(
     name=f"bitsandbytes",
-    version=f"0.40.1",
+    version=f"0.40.1.post1",
     author="Tim Dettmers",
     author_email="dettmers@cs.washington.edu",
     description="k-bit optimizers and matrix multiplication routines.",
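Since the only change here is the version bump to the post-release, a generic way to confirm which build ends up installed (this uses only the standard library, nothing from the repo) is:

```python
from importlib.metadata import version

# Prints the installed bitsandbytes version string, e.g. '0.40.1.post1'.
print(version('bitsandbytes'))
```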