lldacing committed
Commit 3407de9
Parent: eb7ccd8

Upload WindowsWhlBuilder_cuda.bat

Files changed (1):
  WindowsWhlBuilder_cuda.bat  +5 -2
WindowsWhlBuilder_cuda.bat CHANGED
@@ -1,9 +1,11 @@
 @echo off
 setlocal enabledelayedexpansion
 
+set MAX_JOBS=1
 
 :parseArgs
-if [%1] == [WORKERS] set MAX_JOBS=%2 & shift & shift & goto :parseargs
+rem Assigning a value to MAX_JOBS via a variable does not work in ninja, I don't know why
+rem if [%1] == [WORKERS] set MAX_JOBS=%2 & shift & shift & goto :parseargs
 if [%1] == [FORCE_CXX11_ABI] set FLASH_ATTENTION_FORCE_CXX11_ABI=%2 & shift & shift & goto :parseargs
 goto :buildContinue
 :end
@@ -27,13 +29,14 @@ rem # We want setuptools >= 49.6.0 otherwise we can't compile the extension if s
 rem # https://github.com/pytorch/pytorch/blob/664058fa83f1d8eede5d66418abff6e20bd76ca8/torch/utils/cpp_extension.py#L810
 rem # However this still fails so I'm using a newer version of setuptools
 rem pip install setuptools==68.0.0
-pip install "setuptools>=49.6.0" ninja packaging wheel psutil
+pip install "setuptools>=49.6.0" packaging wheel psutil
 rem # Limit MAX_JOBS otherwise the github runner goes OOM
 rem # CUDA 11.8 can compile with 2 jobs, but CUDA 12.3 goes OOM
 set FLASH_ATTENTION_FORCE_BUILD=TRUE
 set BUILD_TARGET=cuda
 set DISTUTILS_USE_SDK=1
 set dist_dir=dist
+
 python setup.py bdist_wheel --dist-dir=%dist_dir%
 
 
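For context, a minimal invocation sketch of the updated script (an assumption for illustration, not part of the commit): after this change the :parseArgs loop only consumes the FORCE_CXX11_ABI key/value pair, MAX_JOBS is fixed to 1 inside the script, and the wheel is written to the directory given by dist_dir.

rem Hypothetical usage from a VS x64 Native Tools prompt, assuming CUDA, PyTorch,
rem and the flash-attention sources are already set up (not handled by this script).
cd flash-attention

rem FORCE_CXX11_ABI TRUE is forwarded to FLASH_ATTENTION_FORCE_CXX11_ABI by :parseArgs;
rem there is no WORKERS argument any more, since MAX_JOBS is hard-coded to 1.
call WindowsWhlBuilder_cuda.bat FORCE_CXX11_ABI TRUE

rem The wheel lands in .\dist (dist_dir=dist in the script).
dir dist\*.whl

Pinning MAX_JOBS to 1 is consistent with the script's own comment that the GitHub runner goes OOM with more parallel jobs (CUDA 11.8 tolerates 2 jobs, CUDA 12.3 does not).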