Skip to content

Commit

Permalink
Add iomp on windows
Browse files Browse the repository at this point in the history
  • Loading branch information
HGuillemet committed Jun 20, 2024
1 parent c0761cd commit 1378a19
Show file tree
Hide file tree
Showing 5 changed files with 41 additions and 10 deletions.
17 changes: 11 additions & 6 deletions .github/actions/deploy-windows/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,9 @@ runs:
rm "C:/msys64/mingw32/bin/clang-cl.exe" "C:/msys64/mingw64/bin/clang-cl.exe" "C:/msys64/mingw32/bin/cmake.exe" "C:/msys64/mingw64/bin/cmake.exe"
rm "C:/Strawberry/c/lib/libz.a" "C:/Strawberry/c/lib/libzlib.a" "C:/Strawberry/c/lib/libzdll.a" "C:/Strawberry/c/bin/cmake.exe"
curl -LO https://github.com/Kitware/CMake/releases/download/v3.26.4/cmake-3.26.4-win64-x64.msi
msiexec /i cmake-3.26.4-win64-x64.msi /qn
rm cmake-3.26.4-win64-x64.msi
rem curl -LO https://github.com/Kitware/CMake/releases/download/v3.30.0-rc3/cmake-3.30.0-rc3-win64-x64.msi
rem msiexec /i cmake-3.30.0-rc3-win64-x64.msi /qn
rem rm cmake-3.30.0-rc3-win64-x64.msi
curl -LO https://archive.apache.org/dist/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz
bash -c "tar -xzf apache-maven-3.6.3-bin.tar.gz -C 'C:/Program Files/'"
Expand Down Expand Up @@ -159,10 +159,15 @@ runs:
move TensorRT-10.0.1.6 "%ProgramFiles%\NVIDIA GPU Computing Toolkit\TensorRT"
)
if "%CI_DEPLOY_MODULE%"=="mkl" (
if not "%CI_DEPLOY_NEED_MKL%"=="" (
echo Installing MKL
curl -LO https://registrationcenter-download.intel.com/akdlm/IRC_NAS/b5a4ff98-2c98-4b28-87f7-32082cac359e/w_onemkl_p_2024.0.0.49672_offline.exe
w_onemkl_p_2024.0.0.49672_offline.exe -s -a -s --eula accept
if "%CI_DEPLOY_NEED_MKL%"=="2022" (
curl -LO https://registrationcenter-download.intel.com/akdlm/IRC_NAS/18899/w_onemkl_p_2022.2.0.9563_offline.exe
w_onemkl_p_2022.2.0.9563_offline.exe -s -a -s --eula accept
) else (
curl -LO https://registrationcenter-download.intel.com/akdlm/IRC_NAS/b5a4ff98-2c98-4b28-87f7-32082cac359e/w_onemkl_p_2024.0.0.49672_offline.exe
w_onemkl_p_2024.0.0.49672_offline.exe -s -a -s --eula accept
)
)
if "%CI_DEPLOY_PLATFORM%"=="windows-x86" if "%CI_DEPLOY_MODULE%"=="flycapture" (
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/mkl.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ env:
CI_DEPLOY_USERNAME: ${{ secrets.CI_DEPLOY_USERNAME }}
CI_DEPLOY_PASSWORD: ${{ secrets.CI_DEPLOY_PASSWORD }}
STAGING_REPOSITORY: ${{ secrets.STAGING_REPOSITORY }}
CI_DEPLOY_NEED_MKL: 1
jobs:
# linux-x86:
# runs-on: ubuntu-20.04
Expand Down
23 changes: 21 additions & 2 deletions pytorch/cppbuild.sh
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,13 @@ if [[ $PLATFORM == windows* ]]; then
cmake --build . --config Release
cmake --install . --config Release --prefix ../dist
cd ../..

#nuget install intelopenmp.devel.win -Version 2024.1.0.964
#export CMAKE_INCLUDE_PATH=${INSTALL_PATH}/intelopenmp.devel.win.2024.1.0.964/build/native/include
#export CMAKE_LIBRARY_PATH="${INSTALL_PATH}/intelopenmp.devel.win.2024.1.0.964/build/native/win-x64;${INSTALL_PATH}/intelopenmp.redist.win.2024.1.0.964/runtimes/win-x64/native"

fi
export libuv_ROOT=`pwd`/libuv/dist
export libuv_ROOT=${INSTALL_PATH}/libuv/dist
fi

if [[ ! -d pytorch ]]; then
Expand Down Expand Up @@ -205,9 +210,23 @@ sedinplace 's/char(\(.*\))/\1/g' torch/csrc/jit/serialization/pickler.h
# some windows header defines a macro named "interface"
sedinplace 's/const std::string& interface)/const std::string\& interface_name)/g' torch/csrc/distributed/c10d/ProcessGroupGloo.hpp

if [[ $PLATFORM == windows* ]]; then
# Remove pytorch adaptations of FindOpenMP.cmake that, without iomp and with
# new versions of VS 2019 including -openmp:experimental and libomp, cause the
# final binary to be linked to both libomp and vcomp and produce incorrect results.
# Wait for an eventual upstream fix, or for CMake 3.30, which allows choosing between -openmp and -openmp:experimental,
# and see if choosing experimental works.
rm cmake/Modules/FindOpenMP.cmake
sedinplace 's/include(${CMAKE_CURRENT_LIST_DIR}\/Modules\/FindOpenMP.cmake)/find_package(OpenMP)/g' cmake/Dependencies.cmake
fi

#USE_FBGEMM=0 USE_KINETO=0 USE_GLOO=0 USE_MKLDNN=0 \
"$PYTHON_BIN_PATH" setup.py build

echo "CMAKECACHE"
cat $BUILD_DIR/CMakeCache.txt
cat $BUILD_DIR/CMakeFiles/CMakeConfigureLog.yaml

rm -Rf ../lib
if [[ ! -e torch/include/gloo ]]; then
ln -sf ../../third_party/gloo/gloo torch/include
Expand All @@ -216,9 +235,9 @@ ln -sf pytorch/torch/include ../include
ln -sf pytorch/torch/lib ../lib
ln -sf pytorch/torch/bin ../bin

# fix library with correct rpath on Mac
case $PLATFORM in
macosx-*)
# fix library with correct rpath
cp /usr/local/lib/libomp.dylib ../lib/libiomp5.dylib
chmod +w ../lib/libiomp5.dylib
install_name_tool -id @rpath/libiomp5.dylib ../lib/libiomp5.dylib
Expand Down
8 changes: 7 additions & 1 deletion pytorch/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,15 @@
<dependencies>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>openblas-platform</artifactId>
<artifactId>openblas</artifactId>
<version>0.3.27-${project.parent.version}</version>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>openblas</artifactId>
<version>0.3.27-${project.parent.version}</version>
<classifier>${javacpp.platform}</classifier>
</dependency>
<dependency>
<groupId>org.bytedeco</groupId>
<artifactId>cuda-platform</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@
},
exclude = {"openblas_config.h", "cblas.h", "lapacke_config.h", "lapacke_mangling.h", "lapack.h", "lapacke.h", "lapacke_utils.h"},
link = {"c10", "torch_cpu", "torch"},
preload = {"gomp@1", "iomp5", "omp", "tbb@2", "asmjit", "fbgemm", "uv"}
preload = {"gomp@1", "iomp5", "omp", "tbb@2", "asmjit", "fbgemm", "uv", "iomp5md"}
),
@Platform(
value = {"linux", "macosx", "windows"},
Expand Down

0 comments on commit 1378a19

Please sign in to comment.