3 files changed, +7 −7 lines changed

Linux CUDA x64 release workflow (build-linux-cuda-x64-release and its test job):

  dockerfile_path: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda
  docker_build_args: '--build-arg BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1'
  docker_image_repo: onnxruntimecuda12manylinuxbuild
- extra_build_flags: '--use_binskim_compliant_compile_flags --build_wheel --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --enable_cuda_profiling --build_java --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=80 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
+ extra_build_flags: '--use_binskim_compliant_compile_flags --build_wheel --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --enable_cuda_profiling --build_java --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=90 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
  python_path_prefix: 'PATH=/opt/python/cp310-cp310/bin:$PATH'
  run_tests: false  # <<< Do not run tests in this job
  upload_build_output: true  # <<< Upload the build/Release directory

  needs: build-linux-cuda-x64-release
  runs-on:
  - self-hosted
- - "1ES.Pool=Onnxruntime-github-Linux-GPU-A100-WUS3"
+ - "1ES.Pool=Onnxruntime-github-Linux-GPU-H100"
  permissions:
  contents: read
  packages: read

  build_config: Release
  mode: 'test'  # Set mode to test
  execution_providers: 'cuda'
- extra_build_flags: '--use_binskim_compliant_compile_flags --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --enable_cuda_profiling --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=80 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
+ extra_build_flags: '--use_binskim_compliant_compile_flags --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --enable_cuda_profiling --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=90 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
  python_path_prefix: 'PATH=/opt/python/cp310-cp310/bin:$PATH'
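
For context (not part of the diff): CMAKE_CUDA_ARCHITECTURES=90 targets the Hopper generation (compute capability 9.0, e.g. H100), matching the new 1ES H100 pool, while the previous value of 80 targeted Ampere (8.0, e.g. A100). A minimal C++/CUDA-runtime sketch — hypothetical, not from this repository — that a runner could use to confirm the visible GPU matches the compiled architecture:

```cpp
// check_arch.cu — hypothetical sanity check, not part of this PR.
// Build (assumption): nvcc -arch=sm_90 check_arch.cu -o check_arch
#include <cstdio>
#include <cuda_runtime.h>

int main() {
  cudaDeviceProp prop{};
  // Query the first visible device on the runner.
  if (cudaGetDeviceProperties(&prop, 0) != cudaSuccess) {
    std::fprintf(stderr, "No CUDA device visible\n");
    return 1;
  }
  std::printf("GPU: %s, compute capability %d.%d\n", prop.name, prop.major, prop.minor);
  // A binary built only for sm_90 cannot load kernels on an sm_80 (A100) device
  // unless PTX is embedded, so the pool and CMAKE_CUDA_ARCHITECTURES must agree.
  return (prop.major == 9) ? 0 : 2;
}
```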

Linux TensorRT x64 release workflow (build-linux-TensorRT-x64-release and its test job):

  dockerfile_path: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda
  docker_build_args: '--build-arg BASEIMAGE=onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20250124.1 --build-arg TRT_VERSION=10.9.0.34-1.cuda12.8 --network=host'
  docker_image_repo: onnxruntimetensorrt86gpubuild
- extra_build_flags: '--use_binskim_compliant_compile_flags --build_wheel --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --use_tensorrt --tensorrt_home /usr --build_java --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=80 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
+ extra_build_flags: '--use_binskim_compliant_compile_flags --build_wheel --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --use_tensorrt --tensorrt_home /usr --build_java --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=90 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
  python_path_prefix: 'PATH=/opt/python/cp310-cp310/bin:$PATH'
  run_tests: false  # <<< Do not run tests in this job
  upload_build_output: true  # <<< Upload the build/Release directory

  needs: build-linux-TensorRT-x64-release
  runs-on:
  - self-hosted
- - "1ES.Pool=Onnxruntime-github-Linux-GPU-A100-WUS3"
+ - "1ES.Pool=Onnxruntime-github-Linux-GPU-H100"
  permissions:
  contents: read
  packages: read

@@ -100,5 +100,5 @@ jobs:
  build_config: Release
  mode: 'test'  # Set mode to test
  execution_providers: 'cuda tensorrt'
- extra_build_flags: '--use_binskim_compliant_compile_flags --build_wheel --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --use_tensorrt --tensorrt_home /usr --build_java --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=80 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
+ extra_build_flags: '--use_binskim_compliant_compile_flags --build_wheel --cuda_version=12.2 --cuda_home=/usr/local/cuda-12.2 --cudnn_home=/usr/local/cuda-12.2 --use_tensorrt --tensorrt_home /usr --build_java --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=90 onnxruntime_BUILD_UNIT_TESTS=ON onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON'
  python_path_prefix: 'PATH=/opt/python/cp310-cp310/bin:$PATH'

Test tolerances under onnxruntime/test/providers/cuda/nhwc:

  } else if (std::is_same<T, double>::value) {  \
    MAKE_PROVIDERS_EPS_EXT(2e-4, pad_to_nc1d)   \
  } else {                                      \
-   MAKE_PROVIDERS_EPS_EXT(2e-3, pad_to_nc1d)   \
+   MAKE_PROVIDERS_EPS_EXT(4e-3, pad_to_nc1d)   \
  }

  #define MAKE_PROVIDERS_EPS_TYPE(T) \
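
The loosened tolerance (2e-3 → 4e-3) applies only to the final else branch, presumably the reduced-precision element types; half precision has a ~10-bit mantissa (machine epsilon ≈ 9.8e-4), so accumulated error on a different GPU generation can plausibly exceed the old 2e-3 bound. A small self-contained sketch — illustrative only, names are hypothetical and not the actual MAKE_PROVIDERS_EPS_EXT machinery — of the per-type tolerance pattern the macro implements:

```cpp
// tolerance_sketch.cpp — hypothetical illustration of per-type test tolerances.
#include <cmath>
#include <cstdio>
#include <type_traits>

// Pick an absolute tolerance by element type, mirroring the values visible in
// the diff: 2e-4 for double, 4e-3 for the reduced-precision fallback
// (e.g. half precision, whose machine epsilon is 2^-10 ≈ 9.77e-4).
template <typename T>
constexpr double TestEps() {
  return std::is_same<T, double>::value ? 2e-4 : 4e-3;
}

bool NearlyEqual(double expected, double actual, double eps) {
  return std::fabs(expected - actual) <= eps;
}

int main() {
  // Usage example: a 3e-3 deviation fails the old 2e-3 bound but passes 4e-3.
  std::printf("%d\n", NearlyEqual(1.0, 1.003, TestEps<float>()));  // prints 1
  return 0;
}
```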