From 3f4e22015a6796a0807650d7b2f97b108f8760df Mon Sep 17 00:00:00 2001
From: Jiaming Yuan
Date: Sat, 25 Nov 2023 11:25:47 +0800
Subject: [PATCH] Mark NCCL python test optional. (#9804)

Skip the tests if XGBoost is not compiled with dlopen.
---
 .../test_gpu_with_dask/test_gpu_with_dask.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py b/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
index 883dbbaf234b..f25ac9fb0853 100644
--- a/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
+++ b/tests/test_distributed/test_gpu_with_dask/test_gpu_with_dask.py
@@ -573,6 +573,10 @@ def test_with_asyncio(local_cuda_client: Client) -> None:
     assert isinstance(output["history"], dict)
 
 
+@pytest.mark.skipif(
+    condition=not xgb.build_info()["USE_DLOPEN_NCCL"],
+    reason="Not compiled with dlopen.",
+)
 def test_invalid_nccl(local_cuda_client: Client) -> None:
     client = local_cuda_client
     workers = tm.get_client_workers(client)
@@ -592,6 +596,10 @@ def run(wid: int) -> None:
     client.gather(futures)
 
 
+@pytest.mark.skipif(
+    condition=not xgb.build_info()["USE_DLOPEN_NCCL"],
+    reason="Not compiled with dlopen.",
+)
 @pytest.mark.parametrize("tree_method", ["hist", "approx"])
 def test_nccl_load(local_cuda_client: Client, tree_method: str) -> None:
     X, y, w = tm.make_regression(128, 16, use_cupy=True)
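
For context (not part of the patch itself): the guard added above keys off the dict returned by xgboost's build_info(). Below is a minimal standalone sketch of the same skip condition applied at module scope; build_info() and the USE_DLOPEN_NCCL key come from the diff above, while the test function is a hypothetical placeholder.

    import pytest
    import xgboost as xgb

    # Skip every test in this module unless XGBoost was built to load NCCL via dlopen.
    pytestmark = pytest.mark.skipif(
        condition=not xgb.build_info()["USE_DLOPEN_NCCL"],
        reason="Not compiled with dlopen.",
    )

    def test_nccl_available() -> None:
        # Hypothetical placeholder: only runs when the dlopen-based NCCL loader is enabled.
        assert xgb.build_info()["USE_DLOPEN_NCCL"]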