1 parent f5404dc commit bdbd417
tests/e2e/multicard/test_pyhccl_distributed.py
@@ -76,8 +76,8 @@ def worker_fn():
     assert torch.all(tensor == pynccl_comm.world_size).cpu().item()


-# def test_pyhccl():
-#     distributed_run(worker_fn, 2)
+def test_pyhccl():
+    distributed_run(worker_fn, 2)


 @worker_fn_wrapper
@@ -106,5 +106,5 @@ def broadcast_worker_fn():
         assert torch.all(recv_tensors[i] == i).cpu().item()


-# def test_pyhccl_broadcast():
-#     distributed_run(broadcast_worker_fn, 4)
+def test_pyhccl_broadcast():
+    distributed_run(broadcast_worker_fn, 2)
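
The re-enabled tests lean on the repository's `distributed_run` / `worker_fn_wrapper` test helpers. Below is a minimal, self-contained sketch of that harness pattern for readers unfamiliar with it; the env-var names, the CPU "gloo" backend, the fixed port, and the simplified worker body are illustrative assumptions, not the actual vllm-ascend implementation (which targets HCCL on NPUs).

```python
# Sketch of a distributed_run / worker_fn_wrapper style test harness.
# Assumptions: fork start method (closures are not picklable under spawn),
# gloo backend on CPU, and hard-coded rendezvous address/port.
import multiprocessing
import os

import torch
import torch.distributed as dist


def distributed_run(fn, world_size):
    # Spawn one process per rank and fail if any worker exits non-zero.
    ctx = multiprocessing.get_context("fork")
    procs = []
    for rank in range(world_size):
        env = {
            "RANK": str(rank),
            "WORLD_SIZE": str(world_size),
            "MASTER_ADDR": "127.0.0.1",
            "MASTER_PORT": "29500",
        }
        p = ctx.Process(target=_run_with_env, args=(fn, env))
        p.start()
        procs.append(p)
    for p in procs:
        p.join()
        assert p.exitcode == 0


def _run_with_env(fn, env):
    os.environ.update(env)
    fn()


def worker_fn_wrapper(fn):
    # Initialize the default process group before the worker body runs.
    def wrapped():
        dist.init_process_group(
            backend="gloo",
            rank=int(os.environ["RANK"]),
            world_size=int(os.environ["WORLD_SIZE"]),
        )
        try:
            fn()
        finally:
            dist.destroy_process_group()

    return wrapped


@worker_fn_wrapper
def worker_fn():
    # Stand-in for the all-reduce check in the real test: every rank
    # contributes ones, so the reduced value equals the world size.
    tensor = torch.ones(4)
    dist.all_reduce(tensor)
    assert torch.all(tensor == dist.get_world_size()).item()


if __name__ == "__main__":
    distributed_run(worker_fn, 2)
```

In this pattern, un-commenting `test_pyhccl` and `test_pyhccl_broadcast` simply registers the existing worker functions with pytest again; each test spawns its own process group per run.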