Using system generated seed in RandomSampler #1441
base: main
Changes from 4 commits
In the sampler tests:

@@ -14,7 +14,7 @@
 from torch.utils.data import Dataset

 from torchdata.stateful_dataloader import StatefulDataLoader
-from torchdata.stateful_dataloader.sampler import StatefulDistributedSampler
+from torchdata.stateful_dataloader.sampler import RandomSampler, StatefulDistributedSampler


 class MockDataset(Dataset):
@@ -34,7 +34,10 @@ def __getitem__(self, idx):
     "Fails with TSAN with the following error: starting new threads after multi-threaded "
     "fork is not supported. Dying (set die_after_fork=0 to override)",
 )
-@unittest.skipIf(TEST_WITH_ASAN, "DataLoader tests hang in ASAN, see: https://github.com/pytorch/pytorch/issues/66223")
+@unittest.skipIf(
+    TEST_WITH_ASAN,
+    "DataLoader tests hang in ASAN, see: https://github.com/pytorch/pytorch/issues/66223",
+)
 class TestDataLoader(TestCase):
     def setUp(self):
         super().setUp()
@@ -44,7 +47,12 @@ def setUp(self):
     def test_initialization_StatefulDistributedSampler(self):

         sampler = StatefulDistributedSampler(
-            self.dataset, num_replicas=10, rank=0, shuffle=False, seed=42, drop_last=False
+            self.dataset,
+            num_replicas=10,
+            rank=0,
+            shuffle=False,
+            seed=42,
+            drop_last=False,
         )
         self.assertEqual(sampler.dataset, self.dataset)
         self.assertEqual(sampler.num_replicas, 10)
@@ -139,7 +147,8 @@ def test_drop_last_effect(self):
         )

         self.assertTrue(
-            len(indices_with_drop) <= len(indices_without_drop), "Drop last should result in fewer or equal indices"
+            len(indices_with_drop) <= len(indices_without_drop),
+            "Drop last should result in fewer or equal indices",
         )

     def test_data_order_with_shuffle(self):
@@ -153,7 +162,11 @@ def test_data_order_with_shuffle(self):
         for batch in dataloader:
             data_loaded.extend(batch)
         self.assertEqual(len(data_loaded), len(self.dataset), "All data should be loaded")
-        self.assertEqual(data_loaded, data_sampled, "Data loaded by DataLoader should match data sampled by sampler")
+        self.assertEqual(
+            data_loaded,
+            data_sampled,
+            "Data loaded by DataLoader should match data sampled by sampler",
+        )

     def test_data_order_without_shuffle(self):
         sampler = StatefulDistributedSampler(self.dataset, num_replicas=1, rank=0, shuffle=False)
@@ -167,8 +180,16 @@ def test_data_order_without_shuffle(self):
         for batch in dataloader:
             data_loaded.extend(batch)
         self.assertEqual(len(data_loaded), len(self.dataset), "All data should be loaded")
-        self.assertEqual(data_loaded, data_sampled, "Data loaded by DataLoader should match data sampled by sampler")
-        self.assertEqual(data_loaded, list(range(100)), "Data loaded by DataLoader should be in original order")
+        self.assertEqual(
+            data_loaded,
+            data_sampled,
+            "Data loaded by DataLoader should match data sampled by sampler",
+        )
+        self.assertEqual(
+            data_loaded,
+            list(range(100)),
+            "Data loaded by DataLoader should be in original order",
+        )

     def test_data_distribution_across_replicas(self):
         num_replicas = 5
@@ -181,9 +202,37 @@ def test_data_distribution_across_replicas(self):
             data_loaded.extend([int(x.item()) for x in batch])
         all_data.extend(data_loaded)
         self.assertEqual(
-            sorted(all_data), list(range(100)), "All data points should be covered exactly once across all replicas"
+            sorted(all_data),
+            list(range(100)),
+            "All data points should be covered exactly once across all replicas",
         )

+    def test_seed_replicability(self):
+        seed = 0
+        torch.manual_seed(seed)
+        dl1 = StatefulDataLoader(self.dataset, batch_size=1, shuffle=True)
+        data_dl1 = []
+        for batch in dl1:
+            data_dl1.append(batch)
+
+        seed = 0
+        torch.manual_seed(seed)
+        dl2 = StatefulDataLoader(self.dataset, batch_size=1, shuffle=True)
+        data_dl2 = []
+        for batch in dl2:
+            data_dl2.append(batch)
+
+        seed = 1
+        torch.manual_seed(seed)
+        dl3 = StatefulDataLoader(self.dataset, batch_size=1, shuffle=True)
+        data_dl3 = []
+        for batch in dl3:
+            data_dl3.append(batch)
+
+        self.assertEqual(data_dl1, data_dl2, "Data should be replicable with same seed")
+        self.assertNotEqual(data_dl1, data_dl3, "Data should not be replicable with different seed")
+

 if __name__ == "__main__":
     run_tests()

Review comment (on the dl3 = StatefulDataLoader(...) line): We can rename …
Review comment (on the data_dl3 = [] line): Can we call this …
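For context on why torch.manual_seed pins the loader's shuffle order here: with this change, a RandomSampler built without an explicit generator derives its seed from torch's global default RNG, which torch.manual_seed controls. A minimal sketch of the same property at the sampler level (assuming the RandomSampler from this PR; this snippet is not part of the diff):

    import torch
    from torchdata.stateful_dataloader.sampler import RandomSampler

    data = list(range(10))

    # Same global seed -> the same system-generated sampler seed,
    # hence the same permutation of indices.
    torch.manual_seed(0)
    order_a = list(RandomSampler(data))
    torch.manual_seed(0)
    order_b = list(RandomSampler(data))
    assert order_a == order_b

    # A different global seed yields (almost surely) a different permutation.
    torch.manual_seed(1)
    order_c = list(RandomSampler(data))
    assert order_a != order_c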
In torchdata/stateful_dataloader/sampler.py:

@@ -48,12 +48,17 @@ def state_dict(self) -> Dict[str, Any]:

 class RandomSampler(torch.utils.data.sampler.RandomSampler):
     def __init__(
-        self, data_source: Sized, replacement: bool = False, num_samples: Optional[int] = None, generator=None
+        self,
+        data_source: Sized,
+        replacement: bool = False,
+        num_samples: Optional[int] = None,
+        generator=None,
     ):
         if generator is None:
             # Ensure that underlying sampler has something repeatable
+            seed = int(torch.empty((), dtype=torch.int64).random_().item())
             generator = torch.Generator()
-            generator.manual_seed(1)
+            generator.manual_seed(seed)
         super().__init__(data_source, replacement, num_samples, generator)

     def __iter__(self):

Review comment (on the "Ensure that underlying sampler has something repeatable" comment): let's remove or update this comment
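The added seeding line uses the same idiom as upstream torch.utils.data.RandomSampler: random_() on a 0-d int64 tensor draws a random integer from the global default generator (so it is reproducible under torch.manual_seed), and that integer then seeds a fresh per-sampler torch.Generator. A standalone illustration of the idiom:

    import torch

    # Draw a seed from the global default RNG, as in the added line above.
    torch.manual_seed(42)
    seed = int(torch.empty((), dtype=torch.int64).random_().item())

    # Seed a dedicated generator: the sampler's stream is now decoupled from
    # later uses of the global RNG, but still reproducible end to end.
    generator = torch.Generator()
    generator.manual_seed(seed)
    print(torch.randperm(5, generator=generator))  # deterministic given the seed above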
Review comment: Instead of checking for the specific seeds 0 and 1, we can generalize the test to use two randomly generated seeds, and also add an assert to ensure the two seeds are not equal.
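A sketch of what that generalization could look like (the load_all helper and drawing seeds via torch.randint are illustrative choices, not part of the PR):

    def test_seed_replicability(self):
        def load_all(seed):
            # Re-seed the global RNG, then collect everything the loader yields.
            torch.manual_seed(seed)
            dl = StatefulDataLoader(self.dataset, batch_size=1, shuffle=True)
            return [batch for batch in dl]

        # Two randomly generated seeds instead of the literals 0 and 1 ...
        seed_a = int(torch.randint(0, 2**31, ()).item())
        seed_b = int(torch.randint(0, 2**31, ()).item())
        # ... plus the suggested assert that the seeds actually differ.
        self.assertNotEqual(seed_a, seed_b, "seeds drawn for the test should differ")

        self.assertEqual(load_all(seed_a), load_all(seed_a), "Data should be replicable with same seed")
        self.assertNotEqual(load_all(seed_a), load_all(seed_b), "Data should not be replicable with different seed")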