diff --git a/CHANGELOG.md b/CHANGELOG.md index 863d0d577b048..2591b1955a28f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). - Fixed an issue with forward hooks not being removed after model summary ([#2298](https://github.com/PyTorchLightning/pytorch-lightning/pull/2298)) +- Fixed an issue with how `_has_len` handles `NotImplementedError` e.g. raised by `torchtext.data.Iterator` ([#2293](https://github.com/PyTorchLightning/pytorch-lightning/pull/2293)), ([#2307](https://github.com/PyTorchLightning/pytorch-lightning/pull/2307)) + +- Fixed `average_precision` metric ([#2319](https://github.com/PyTorchLightning/pytorch-lightning/pull/2319)) + - Fixed ROC metric for CUDA tensors ([#2304](https://github.com/PyTorchLightning/pytorch-lightning/pull/2304)) - Fixed `average_precision` metric ([#2319](https://github.com/PyTorchLightning/pytorch-lightning/pull/2319)) diff --git a/tests/base/dataloaders.py b/tests/base/dataloaders.py index ab4f5a5c21ca2..da47772dfb298 100644 --- a/tests/base/dataloaders.py +++ b/tests/base/dataloaders.py @@ -7,6 +7,7 @@ def __init__(self, dataloader): self.dataloader = dataloader self.iter = iter(dataloader) self.count = 0 + self.dataloader.num_workers = 0 # reduce chance for hanging pytest def __iter__(self): self.count = 0 @@ -23,23 +24,14 @@ def __next__(self): return next(self.iter) -class CustomNotImplementedErrorDataloader: - - def __init__(self, dataloader): - self.dataloader = dataloader - self.iter = iter(dataloader) - self.count = 0 +class CustomNotImplementedErrorDataloader(CustomInfDataloader): def __len__(self): """raise NotImplementedError""" raise NotImplementedError - def __iter__(self): - self.count = 0 - return self - def __next__(self): - if self.count >= 50: + if self.count >= 2: raise StopIteration self.count = self.count + 1 try: diff --git a/tests/base/model_utilities.py b/tests/base/model_utilities.py index 
5af8545e4518b..3c2293e4e0baf 100644 --- a/tests/base/model_utilities.py +++ b/tests/base/model_utilities.py @@ -12,7 +12,7 @@ def dataloader(self, train): loader = DataLoader( dataset=dataset, batch_size=self.batch_size, - num_workers=3, + num_workers=0, shuffle=train, ) return loader diff --git a/tests/trainer/test_dataloaders.py b/tests/trainer/test_dataloaders.py index 1e895cfcfd129..d4d33f01e5236 100644 --- a/tests/trainer/test_dataloaders.py +++ b/tests/trainer/test_dataloaders.py @@ -295,18 +295,6 @@ def test_train_inf_dataloader_error(tmpdir): trainer.fit(model) -@pytest.mark.skip('TODO: speed up this test') -def test_train_not_implemented_error_dataloader_error(tmpdir): - """Test not_implemented_error train data loader (e.g. IterableDataset)""" - model = EvalModelTemplate() - model.train_dataloader = model.train_dataloader__not_implemented_error - - trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, val_check_interval=0.5) - - with pytest.raises(MisconfigurationException, match='not_implemented_error DataLoader'): - trainer.fit(model) - - @pytest.mark.skip('TODO: speed up this test') def test_val_inf_dataloader_error(tmpdir): """Test inf train data loader (e.g. IterableDataset)""" @@ -319,18 +307,6 @@ def test_val_inf_dataloader_error(tmpdir): trainer.fit(model) -@pytest.mark.skip('TODO: speed up this test') -def test_val_not_implemented_error_dataloader_error(tmpdir): - """Test not_implemented_error train data loader (e.g. IterableDataset)""" - model = EvalModelTemplate() - model.val_dataloader = model.val_dataloader__not_implemented_error - - trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, limit_val_batches=0.5) - - with pytest.raises(MisconfigurationException, match='not_implemented_error DataLoader'): - trainer.fit(model) - - @pytest.mark.skip('TODO: speed up this test') def test_test_inf_dataloader_error(tmpdir): """Test inf train data loader (e.g. 
IterableDataset)""" @@ -343,18 +319,6 @@ def test_test_inf_dataloader_error(tmpdir): trainer.test(model) -@pytest.mark.skip('TODO: speed up this test') -def test_test_not_implemented_error_dataloader_error(tmpdir): - """Test not_implemented_error train data loader (e.g. IterableDataset)""" - model = EvalModelTemplate() - model.test_dataloader = model.test_dataloader__not_implemented_error - - trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, limit_test_batches=0.5) - - with pytest.raises(MisconfigurationException, match='not_implemented_error DataLoader'): - trainer.test(model) - - @pytest.mark.parametrize('check_interval', [50, 1.0]) @pytest.mark.skip('TODO: speed up this test') def test_inf_train_dataloader(tmpdir, check_interval): @@ -373,24 +337,6 @@ def test_inf_train_dataloader(tmpdir, check_interval): assert result == 1 -@pytest.mark.parametrize('check_interval', [50, 1.0]) -@pytest.mark.skip('TODO: speed up this test') -def test_not_implemented_error_train_dataloader(tmpdir, check_interval): - """Test not_implemented_error train data loader (e.g. IterableDataset)""" - - model = EvalModelTemplate() - model.train_dataloader = model.train_dataloader__not_implemented_error - - trainer = Trainer( - default_root_dir=tmpdir, - max_epochs=1, - val_check_interval=check_interval - ) - result = trainer.fit(model) - # verify training completed - assert result == 1 - - @pytest.mark.parametrize('check_interval', [1.0]) @pytest.mark.skip('TODO: speed up this test') def test_inf_val_dataloader(tmpdir, check_interval): @@ -411,26 +357,6 @@ def test_inf_val_dataloader(tmpdir, check_interval): assert result == 1 -@pytest.mark.parametrize('check_interval', [1.0]) -@pytest.mark.skip('TODO: speed up this test') -def test_not_implemented_error_dataloader(tmpdir, check_interval): - """Test not_implemented_error data loader (e.g. 
IterableDataset)""" - - model = EvalModelTemplate() - model.val_dataloader = model.val_dataloader__not_implemented_error - - # logger file to get meta - trainer = Trainer( - default_root_dir=tmpdir, - max_epochs=1, - val_check_interval=check_interval, - ) - result = trainer.fit(model) - - # verify training completed - assert result == 1 - - def test_error_on_zero_len_dataloader(tmpdir): """ Test that error is raised if a zero-length dataloader is defined """ @@ -586,3 +512,75 @@ def train_dataloader(self): # where we will get fewer metrics than gpus result = trainer.fit(model) assert 1 == result + + +@pytest.mark.parametrize('check_interval', [1.0]) +def test_val_dataloader_not_implemented_error(tmpdir, check_interval): + """Test not_implemented_error data loader (e.g. IterableDataset)""" + + model = EvalModelTemplate() + model.val_dataloader = model.val_dataloader__not_implemented_error + + # logger file to get meta + trainer = Trainer( + default_root_dir=tmpdir, + max_steps=5, + max_epochs=1, + val_check_interval=check_interval, + ) + result = trainer.fit(model) + + # verify training completed + assert result == 1 + + +@pytest.mark.parametrize('check_interval', [50, 1.0]) +def test_train_dataloader_not_implemented_error(tmpdir, check_interval): + """Test not_implemented_error train data loader (e.g. IterableDataset)""" + + model = EvalModelTemplate() + model.train_dataloader = model.train_dataloader__not_implemented_error + model.val_dataloader = model.val_dataloader__not_implemented_error + + trainer = Trainer( + default_root_dir=tmpdir, + max_steps=5, + max_epochs=1, + val_check_interval=check_interval + ) + result = trainer.fit(model) + # verify training completed + assert result == 1 + + +def test_train_dataloader_not_implemented_error_failed(tmpdir): + """Test not_implemented_error train data loader (e.g. 
IterableDataset)""" + model = EvalModelTemplate() + model.train_dataloader = model.train_dataloader__not_implemented_error + + trainer = Trainer(default_root_dir=tmpdir, max_steps=5, max_epochs=1, val_check_interval=0.5) + + with pytest.raises(MisconfigurationException, match='infinite DataLoader'): + trainer.fit(model) + + +def test_val_dataloader_not_implemented_error_failed(tmpdir): + """Test not_implemented_error val data loader (e.g. IterableDataset)""" + model = EvalModelTemplate() + model.val_dataloader = model.val_dataloader__not_implemented_error + + trainer = Trainer(default_root_dir=tmpdir, max_steps=5, max_epochs=1, limit_val_batches=0.5) + + with pytest.raises(MisconfigurationException, match='infinite DataLoader'): + trainer.fit(model) + + +def test_test_dataloader_not_implemented_error_failed(tmpdir): + """Test not_implemented_error test data loader (e.g. IterableDataset)""" + model = EvalModelTemplate() + model.test_dataloader = model.test_dataloader__not_implemented_error + + trainer = Trainer(default_root_dir=tmpdir, max_steps=5, max_epochs=1, limit_test_batches=0.5) + + with pytest.raises(MisconfigurationException, match='infinite DataLoader'): + trainer.test(model)