Labels: data handling, feature
Bug description
When the LightningModule.val_dataloader method returns None, two unexpected behaviors take place:
- A warning appears that shouldn't be shown:
  UserWarning: Total length of `NoneType` across ranks is zero. Please make sure this was your intention.
- An error is raised, because the loop tries to run validation with None as the dataloader:
  TypeError: 'NoneType' object is not iterable
How to reproduce the bug
import os

import torch
from torch.utils.data import DataLoader, Dataset

from lightning.pytorch import LightningModule, Trainer


class RandomDataset(Dataset):
    def __init__(self, size, length):
        self.len = length
        self.data = torch.randn(length, size)

    def __getitem__(self, index):
        return self.data[index]

    def __len__(self):
        return self.len


class BoringModel(LightningModule):
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def forward(self, x):
        return self.layer(x)

    def training_step(self, batch, batch_idx):
        loss = self(batch).sum()
        self.log("train_loss", loss)
        return {"loss": loss}

    def validation_step(self, batch, batch_idx):
        loss = self(batch).sum()
        self.log("valid_loss", loss)

    def train_dataloader(self):
        return DataLoader(RandomDataset(32, 64), batch_size=2)

    def val_dataloader(self):
        # Returning None here triggers both the warning and the TypeError described above
        return None

    def configure_optimizers(self):
        return torch.optim.SGD(self.layer.parameters(), lr=0.1)


def run():
    model = BoringModel()
    trainer = Trainer(
        default_root_dir=os.getcwd(),
        limit_train_batches=1,
        limit_val_batches=1,
        num_sanity_val_steps=0,
        max_epochs=2,
        enable_model_summary=False,
    )
    trainer.fit(model)


if __name__ == "__main__":
    run()
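A possible interim workaround, sketched here under the assumption that validation is simply not wanted for this run (this is my own suggestion, not something stated in the issue): disable the validation loop explicitly instead of returning None from val_dataloader.

# Workaround sketch (assumption): setting limit_val_batches=0 disables validation,
# so the evaluation loop never tries to iterate over a None dataloader.
trainer = Trainer(
    default_root_dir=os.getcwd(),
    limit_train_batches=1,
    limit_val_batches=0,
    num_sanity_val_steps=0,
    max_epochs=2,
    enable_model_summary=False,
)
trainer.fit(model)

Whether returning None should behave the same way as disabling validation is exactly the question this issue raises.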
Error messages and logs
File "/Users/adrian/repositories/lightning/examples/pl_bug_report/bug_report_model.py", line 62, in <module>
run()
File "/Users/adrian/repositories/lightning/examples/pl_bug_report/bug_report_model.py", line 58, in run
trainer.fit(model)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/trainer/trainer.py", line 517, in fit
call._call_and_handle_interrupt(
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/trainer/call.py", line 44, in _call_and_handle_interrupt
return trainer_fn(*args, **kwargs)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/trainer/trainer.py", line 556, in _fit_impl
self._run(model, ckpt_path=ckpt_path)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/trainer/trainer.py", line 928, in _run
results = self._run_stage()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/trainer/trainer.py", line 967, in _run_stage
self._run_train()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/trainer/trainer.py", line 988, in _run_train
self.fit_loop.run()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/fit_loop.py", line 192, in run
self.advance()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/fit_loop.py", line 365, in advance
self.epoch_loop.run(self._data_fetcher)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/training_epoch_loop.py", line 134, in run
self.on_advance_end()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/training_epoch_loop.py", line 248, in on_advance_end
self.val_loop.run()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/utilities.py", line 167, in _decorator
return loop_run(self, *args, **kwargs)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/evaluation_loop.py", line 93, in run
self.reset()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/evaluation_loop.py", line 184, in reset
iter(data_fetcher) # creates the iterator inside the fetcher
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/fetchers.py", line 104, in __iter__
super().__iter__()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/loops/fetchers.py", line 54, in __iter__
self.dataloader_iter = iter(self.dataloader)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/utilities/combined_loader.py", line 242, in __iter__
iter(iterator)
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/utilities/combined_loader.py", line 121, in __iter__
super().__iter__()
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/utilities/combined_loader.py", line 35, in __iter__
self.iterators = [iter(iterable) for iterable in self.iterables]
File "/Users/adrian/repositories/lightning/src/lightning/pytorch/utilities/combined_loader.py", line 35, in <listcomp>
self.iterators = [iter(iterable) for iterable in self.iterables]
TypeError: 'NoneType' object is not iterable
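The last frames show that CombinedLoader builds its iterators directly from self.iterables, so a None entry reaches iter() unchecked. A minimal standalone sketch of the kind of guard that would avoid this TypeError, as a hypothetical illustration only (this is not Lightning's actual code, nor a fix agreed on in this issue):

# Hypothetical sketch, not Lightning code: treat None as "no dataloader provided"
# and drop it before constructing the per-dataloader iterators.
def build_iterators(iterables):
    iterators = [iter(it) for it in iterables if it is not None]
    # If every entry was None there is nothing to validate, and the caller
    # could skip the evaluation loop (and its length warning) entirely.
    return iterators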
Environment
- PyTorch Lightning Version: 2.0.0rc0
- PyTorch Version: 1.13.1
- Python version: 3.10
- OS: macOS
More info
No response