# Copyright 2024 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from torch.utils.data import DataLoader

from ..utils import is_torch_xla_available


def tpu_spmd_dataloader(dataloader: DataLoader):
    """Attach an SPMD input-sharding spec to an XLA `MpDeviceLoader` so that
    each batch's 0th (batch) dimension is sharded across the "fsdp" mesh axis.
    Returns the dataloader unchanged when torch_xla is unavailable."""
    if is_torch_xla_available():
        import torch_xla.distributed.parallel_loader as pl

        assert isinstance(
            dataloader, pl.MpDeviceLoader
        ), "The dataloader must be a `torch_xla.distributed.parallel_loader.MpDeviceLoader`."

        # This is to support PyTorch/XLA FSDP via SPMD.
        # Here we shard the input data's 0th dim across the fsdp axis.
        import torch_xla.distributed.spmd as xs

        sharding_spec = xs.ShardingSpec(xs.get_global_mesh(), ("fsdp", None))
        dataloader._parallel_loader_kwargs["input_sharding"] = sharding_spec

        return dataloader
    else:
        return dataloader
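

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes a
# TPU SPMD environment in which a global device mesh with an "fsdp" axis has
# been registered via `xs.set_global_mesh`, and a pre-existing DataLoader
# named `train_dataloader` (hypothetical) wrapped in `pl.MpDeviceLoader`:
#
#     import numpy as np
#     import torch_xla.core.xla_model as xm
#     import torch_xla.distributed.parallel_loader as pl
#     import torch_xla.distributed.spmd as xs
#     import torch_xla.runtime as xr
#
#     # Build a 1-D device mesh whose single axis is named "fsdp" and
#     # register it globally so `xs.get_global_mesh()` can find it.
#     num_devices = xr.global_runtime_device_count()
#     mesh = xs.Mesh(np.arange(num_devices), (num_devices,), ("fsdp",))
#     xs.set_global_mesh(mesh)
#
#     # Wrap the ordinary DataLoader for XLA device loading, then attach the
#     # batch-dimension sharding spec via `tpu_spmd_dataloader`.
#     device_loader = pl.MpDeviceLoader(train_dataloader, xm.xla_device())
#     device_loader = tpu_spmd_dataloader(device_loader)
#     for batch in device_loader:
#         ...
# ---------------------------------------------------------------------------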