
Commit b95ae65

fix the format issues
Signed-off-by: yisheng <[email protected]>
1 parent: 24f0b95

2 files changed: +6 -6 lines


accelerator/real_accelerator.py

Lines changed: 6 additions & 5 deletions
```diff
@@ -140,14 +140,15 @@ def get_accelerator():
         try:
             import torch
 
-            # torch.xpu supported in upstream pytorch.
+            # torch.xpu will be supported in upstream pytorch-2.8.
             # Currently we can run on xpu device only using pytorch,
             # also reserve the old path using ipex when the torch version is old.
             if hasattr(torch, 'xpu'):
-                if torch.cuda.device_count() == 0 and torch.xpu.device_count() > 0 and torch.xpu.is_available():
-                    accelerator_name = "xpu"
-                else:
-                    pass
+                if torch.cuda.device_count() == 0:  #ignore-cuda
+                    if torch.xpu.device_count() > 0 and torch.xpu.is_available():
+                        accelerator_name = "xpu"
+                    else:
+                        pass
         except ImportError as e:
             pass
     if accelerator_name is None:
```
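For context, the new detection order reads roughly like the sketch below. This is an illustration, not the DeepSpeed source: CUDA is ruled out first (the `#ignore-cuda` marker presumably exempts that line from DeepSpeed's torch.cuda usage check), and only then is the XPU probed. On a PyTorch build without `torch.xpu`, `accelerator_name` simply stays `None`.

```python
# Standalone sketch of the detection order after this change (an
# illustration, not the DeepSpeed source). Assumes only a stock torch
# install; torch.xpu is guarded by hasattr for builds that lack it.
import torch

accelerator_name = None
if hasattr(torch, 'xpu'):
    if torch.cuda.device_count() == 0:  # no CUDA devices visible
        if torch.xpu.device_count() > 0 and torch.xpu.is_available():
            accelerator_name = "xpu"

print(accelerator_name or "xpu not detected")
```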

accelerator/xpu_accelerator.py

Lines changed: 0 additions & 1 deletion
```diff
@@ -39,7 +39,6 @@ def is_synchronized_device(self):
         return False
 
     def use_host_timers(self):
-        # WA XPU event will be consolidated in 2.6
         if not ipex_imported_p:
             return self.is_synchronized_device()
         else:
```
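The hunk shows `use_host_timers` only partially; its shape is a dispatch on whether IPEX was importable. A minimal sketch of that pattern follows, with `ipex_imported_p` derived from an import probe and the truncated `else` branch assumed to return `True` (host-side timers), as the removed workaround comment hints; both details are assumptions, not the file's actual tail.

```python
# Sketch of the use_host_timers dispatch pattern (names mirror the hunk;
# the import probe and the else branch's return value are assumptions,
# since the hunk is truncated after "else:").
try:
    import intel_extension_for_pytorch  # noqa: F401
    ipex_imported_p = True
except ImportError:
    ipex_imported_p = False

class XpuTimerPolicy:
    def is_synchronized_device(self):
        # XPU kernels execute asynchronously, hence False.
        return False

    def use_host_timers(self):
        if not ipex_imported_p:
            # Without IPEX, defer to the synchronized-device answer.
            return self.is_synchronized_device()
        else:
            return True  # assumed: the IPEX path keeps host-side timers
```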
