We read every piece of feedback and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent b6761ea · commit 69724e8 — Copy full SHA for 69724e8
setup.py
setup.py
@@ -48,8 +48,9 @@ def load_module_from_path(module_name, path):
                    "so vLLM may not be able to run correctly", sys.platform)
     VLLM_TARGET_DEVICE = "empty"
 elif (sys.platform.startswith("linux") and torch.version.cuda is None
-      and os.getenv("VLLM_TARGET_DEVICE") is None):
-    # if cuda is not available and VLLM_TARGET_DEVICE is not set,
+      and os.getenv("VLLM_TARGET_DEVICE") is None
+      and torch.version.hip is None):
+    # if cuda or hip is not available and VLLM_TARGET_DEVICE is not set,
     # fallback to cpu
     VLLM_TARGET_DEVICE = "cpu"
0 commit comments