diff options
| author | Yuren Hao <yurenh2@timan108.cs.illinois.edu> | 2025-09-10 14:58:32 -0500 |
|---|---|---|
| committer | Yuren Hao <yurenh2@timan108.cs.illinois.edu> | 2025-09-10 14:58:32 -0500 |
| commit | 9bdd1091a8045d74823d1f5de94b041805e42c3f (patch) | |
| tree | b63843e7bce35b95bc3bd87aaee19059fdf2982c /.tmp_gpu_check2.py | |
| parent | 9d5f2379ac25b4b58e2600544f61172dbb15b67a (diff) | |
Diffstat (limited to '.tmp_gpu_check2.py')
| -rw-r--r-- | .tmp_gpu_check2.py | 16 |
1 file changed, 16 insertions, 0 deletions
"""Diagnostic script: report CUDA/GPU visibility for the current environment.

Prints the CUDA_VISIBLE_DEVICES environment variable, the installed torch
version and its CUDA build, and the name of each visible CUDA device.
Best-effort by design: any failure is printed rather than raised, because
this is a throwaway environment check that must never crash.
"""
import os


def main() -> None:
    """Print CUDA environment, build, and per-device diagnostics to stdout."""
    print("ENV CUDA_VISIBLE_DEVICES:", os.environ.get("CUDA_VISIBLE_DEVICES"))
    try:
        # Imported lazily so a broken/missing torch install is reported, not fatal.
        import torch

        print("torch:", torch.__version__, "cuda:", getattr(torch.version, "cuda", None))
        print("built_with_cuda:", torch.backends.cuda.is_built())
        print("device_count:", torch.cuda.device_count())
        print("is_available:", torch.cuda.is_available())
        # range(0) is an empty loop, so no separate device_count > 0 guard is needed.
        for i in range(torch.cuda.device_count()):
            try:
                print(f"[{i}]", torch.cuda.get_device_name(i))
            except Exception as e:  # best-effort: report per-device name failures inline
                print(f"[{i}] name error:", e)
    except Exception as e:  # broad by design: a diagnostic must report, never raise
        print("torch error:", repr(e))


if __name__ == "__main__":
    main()
