3/07/2025

Check whether PyTorch can see and use my GPU

checkgpu.py


import torch

# Check PyTorch version
print(f"PyTorch version: {torch.__version__}")

# Check if CUDA/ROCm is available (unified API in newer PyTorch)
print(f"Is GPU available: {torch.cuda.is_available()}")

# Check how many GPUs are available
if torch.cuda.is_available():
    print(f"Number of GPUs: {torch.cuda.device_count()}")
    # Print device properties for each GPU
    for i in range(torch.cuda.device_count()):
        props = torch.cuda.get_device_properties(i)
        print(f"\nDevice {i}: {props.name}")
        print(f"  Total memory: {props.total_memory / 1024**3:.2f} GB")
        if hasattr(props, 'major'):
            print(f"  Compute capability: {props.major}.{props.minor}")

# Try a simple operation on GPU
if torch.cuda.is_available():
    device = torch.device("cuda:0")  # Use the first GPU
    x = torch.ones(5, 5, device=device)
    y = x + 1
    print("\nGPU computation test:")
    print(y)
    print("GPU computation successful! study.marearts.com")
else:
    print("\nNo GPUs available for PyTorch.")

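If the check above passes, the next step in my own scripts is usually a small device-selection fallback, so the same code runs on machines with or without a GPU. A minimal sketch (the tensor shape and the Linear layer are just placeholders, not part of checkgpu.py):

import torch

# Pick the first GPU if PyTorch can see one, otherwise fall back to the CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(f"Using device: {device}")

# Tensors and models can then be created on (or moved to) that device.
x = torch.randn(3, 3, device=device)
model = torch.nn.Linear(3, 3).to(device)
print(model(x))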

🙏

Thank you!
