Code to check whether torch is using a CUDA GPU

Leejaegun · September 20, 2024
import torch

# Check whether a CUDA-capable GPU is visible to PyTorch
is_cuda = torch.cuda.is_available()
print(f"CUDA is available: {is_cuda}")

if is_cuda:
    # Number of GPUs PyTorch can see
    gpu_count = torch.cuda.device_count()
    print(f"Number of GPUs available: {gpu_count}")

    # Index of the GPU currently selected as the default device
    current_device = torch.cuda.current_device()
    print(f"Current GPU device index: {current_device}")

    # Name of each visible GPU
    for i in range(gpu_count):
        print(f"GPU {i}: {torch.cuda.get_device_name(i)}")

    # Memory actually allocated by tensors vs. memory reserved by the caching allocator (GPU 0)
    print(f"Memory allocated: {torch.cuda.memory_allocated(0) / 1e9:.2f} GB")
    print(f"Memory reserved: {torch.cuda.memory_reserved(0) / 1e9:.2f} GB")

# Fall back to the CPU when no GPU is available
device = torch.device("cuda" if is_cuda else "cpu")
print(f"Using device: {device}")
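Once the device has been selected this way, models and tensors can be moved onto it with .to(device). The snippet below is a minimal usage sketch of that pattern; the small linear model and random input are placeholders chosen here purely for illustration, not part of the original code.

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Hypothetical example model and input, only to demonstrate the .to(device) pattern
model = nn.Linear(16, 4).to(device)      # move the model's parameters to the GPU (or CPU)
x = torch.randn(8, 16, device=device)    # create the input tensor directly on the same device

with torch.no_grad():
    y = model(x)

print(y.device)  # prints cuda:0 when a GPU is available, cpu otherwise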