import torch


def check_torch_gpu():
    """Report PyTorch version, CUDA availability, and details for each visible GPU."""
    print(f"PyTorch version: {torch.__version__}")
    cuda_available = torch.cuda.is_available()
    print(f"CUDA available: {cuda_available}")

    if cuda_available:
        print(f"CUDA version: {torch.version.cuda}")
        device_count = torch.cuda.device_count()
        print(f"Number of available GPUs: {device_count}")
        for i in range(device_count):
            # Per-device details: name, compute capability, and total memory in GB
            print(f"\nGPU {i}:")
            print(f"  Name: {torch.cuda.get_device_name(i)}")
            print(f"  Compute Capability: {torch.cuda.get_device_capability(i)}")
            print(f"  Total Memory: {torch.cuda.get_device_properties(i).total_memory / 1024**3:.2f} GB")
    else:
        print("No GPU available. PyTorch will use CPU.")


if __name__ == "__main__":
    check_torch_gpu()