diff options
| author | haoyuren <13851610112@163.com> | 2026-02-22 12:01:17 -0600 |
|---|---|---|
| committer | haoyuren <13851610112@163.com> | 2026-02-22 12:01:17 -0600 |
| commit | dda6db0777620f8139bd476e27e6b275c0679358 (patch) | |
| tree | ac2c52e6f1b28b3b4b1ceef15efc9d2997459a94 /train_colab.ipynb | |
| parent | 7e15218730fe86b88ac0a53cc84bf929416a0687 (diff) | |
Fix total_mem → total_memory in Colab GPU check
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Diffstat (limited to 'train_colab.ipynb')
| -rw-r--r-- | train_colab.ipynb | 2 |
1 file changed, 1 insertion, 1 deletion
diff --git a/train_colab.ipynb b/train_colab.ipynb index a591beb..2a092d7 100644 --- a/train_colab.ipynb +++ b/train_colab.ipynb @@ -34,7 +34,7 @@ { "cell_type": "code", "metadata": {}, - "source": "import torch\nprint(f\"PyTorch: {torch.__version__}\")\nprint(f\"CUDA available: {torch.cuda.is_available()}\")\nif torch.cuda.is_available():\n print(f\"GPU: {torch.cuda.get_device_name(0)}\")\n print(f\"Memory: {torch.cuda.get_device_properties(0).total_mem / 1024**3:.1f} GB\")\nelse:\n print(\"WARNING: No GPU detected. Go to Runtime → Change runtime type → GPU\")", + "source": "import torch\nprint(f\"PyTorch: {torch.__version__}\")\nprint(f\"CUDA available: {torch.cuda.is_available()}\")\nif torch.cuda.is_available():\n print(f\"GPU: {torch.cuda.get_device_name(0)}\")\n print(f\"Memory: {torch.cuda.get_device_properties(0).total_memory / 1024**3:.1f} GB\")\nelse:\n print(\"WARNING: No GPU detected. Go to Runtime → Change runtime type → GPU\")", "execution_count": null, "outputs": [] }, |
