#!/bin/bash
# Diagnose whether Ollama is using the GPU on an NVIDIA Jetson (Tegra) board.
# Prints: ollama binary arch/version, GPU-utility availability (nvidia-smi vs
# tegrastats), Ollama's own "inference compute" log line (the authoritative
# answer), the systemd service environment, JetPack/CUDA versions, and the
# presence of Ollama's bundled runner library directory.
#
# Read-only: inspects logs and the filesystem; changes nothing.

echo "=== ollama arch + version ==="
# 'command -v' is the portable check; quote the substitution so a path with
# spaces (or an empty result) doesn't word-split.
file "$(command -v ollama)"; ollama --version; uname -m
echo
echo "=== does nvidia-smi work on Jetson? ==="
# NOTE: a plain 'nvidia-smi 2>&1 | head -5 || echo …' can never reach the
# fallback — 'head' exits 0 even when nvidia-smi is absent (its error text is
# piped in via 2>&1). Test for the binary explicitly instead.
if command -v nvidia-smi >/dev/null 2>&1; then
  nvidia-smi 2>&1 | head -5
else
  echo "NO nvidia-smi (expected on JetPack 5 — Tegra uses tegrastats)"
fi
echo
echo "=== tegrastats (Jetson GPU util) — 2 s sample ==="
# tegrastats streams forever; 'timeout' bounds the sample.
timeout 2 tegrastats 2>&1 | head -2
echo
echo "=== Ollama 'inference compute' line — THE answer ==="
# Ollama logs one 'inference compute' line per detected device at startup;
# 'library=cuda' there means the GPU runner is in use (library=cpu means not).
journalctl -u ollama -n 200 --no-pager 2>/dev/null | grep -E "inference compute|vram|library=|starting runner|GPU" | tail -15
echo
echo "=== Ollama service env ==="
# Environment= overrides (e.g. OLLAMA_LLM_LIBRARY, LD_LIBRARY_PATH) and the
# ExecStart path explain which runner binaries the service can see.
systemctl cat ollama 2>/dev/null | grep -E "Environment|ExecStart"
echo
echo "=== JetPack / CUDA on this box ==="
# /etc/nv_tegra_release identifies the L4T (JetPack) release; the libcudart
# listing shows the installed CUDA runtime version.
head -1 /etc/nv_tegra_release 2>/dev/null
ls /usr/local/cuda/lib64/libcudart.so* 2>/dev/null | head -3
echo
echo "=== does Ollama's own lib dir exist? (stock install) ==="
# Stock installs ship GPU runner libraries here; an empty/missing dir suggests
# a CPU-only build or a non-standard install.
ls /usr/lib/ollama/ /usr/local/lib/ollama/ 2>/dev/null