# Marcus/run_marcus.py  (file-listing metadata from the paste — kept as a comment so the file parses)
#!/usr/bin/env python3
"""
run_marcus.py — Marcus AI Brain (terminal mode)
Usage: python3 run_marcus.py
"""
import os
import sys
import warnings

# Silence known-harmless third-party warnings before ANY heavy import fires
# them. Keeps the terminal dashboard readable.
#   - TypedStorage    : fires from torch during yolov8m.pt checkpoint load
#   - torch.cuda.amp  : fires in ultralytics when FP16 is enabled on Jetson torch 2.1
#   - Whisper CPU     : fires on every transcribe call; we intentionally force CPU
#                       to avoid a torch-aarch64 CUDA deserialization bug
warnings.filterwarnings("ignore", message=".*TypedStorage is deprecated.*")
warnings.filterwarnings("ignore", message=".*torch\\.cuda\\.amp.*")
warnings.filterwarnings("ignore", message=".*Performing inference on CPU when CUDA is available.*")
# Propagate the same suppression to any subprocesses torch spawns.
os.environ.setdefault("PYTHONWARNINGS", "ignore::UserWarning:torch._utils")

# Make the project root importable (for the Brain package) regardless of the
# working directory this script is launched from.
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)

# NOTE: we intentionally do NOT try to lower Marcus's oom_score_adj here.
# The Linux kernel requires CAP_SYS_RESOURCE to set a negative oom_score_adj,
# and Marcus typically runs as an unprivileged user, so any write fails with
# PermissionError — silent no-op. Instead, install_ollama_jetson.sh adds
# OOMScoreAdjust=500 to the Ollama systemd unit. Under memory pressure the
# kernel then kills Ollama (auto-restarts, model cold-loads again on next
# vision query) rather than Marcus (robot brain).

# Deliberately imported AFTER the warning filters and sys.path bootstrap above
# — this import pulls in the heavy torch/ultralytics stack.
from Brain.marcus_brain import run_terminal  # noqa: E402

if __name__ == "__main__":
    run_terminal()