// Gargantua v2 — main orchestrator.
// Owns WebGL2 context, HDR pipeline (raymarch -> ping-pong bloom -> composite),
// camera + interaction state, telemetry, captions, presets, particles, share.
//
// Companion files (loaded before this one):
//   blackhole-shader.js   — shader source strings
//   blackhole-audio.jsx   — BlackHoleAudio class (sub-bass + hooks)
//   blackhole-hud.jsx     — BHControlPanel / BHTelemetry / BHCaption / etc.

// Pull the hooks off the global React object (this file is loaded as a plain
// script, not an ES module). The `_bh`-prefixed intermediates keep the
// destructured names distinct from the bare aliases the component code uses;
// both sets of bindings are preserved.
const _bhUseState = React.useState;
const _bhUseEffect = React.useEffect;
const _bhUseRef = React.useRef;
const useState = _bhUseState;
const useEffect = _bhUseEffect;
const useRef = _bhUseRef;

// ---- camera presets --------------------------------------------------------
// Camera targets the frame loop eases toward (targetYaw / targetPitch /
// targetDist / targetFov). yaw and pitch are angles in radians (they feed
// Math.cos / Math.sin when the camera basis is built); dist is the camera's
// distance from the origin; fov is the projection scale passed to the
// raymarch shader. Hotkeys 1-6 select presets by array index, and
// applyPreset() looks entries up by `key`.
const BH_PRESETS = [
  {
    key: "photon",
    label: "Photon Sphere",
    yaw: 0.4,
    pitch: 0.05,
    dist: 4.2,
    fov: 1.0,
  },
  {
    key: "jet",
    label: "Polar Jet",
    yaw: 1.5,
    pitch: 1.05,
    dist: 9.0,
    fov: 1.05,
  },
  {
    key: "horizon",
    label: "Horizon Skim",
    yaw: 0.2,
    pitch: -0.02,
    dist: 3.4,
    fov: 1.15,
  },
  {
    key: "edge",
    label: "Disk Edge-on",
    yaw: 0.0,
    pitch: 0.015,
    dist: 8.5,
    fov: 0.95,
  },
  {
    key: "wormhole",
    label: "Wormhole",
    yaw: 0.0,
    pitch: 0.0,
    // dist 2.6 + fov 1.6 triggered GPU TDR (driver timeout) on software
    // renderers — too many ray-march iterations near the event horizon.
    // Pulled back to 5.0 / 1.3 — still dramatic, no longer fries the driver.
    dist: 5.0,
    fov: 1.3,
  },
  {
    key: "free",
    label: "Free Orbit",
    yaw: 0.4,
    pitch: 0.18,
    dist: 11.0,
    fov: 1.0,
  },
];

// Kid-friendly fact strings. A random one is surfaced by the fact pop-up
// interval (every 25s with 70% probability, shown for ~7s — see the
// factTimer in the main effect).
const BH_FACTS = [
  "A black hole is a place in space with a super strong pull.",
  "The bright ring is hot space dust swirling around the black hole.",
  "Light bends near a black hole, so space can look stretchy and swirly.",
  "The safest way to explore is from far away, like a space telescope.",
  "Some black holes shoot glowing jets high above and below the swirl.",
  "The dark middle is called the event horizon.",
  "Drag gently to look around the glowing space whirlpool.",
];

// On-screen feature callouts keyed by region name. NOTE(review): the
// consumer is not visible in this chunk — presumably the look-target
// detection in the frame loop picks one of these for the `lookLabel` state;
// confirm against blackhole-hud.jsx before renaming keys.
const BH_LABELS = {
  photonSphere: {
    title: "PHOTON SPHERE",
    body: "Light can loop around here, making a bright space ring.",
  },
  ergosphere: {
    title: "ERGOSPHERE",
    body: "Space gets pulled around the spinning black hole.",
  },
  jet: {
    title: "SPACE JET",
    body: "Glowing gas shoots away like a space fountain.",
  },
  isco: {
    title: "FAST RING",
    body: "This is where the glowing dust races around very fast.",
  },
  horizon: {
    title: "EVENT HORIZON",
    body: "The dark middle is the no-return edge. We watch from far away.",
  },
};

// ---- shader compile helpers -----------------------------------------------
/**
 * Compiles one GLSL shader stage, throwing a diagnostics-rich Error on
 * failure instead of returning null.
 *
 * @param {WebGL2RenderingContext} gl
 * @param {number} type - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @param {string} src - GLSL source text
 * @returns {WebGLShader} the compiled shader object
 * @throws {Error} with `.detail` (and `.log`/`.kind` on compile failure)
 */
function compileShader(gl, type, src) {
  const shader = gl.createShader(type);
  if (!shader) {
    // A null handle means the context is already lost (e.g. leftover from a
    // prior mount whose cleanup called WEBGL_lose_context). Fail loudly here
    // rather than letting shaderSource throw an opaque TypeError.
    const err = new Error(
      "createShader returned null — WebGL context appears lost. " +
        "isContextLost=" +
        (gl.isContextLost ? gl.isContextLost() : "unknown"),
    );
    err.detail = err.message;
    throw err;
  }

  gl.shaderSource(shader, src);
  gl.compileShader(shader);
  if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    return shader;
  }

  // Compile failed — build a compact diagnostic: the first driver-reported
  // error plus the GLSL header, enough for the on-screen fallback to allow
  // remote debugging (SwiftShader / ANGLE / hardware drivers word their
  // errors differently for the same source).
  const log = gl.getShaderInfoLog(shader) || "(empty log)";
  const kind = type === gl.VERTEX_SHADER ? "VERTEX" : "FRAGMENT";
  const headerLines = src.split("\n").slice(0, 4).join("\n");
  const match = log.match(/ERROR: \d+:\d+:[^\n]+/);
  const firstError = match ? match[0] : log.split("\n")[0];
  console.error("Shader log:\n" + log);
  const err = new Error("shader compile failed: " + firstError);
  err.detail = `[${kind} SHADER] ${firstError}\n--- header ---\n${headerLines}`;
  err.log = log;
  err.kind = kind;
  throw err;
}

/**
 * Links a vertex + fragment shader pair into a program.
 *
 * @param {WebGL2RenderingContext} gl
 * @param {WebGLShader} vs - compiled vertex shader
 * @param {WebGLShader} fs - compiled fragment shader
 * @returns {WebGLProgram}
 * @throws {Error} with `.detail` ("[LINK] …") and `.log` on link failure
 */
function linkProg(gl, vs, fs) {
  const prog = gl.createProgram();
  gl.attachShader(prog, vs);
  gl.attachShader(prog, fs);
  gl.linkProgram(prog);
  if (gl.getProgramParameter(prog, gl.LINK_STATUS)) {
    return prog;
  }
  // Link failure: first log line in the message, full log preserved on the
  // error object for the on-screen fallback.
  const log = gl.getProgramInfoLog(prog) || "(empty log)";
  console.error(log);
  const err = new Error("link failed: " + log.split("\n")[0]);
  err.detail = "[LINK] " + log;
  err.log = log;
  throw err;
}

/**
 * Allocates a w×h render target: a color texture wrapped in a framebuffer.
 *
 * @param {WebGL2RenderingContext} gl
 * @param {number} w - width in pixels
 * @param {number} h - height in pixels
 * @param {boolean} [hdr=true] - RGBA16F/HALF_FLOAT when true, RGBA8 otherwise
 * @returns {{tex: WebGLTexture, fbo: WebGLFramebuffer, w: number, h: number}}
 * @throws {Error} if the driver reports the framebuffer incomplete; both GL
 *   handles are released before throwing so callers don't leak.
 */
function makeFloatFBO(gl, w, h, hdr = true) {
  const tex = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(
    gl.TEXTURE_2D,
    0,
    hdr ? gl.RGBA16F : gl.RGBA8,
    w,
    h,
    0,
    gl.RGBA,
    hdr ? gl.HALF_FLOAT : gl.UNSIGNED_BYTE,
    null,
  );
  // Linear filtering + edge clamping: these targets are sampled as plain
  // screen-space textures by the blur/composite passes.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

  const fbo = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
  gl.framebufferTexture2D(
    gl.FRAMEBUFFER,
    gl.COLOR_ATTACHMENT0,
    gl.TEXTURE_2D,
    tex,
    0,
  );

  const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  if (status !== gl.FRAMEBUFFER_COMPLETE) {
    gl.deleteTexture(tex);
    gl.deleteFramebuffer(fbo);
    throw new Error(`framebuffer incomplete (${hdr ? "HDR" : "LDR"})`);
  }
  return { tex, fbo, w, h };
}

/**
 * Crude device heuristic for the initial quality tier; refined by frame-time
 * measurement after the first ~60 frames.
 *
 * Generalized: the navigator object is now an injectable parameter (defaults
 * to the real one), so the heuristic is unit-testable and no longer throws
 * when no `navigator` global exists at all.
 *
 * @param {{hardwareConcurrency?: number, userAgent?: string}} [nav]
 * @returns {0|1|2} 0 = low, 1 = medium, 2 = ultra
 */
function detectQuality(nav = globalThis.navigator) {
  const cores = (nav && nav.hardwareConcurrency) || 4;
  const ua = (nav && nav.userAgent) || "";
  const isMobile = /android|iphone|ipad|ipod/i.test(ua);
  if (isMobile) return 0; // low — mobile GPUs can't sustain the raymarcher
  if (cores <= 4) return 1; // medium
  return 2; // ultra
}

// ---- main React component --------------------------------------------------
function BlackHoleLevel({ onExit }) {
  const canvasRef = useRef(null);
  const overlayRef = useRef(null); // 2D canvas for particle trails
  const loadingRef = useRef(null);
  const audioRef = useRef(null);
  const [done, setDone] = useState(false);
  const [webglFailed, setWebglFailed] = useState(false);
  const [webglErrorMsg, setWebglErrorMsg] = useState("");
  const [quality, setQuality] = useState(detectQuality());
  const [scienceMode, setScienceMode] = useState(false);
  const [telemetryVisible, setTelemetryVisible] = useState(false);

  // Live mirror so the WebGL effect can react to quality / scienceMode /
  // telemetry-visibility changes without rebuilding the entire pipeline
  // (which used to cause a black-frame glitch on every auto-quality-down).
  const bhLiveRef = useRef({
    quality,
    scienceMode,
    telemetryVisible,
  });
  useEffect(() => {
    bhLiveRef.current.quality = quality;
    bhLiveRef.current.scienceMode = scienceMode;
    bhLiveRef.current.telemetryVisible = telemetryVisible;
  }, [quality, scienceMode, telemetryVisible]);
  const [telemetry, setTelemetry] = useState({
    r: 11,
    vOrb: 0,
    tDisk: 1,
    gamma: 1,
    magnification: 1,
    frameDrag: 0,
    fps: 60,
    steps: 180,
  });
  const [caption, setCaption] = useState(null);
  const [lookLabel, setLookLabel] = useState(null);
  const [fact, setFact] = useState(null);
  const [eggVisible, setEggVisible] = useState(false);
  const [recording, setRecording] = useState(false);
  // physics + camera state lives in a ref so the render loop never re-renders us
  const stateRef = useRef({
    yaw: 0.4,
    pitch: 0.18,
    dist: 11.0,
    targetYaw: 0.4,
    targetPitch: 0.18,
    targetDist: 11.0,
    fov: 1.0,
    targetFov: 1.0,
    autoOrbit: true,
    spin: 0.85,
    accretion: 1.1,
    diskTemp: 1.0,
    timeDilation: 1.0,
    jetStrength: 0.9,
    flare: 0.0,
    showGeodesic: false,
    yawVel: 0,
    pitchVel: 0, // inertia
    distVel: 0,
    timeScale: 1.0,
    holdSpace: false,
    particles: [], // {p:[x,y,z], v:[x,y,z], age, alive, trail:[]}
    stableTimer: 0, // for easter egg
    rHistory: [],
    lastRetargetT: 0,
    stuckOnFeature: null,
    stuckSince: 0,
    audio: null,
  });

  // bridge HUD changes into stateRef without re-rendering
  const setState = (patch) => {
    Object.assign(stateRef.current, patch);
    // mirror a few values up so HUD slider thumb position updates
    setHudKey((k) => k + 1);
  };
  const [hudKey, setHudKey] = useState(0);

  useEffect(() => {
    const canvas = canvasRef.current;
    const overlay = overlayRef.current;
    if (!canvas) return;

    // ---- WebGL2 context ----
    const gl = canvas.getContext("webgl2", {
      antialias: false,
      alpha: false,
      preserveDrawingBuffer: true,
    });
    if (!gl) {
      setWebglErrorMsg(
        "getContext('webgl2') returned null — browser/driver does not expose WebGL2.",
      );
      setWebglFailed(true);
      return;
    }
    const hasFloat = !!gl.getExtension("EXT_color_buffer_float");
    const hasHalfFloat = !!gl.getExtension("EXT_color_buffer_half_float");
    const hdr = hasFloat || hasHalfFloat;
    // No bail-out when neither extension exists — fall through to LDR (RGBA8)
    // pipeline. makeFloatFBO honors `hdr` flag below.

    // ---- compile programs ----
    // Track every shader so cleanup can detach + delete them. Without this the
    // GL driver keeps the shader objects alive (they stay attached to each
    // program), which is a classic WebGL resource leak across re-mounts.
    let progMain, progBlur, progComp;
    const allShaders = [];
    try {
      const vs = compileShader(gl, gl.VERTEX_SHADER, BH_VERT);
      allShaders.push(vs);
      const fsMain = compileShader(gl, gl.FRAGMENT_SHADER, BH_FRAG_MAIN);
      const fsBlur = compileShader(gl, gl.FRAGMENT_SHADER, BH_FRAG_BLUR);
      const fsComp = compileShader(gl, gl.FRAGMENT_SHADER, BH_FRAG_COMPOSITE);
      allShaders.push(fsMain, fsBlur, fsComp);
      progMain = linkProg(gl, vs, fsMain);
      progBlur = linkProg(gl, vs, fsBlur);
      progComp = linkProg(gl, vs, fsComp);
    } catch (e) {
      console.error(e);
      const renderer =
        (gl.getExtension("WEBGL_debug_renderer_info") &&
          gl.getParameter(gl.UNMASKED_RENDERER_WEBGL)) ||
        gl.getParameter(gl.RENDERER) ||
        "unknown";
      const detail =
        (e && e.detail) || (e && e.log) || (e && e.message) || String(e);
      setWebglErrorMsg(
        "Shader compile/link failed.\n" +
          "Renderer: " +
          renderer +
          "\n" +
          "GLSL ver: " +
          gl.getParameter(gl.SHADING_LANGUAGE_VERSION) +
          "\n" +
          "GL ver:   " +
          gl.getParameter(gl.VERSION) +
          "\n" +
          "---\n" +
          detail,
      );
      setWebglFailed(true);
      return;
    }

    // ---- fullscreen quad ----
    const buf = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, buf);
    gl.bufferData(
      gl.ARRAY_BUFFER,
      new Float32Array([-1, -1, 1, -1, -1, 1, 1, 1]),
      gl.STATIC_DRAW,
    );
    // getAttribLocation is a constant lookup; doing it inside bindAttribs
    // means 3 GPU round-trips every frame. Cache once per program. (Programs
    // are created once per mount; locations don't change.)
    const attribLocCache = new WeakMap();
    const bindAttribs = (prog) => {
      gl.useProgram(prog);
      let loc = attribLocCache.get(prog);
      if (loc === undefined) {
        loc = gl.getAttribLocation(prog, "a");
        attribLocCache.set(prog, loc);
      }
      gl.enableVertexAttribArray(loc);
      gl.vertexAttribPointer(loc, 2, gl.FLOAT, false, 0, 0);
    };

    // ---- uniform locations ----
    const U = {
      main: {
        uRes: gl.getUniformLocation(progMain, "uRes"),
        uTime: gl.getUniformLocation(progMain, "uTime"),
        uCamPos: gl.getUniformLocation(progMain, "uCamPos"),
        uCamRot: gl.getUniformLocation(progMain, "uCamRot"),
        uFov: gl.getUniformLocation(progMain, "uFov"),
        uSky: gl.getUniformLocation(progMain, "uSky"),
        uSkyOn: gl.getUniformLocation(progMain, "uSkyOn"),
        uSpin: gl.getUniformLocation(progMain, "uSpin"),
        uAccretion: gl.getUniformLocation(progMain, "uAccretion"),
        uDiskTemp: gl.getUniformLocation(progMain, "uDiskTemp"),
        uQuality: gl.getUniformLocation(progMain, "uQuality"),
        uShowGeodesic: gl.getUniformLocation(progMain, "uShowGeodesic"),
        uFlare: gl.getUniformLocation(progMain, "uFlare"),
        uJetStrength: gl.getUniformLocation(progMain, "uJetStrength"),
        uTimeDilation: gl.getUniformLocation(progMain, "uTimeDilation"),
      },
      blur: {
        uSrc: gl.getUniformLocation(progBlur, "uSrc"),
        uTexel: gl.getUniformLocation(progBlur, "uTexel"),
        uDir: gl.getUniformLocation(progBlur, "uDir"),
      },
      comp: {
        uHdr: gl.getUniformLocation(progComp, "uHdr"),
        uBloom: gl.getUniformLocation(progComp, "uBloom"),
        uTime: gl.getUniformLocation(progComp, "uTime"),
        uBloomStrength: gl.getUniformLocation(progComp, "uBloomStrength"),
        uGrainStrength: gl.getUniformLocation(progComp, "uGrainStrength"),
        uScanline: gl.getUniformLocation(progComp, "uScanline"),
        uProximity: gl.getUniformLocation(progComp, "uProximity"),
      },
    };

    // ---- sky texture ----
    // The CDN image can resolve after unmount under React StrictMode (mount,
    // cleanup, remount); the first onload would otherwise create a texture in
    // a torn-down context. Track disposal and bail if it fires too late.
    let disposed = false;
    const skyImg = new Image();
    skyImg.crossOrigin = "anonymous";
    let skyTex = null,
      skyLoaded = false;
    skyImg.onload = () => {
      if (disposed || gl.isContextLost()) return;
      skyTex = gl.createTexture();
      gl.bindTexture(gl.TEXTURE_2D, skyTex);
      gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
      gl.texImage2D(
        gl.TEXTURE_2D,
        0,
        gl.RGBA,
        gl.RGBA,
        gl.UNSIGNED_BYTE,
        skyImg,
      );
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
      skyLoaded = true;
    };
    skyImg.onerror = () => {
      // Network/CORS failure — keep skyLoaded=false so shader uses procedural
      // starfield branch. Quiet console rather than red error spam.
      if (!disposed) console.warn("blackhole: sky texture failed to load");
    };
    skyImg.src =
      "https://cdn.jsdelivr.net/gh/jeromeetienne/threex.planets@master/images/galaxy_starfield.png";

    // ---- FBOs (HDR + 2 ping-pong bloom, half-res) ----
    let hdrFBO = null,
      bloomA = null,
      bloomB = null;
    // (Re)builds the full-res HDR target plus the two half-res bloom
    // ping-pong buffers, releasing any previous generation first. Called
    // from resize(), i.e. on every window resize and on live quality
    // changes. `hdr` (captured from the extension probe above) selects
    // RGBA16F vs RGBA8 storage.
    function recreateFBOs(w, h) {
      const release = (fb) => {
        if (!fb) return;
        gl.deleteTexture(fb.tex);
        gl.deleteFramebuffer(fb.fbo);
      };
      release(hdrFBO);
      release(bloomA);
      release(bloomB);
      hdrFBO = makeFloatFBO(gl, w, h, hdr);
      // Bloom runs at half resolution (>>1), clamped to a 2px floor so a
      // tiny window can't produce a zero-sized texture.
      bloomA = makeFloatFBO(gl, Math.max(2, w >> 1), Math.max(2, h >> 1), hdr);
      bloomB = makeFloatFBO(gl, Math.max(2, w >> 1), Math.max(2, h >> 1), hdr);
    }

    // ---- resize ----
    // Sizes the WebGL canvas and the 2D particle overlay to the window,
    // applying a device-pixel-ratio cap (1.5) and a quality-dependent
    // resolution scale, then rebuilds the FBO chain at the new pixel size.
    function resize() {
      const dprBase = Math.min(window.devicePixelRatio || 1, 1.5);
      // dpr scales with quality: low=0.65, med=0.9, ultra=1.0 (full)
      const liveQuality = bhLiveRef.current.quality;
      const qScale = liveQuality === 0 ? 0.65 : liveQuality === 1 ? 0.9 : 1.0;
      const dpr = dprBase * qScale;
      const w = Math.max(2, Math.floor(window.innerWidth * dpr));
      const h = Math.max(2, Math.floor(window.innerHeight * dpr));
      canvas.width = w;
      canvas.height = h;
      // CSS size stays at full window dimensions; only the backing-store
      // resolution (w/h) changes with quality.
      canvas.style.width = window.innerWidth + "px";
      canvas.style.height = window.innerHeight + "px";
      if (overlay) {
        overlay.width = w;
        overlay.height = h;
        overlay.style.width = window.innerWidth + "px";
        overlay.style.height = window.innerHeight + "px";
      }
      recreateFBOs(w, h);
    }
    window.addEventListener("resize", resize);
    resize();

    // ---- audio ----
    // Pending timeouts get cleared on cleanup so they can't setState after
    // unmount or mutate stateRef belonging to a stale mount.
    const pendingTimeouts = new Set();
    const safeTimeout = (fn, ms) => {
      const id = setTimeout(() => {
        pendingTimeouts.delete(id);
        fn();
      }, ms);
      pendingTimeouts.add(id);
      return id;
    };
    const audio = new BlackHoleAudio();
    stateRef.current.audio = audio;
    audioRef.current = audio;
    audio.onCaption((c) => {
      setCaption(c);
      safeTimeout(
        () => setCaption((cur) => (cur && cur.key === c.key ? null : cur)),
        c.durationMs,
      );
    });

    // First user interaction unlocks WebAudio. Hook it on canvas pointerdown.
    const unlockAudio = () => {
      audio.init();
      audio.resume();
      canvas.removeEventListener("pointerdown", unlockAudio);
    };
    canvas.addEventListener("pointerdown", unlockAudio);

    // ---- pointer interaction ----
    let drag = false,
      lx = 0,
      ly = 0,
      lt = 0;
    const onDown = (e) => {
      drag = true;
      lx = e.clientX;
      ly = e.clientY;
      lt = performance.now();
      stateRef.current.autoOrbit = false;
      try {
        canvas.setPointerCapture(e.pointerId);
      } catch (_) {}
    };
    const onMove = (e) => {
      if (!drag) return;
      const dx = e.clientX - lx,
        dy = e.clientY - ly;
      stateRef.current.targetYaw -= dx * 0.005;
      stateRef.current.targetPitch += dy * 0.005;
      stateRef.current.targetPitch = Math.max(
        -1.45,
        Math.min(1.45, stateRef.current.targetPitch),
      );
      const now = performance.now();
      const dt = Math.max(1, now - lt);
      stateRef.current.yawVel = ((-dx * 0.005) / (dt * 0.001)) * 0.05;
      stateRef.current.pitchVel = ((dy * 0.005) / (dt * 0.001)) * 0.05;
      lx = e.clientX;
      ly = e.clientY;
      lt = now;
    };
    const onUp = (e) => {
      drag = false;
      try {
        canvas.releasePointerCapture(e.pointerId);
      } catch (_) {}
    };
    let lastTap = 0;
    const onClick = (e) => {
      const now = performance.now();
      // double-tap toggles auto-orbit
      if (now - lastTap < 300) {
        stateRef.current.autoOrbit = !stateRef.current.autoOrbit;
      }
      lastTap = now;
    };
    const onWheel = (e) => {
      e.preventDefault();
      stateRef.current.targetDist = Math.max(
        2.6,
        Math.min(30, stateRef.current.targetDist + e.deltaY * 0.012),
      );
    };
    canvas.addEventListener("pointerdown", onDown);
    canvas.addEventListener("pointermove", onMove);
    canvas.addEventListener("pointerup", onUp);
    canvas.addEventListener("pointercancel", onUp);
    canvas.addEventListener("click", onClick);
    canvas.addEventListener("wheel", onWheel, { passive: false });

    // ---- pinch-to-zoom (mobile) ----
    const touches = new Map();
    let pinchPrev = null;
    const onTouchStart = (e) => {
      for (const t of e.changedTouches)
        touches.set(t.identifier, { x: t.clientX, y: t.clientY });
      if (touches.size === 2) {
        const ts = Array.from(touches.values());
        pinchPrev = Math.hypot(ts[0].x - ts[1].x, ts[0].y - ts[1].y);
      }
    };
    const onTouchMove = (e) => {
      for (const t of e.changedTouches)
        touches.set(t.identifier, { x: t.clientX, y: t.clientY });
      if (touches.size === 2 && pinchPrev != null) {
        const ts = Array.from(touches.values());
        const d = Math.hypot(ts[0].x - ts[1].x, ts[0].y - ts[1].y);
        const dd = (pinchPrev - d) * 0.04;
        stateRef.current.targetDist = Math.max(
          2.6,
          Math.min(30, stateRef.current.targetDist + dd),
        );
        pinchPrev = d;
      }
    };
    const onTouchEnd = (e) => {
      for (const t of e.changedTouches) touches.delete(t.identifier);
      if (touches.size < 2) pinchPrev = null;
    };
    canvas.addEventListener("touchstart", onTouchStart, { passive: true });
    canvas.addEventListener("touchmove", onTouchMove, { passive: true });
    canvas.addEventListener("touchend", onTouchEnd, { passive: true });

    // ---- hotkeys ----
    // Snaps the camera *targets* to a named BH_PRESETS entry (the frame loop
    // then eases the actual yaw/pitch/dist/fov toward them) and disables
    // auto-orbit so the preset view holds. Unknown keys are ignored.
    const applyPreset = (key) => {
      const p = BH_PRESETS.find((x) => x.key === key);
      if (!p) return;
      const s = stateRef.current;
      s.targetYaw = p.yaw;
      s.targetPitch = p.pitch;
      s.targetDist = p.dist;
      s.targetFov = p.fov;
      s.autoOrbit = false;
      // wormhole tease: brief fov pulse + chirp
      if (key === "wormhole") {
        // Pulse FOV briefly for the "tease" effect, but cap at 1.3 (was 1.6)
        // and shorten duration (was 1400ms) so the GPU isn't sustained at
        // worst-case ray-march load.
        s.targetFov = 1.3;
        if (s.audio) s.audio.triggerChirp();
        safeTimeout(() => {
          stateRef.current.targetFov = 1.0;
        }, 700);
      }
    };
    const onKey = (e) => {
      if (e.key === "Escape") {
        onExit();
        return;
      }
      if (e.key === "t" || e.key === "T") setTelemetryVisible((v) => !v);
      if (e.key === "g" || e.key === "G") setScienceMode((v) => !v);
      if (e.code === "Space") {
        e.preventDefault();
        stateRef.current.holdSpace = true;
        stateRef.current.targetTimeScale = 0.18;
      }
      if (e.key >= "1" && e.key <= "6") {
        const idx = parseInt(e.key, 10) - 1;
        if (BH_PRESETS[idx]) applyPreset(BH_PRESETS[idx].key);
      }
    };
    const onKeyUp = (e) => {
      if (e.code === "Space") {
        stateRef.current.holdSpace = false;
        stateRef.current.targetTimeScale = 1.0;
      }
    };
    window.addEventListener("keydown", onKey);
    window.addEventListener("keyup", onKeyUp);

    // ---- click-to-launch particle (Phase 2) ----
    // Converts a pointer event into a world-space ray from the camera and
    // spawns a JS-side test particle a short distance along it. Needs
    // camPos/camRot, which the frame loop writes each frame — bails if no
    // frame has rendered yet.
    const launchParticle = (e) => {
      // converts screen click into a starting ray, fires a JS-side test particle
      const rect = canvas.getBoundingClientRect();
      // NDC-style coords: x right, y up, both roughly in [-1, 1]
      const nx = ((e.clientX - rect.left) / rect.width) * 2 - 1;
      const ny = -(((e.clientY - rect.top) / rect.height) * 2 - 1);
      const aspect = rect.width / rect.height;
      const fov = stateRef.current.fov;
      const dirCam = [nx * fov * aspect, ny * fov, -1];
      const m = stateRef.current.camRot;
      if (!m) return; // camera basis not available before the first frame
      // Rotate the camera-space direction into world space using the 3x3
      // camera basis (right / up / -forward rows, as packed by the frame loop).
      const dirW = [
        m[0] * dirCam[0] + m[3] * dirCam[1] + m[6] * dirCam[2],
        m[1] * dirCam[0] + m[4] * dirCam[1] + m[7] * dirCam[2],
        m[2] * dirCam[0] + m[5] * dirCam[1] + m[8] * dirCam[2],
      ];
      const len = Math.hypot(dirW[0], dirW[1], dirW[2]);
      const dir = [dirW[0] / len, dirW[1] / len, dirW[2] / len];
      const cam = stateRef.current.camPos;
      // start a few units in front of the camera
      const start = [
        cam[0] + dir[0] * 1.5,
        cam[1] + dir[1] * 1.5,
        cam[2] + dir[2] * 1.5,
      ];
      stateRef.current.particles.push({
        p: start.slice(),
        v: dir.slice(),
        age: 0,
        alive: true,
        trail: [start.slice()],
        hue: Math.random(), // per-particle trail color
      });
      // Cap the live particle pool at 14; oldest is dropped first.
      if (stateRef.current.particles.length > 14)
        stateRef.current.particles.shift();
      if (stateRef.current.audio) stateRef.current.audio.triggerChirp();
    };
    // alt-click or shift-click launches a particle (so plain click stays for double-tap)
    const onContextMenu = (e) => {
      e.preventDefault();
      launchParticle(e);
    };
    const onParticlePointerDown = (e) => {
      if (e.shiftKey || e.altKey || e.button === 2) launchParticle(e);
    };
    canvas.addEventListener("contextmenu", onContextMenu);
    canvas.addEventListener("pointerdown", onParticlePointerDown);

    // ---- WebGL context loss handling ----
    // GPU TDR / driver timeout (the wormhole preset historically tripped this on
    // software renderers — see BH_PRESETS.wormhole comment). When the context is
    // lost we must (a) preventDefault so the browser will attempt a restore,
    // (b) stop the render loop, and (c) surface a fallback. On restore we just
    // reload the level by re-mounting (cheaper than re-running the entire
    // setup inline; setWebglFailed -> Back button is the existing flow).
    let contextLost = false;
    const onContextLost = (ev) => {
      ev.preventDefault();
      contextLost = true;
      if (raf) cancelAnimationFrame(raf);
    };
    const onContextRestored = () => {
      // Surface fallback rather than attempt in-place re-init (programs,
      // textures, FBOs are all dead handles now). User taps Back -> re-enters.
      setWebglErrorMsg(
        "WebGL context was lost (likely GPU driver timeout). Tap Back and re-enter Gargantua to restart the renderer.",
      );
      setWebglFailed(true);
    };
    canvas.addEventListener("webglcontextlost", onContextLost);
    canvas.addEventListener("webglcontextrestored", onContextRestored);

    // ---- play original narration MP3 (kept as deep-lore mode) ----
    if (window.__narration) window.__narration.play("blackhole.mp3");
    const audioEl = document.getElementById("narration");
    const onAudioEnd = () => setDone(true);
    if (audioEl) audioEl.addEventListener("ended", onAudioEnd);

    // ---- fact pop-up timer ----
    const factTimer = setInterval(() => {
      if (Math.random() < 0.7) {
        setFact(BH_FACTS[Math.floor(Math.random() * BH_FACTS.length)]);
        safeTimeout(() => setFact(null), 7000);
      }
    }, 25000);

    // ---- main render loop ----
    const t0 = performance.now();
    let loadingHidden = false;
    let raf;
    let frameCount = 0;
    let frameTimeAccum = 0;
    let lastFrameTime = t0;
    let fpsSample = 60;
    let qualityProbe = 0;
    let lastQualityForResize = bhLiveRef.current.quality;

    function frame() {
      // Bail out cleanly if context was lost mid-flight — every gl.* call below
      // would otherwise raise GL_INVALID_OPERATION and spam the console.
      if (contextLost || gl.isContextLost()) {
        return;
      }
      const now = performance.now();
      const dtMs = now - lastFrameTime;
      lastFrameTime = now;
      frameCount += 1;
      frameTimeAccum += dtMs;
      if (frameCount % 30 === 0) {
        fpsSample = 30000 / frameTimeAccum;
        frameTimeAccum = 0;
      }

      const t = (now - t0) / 1000;
      const s = stateRef.current;

      // smooth time scale
      const targetTs = s.targetTimeScale != null ? s.targetTimeScale : 1.0;
      s.timeScale += (targetTs - s.timeScale) * 0.08;

      // auto-quality: only scales DOWN, never up — prevents flip-flop loops.
      // User can still pick Ultra manually via the HUD.
      qualityProbe += 1;
      const liveQuality = bhLiveRef.current.quality;
      if (qualityProbe === 90 && fpsSample < 35 && liveQuality > 0) {
        setQuality((q) => Math.max(0, q - 1));
      }
      // React to live quality changes (auto-down or manual HUD pick) without
      // tearing down the WebGL stack — just re-run resize() to update DPR.
      if (liveQuality !== lastQualityForResize) {
        lastQualityForResize = liveQuality;
        resize();
      }

      // camera smoothing + inertia decay
      if (s.autoOrbit) s.targetYaw += 0.0014;
      else {
        s.targetYaw += s.yawVel;
        s.targetPitch += s.pitchVel;
        s.yawVel *= 0.92;
        s.pitchVel *= 0.92;
      }
      s.yaw += (s.targetYaw - s.yaw) * 0.085;
      s.pitch += (s.targetPitch - s.pitch) * 0.085;
      s.dist += (s.targetDist - s.dist) * 0.06;
      s.fov += ((s.targetFov || 1.0) - s.fov) * 0.06;

      // build camera basis
      const cy = Math.cos(s.yaw),
        sy = Math.sin(s.yaw);
      const cp = Math.cos(s.pitch),
        sp = Math.sin(s.pitch);
      const cam = [s.dist * cp * sy, s.dist * sp, s.dist * cp * cy];
      const fwd = [-cam[0], -cam[1], -cam[2]];
      const fl = Math.hypot(fwd[0], fwd[1], fwd[2]);
      fwd[0] /= fl;
      fwd[1] /= fl;
      fwd[2] /= fl;
      const upWorld = [0, 1, 0];
      let right = [
        fwd[1] * upWorld[2] - fwd[2] * upWorld[1],
        fwd[2] * upWorld[0] - fwd[0] * upWorld[2],
        fwd[0] * upWorld[1] - fwd[1] * upWorld[0],
      ];
      const rl = Math.hypot(right[0], right[1], right[2]) || 1;
      right = [right[0] / rl, right[1] / rl, right[2] / rl];
      const up = [
        right[1] * fwd[2] - right[2] * fwd[1],
        right[2] * fwd[0] - right[0] * fwd[2],
        right[0] * fwd[1] - right[1] * fwd[0],
      ];
      const negFwd = [-fwd[0], -fwd[1], -fwd[2]];
      const camRot = new Float32Array([
        right[0],
        right[1],
        right[2],
        up[0],
        up[1],
        up[2],
        negFwd[0],
        negFwd[1],
        negFwd[2],
      ]);
      s.camPos = cam;
      s.camRot = camRot;

      // periodic flare envelope
      s.flare = Math.max(0, s.flare - dtMs * 0.001);
      if (Math.random() < 0.01) {
        s.flare = 0.7 + Math.random() * 0.3;
        if (s.audio) s.audio.triggerFlare();
      }

      // ---- pass 1: raymarch into HDR ----
      gl.bindFramebuffer(gl.FRAMEBUFFER, hdrFBO.fbo);
      gl.viewport(0, 0, hdrFBO.w, hdrFBO.h);
      bindAttribs(progMain);
      gl.uniform2f(U.main.uRes, hdrFBO.w, hdrFBO.h);
      gl.uniform1f(U.main.uTime, t * s.timeScale);
      gl.uniform3f(U.main.uCamPos, cam[0], cam[1], cam[2]);
      gl.uniformMatrix3fv(U.main.uCamRot, false, camRot);
      gl.uniform1f(U.main.uFov, s.fov);
      gl.uniform1f(U.main.uSpin, s.spin);
      gl.uniform1f(U.main.uAccretion, s.accretion);
      gl.uniform1f(U.main.uDiskTemp, s.diskTemp);
      gl.uniform1f(U.main.uQuality, liveQuality);
      gl.uniform1f(
        U.main.uShowGeodesic,
        bhLiveRef.current.scienceMode ? 1.0 : 0.0,
      );
      gl.uniform1f(U.main.uFlare, s.flare);
      gl.uniform1f(U.main.uJetStrength, s.jetStrength);
      gl.uniform1f(U.main.uTimeDilation, s.timeDilation * s.timeScale);
      if (skyLoaded) {
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, skyTex);
        gl.uniform1i(U.main.uSky, 0);
        gl.uniform1f(U.main.uSkyOn, 1.0);
      } else {
        gl.uniform1f(U.main.uSkyOn, 0.0);
      }
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

      // ---- pass 2: bloom blur ping-pong ----
      // First iteration sources from hdrFBO (full-res, alpha = bloom mask).
      // Subsequent iterations alternate bloomA <-> bloomB (half-res, alpha=1).
      // Quality 0/1/2 maps to 1/2/3 iterations; each iteration is one
      // horizontal + one vertical pass of the separable blur.
      const bloomIters = liveQuality === 0 ? 1 : liveQuality === 1 ? 2 : 3;
      bindAttribs(progBlur);
      // One separable blur pass: sample src, write dst, blur along (dirX, dirY).
      const blurPass = (src, dst, dirX, dirY) => {
        gl.bindFramebuffer(gl.FRAMEBUFFER, dst.fbo);
        gl.viewport(0, 0, dst.w, dst.h);
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, src.tex);
        gl.uniform1i(U.blur.uSrc, 0);
        gl.uniform2f(U.blur.uTexel, 1.0 / src.w, 1.0 / src.h);
        gl.uniform2f(U.blur.uDir, dirX, dirY);
        gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
      };
      // Ping-pong must always pick hMid != bloomSrc to avoid sampling the
      // same texture currently bound as the framebuffer (GL_INVALID_OPERATION
      // feedback loop). Iter 0: src=hdrFBO -> hMid=bloomA -> vOut=bloomB.
      // Iter 1: src=bloomB -> hMid=bloomA -> vOut=bloomB. Etc. Each ping-pong
      // writes to one buffer while reading the other.
      let bloomSrc = hdrFBO;
      for (let i = 0; i < bloomIters; i++) {
        const hMid = bloomSrc === bloomA ? bloomB : bloomA;
        const vOut = hMid === bloomA ? bloomB : bloomA;
        blurPass(bloomSrc, hMid, 1.0, 0.0);
        blurPass(hMid, vOut, 0.0, 1.0);
        bloomSrc = vOut;
      }
      const bloomFinal = bloomSrc;

      // ---- pass 3: composite to backbuffer ----
      gl.bindFramebuffer(gl.FRAMEBUFFER, null);
      gl.viewport(0, 0, canvas.width, canvas.height);
      bindAttribs(progComp);
      // Unit 0: full-res HDR scene; unit 1: the last-written bloom buffer.
      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, hdrFBO.tex);
      gl.uniform1i(U.comp.uHdr, 0);
      gl.activeTexture(gl.TEXTURE1);
      gl.bindTexture(gl.TEXTURE_2D, bloomFinal.tex);
      gl.uniform1i(U.comp.uBloom, 1);
      gl.uniform1f(U.comp.uTime, t);
      // Fixed grade constants: bloom mix, film grain, scanline intensity.
      gl.uniform1f(U.comp.uBloomStrength, 0.85);
      gl.uniform1f(U.comp.uGrainStrength, 0.018);
      gl.uniform1f(U.comp.uScanline, 1.0);
      gl.uniform1f(U.comp.uProximity, audio.proximity || 0);
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);

      // ---- particle integration + 2D overlay ----
      // dtMs is milliseconds; stepParticles takes seconds, scaled by timeScale.
      stepParticles(s, dtMs * 0.001 * s.timeScale);
      drawOverlay(overlay, s, cam, camRot, canvas.width, canvas.height);

      // ---- audio bridge ----
      // Feed camera distance to the audio layer so the sub-bass tracks proximity.
      if (audio.enabled) audio.setProximity(s.dist);

      // ---- telemetry + look label every ~6 frames ----
      if (frameCount % 6 === 0) {
        const r = s.dist;
        // Skip the React state update entirely when the HUD telemetry panel
        // is collapsed — the look-target detection below still runs.
        if (bhLiveRef.current.telemetryVisible) {
          // Approximate orbital speed as a fraction of c, capped below c.
          // (assumes r is in geometric units of M — TODO confirm vs shader)
          const vOrb = Math.min(0.92, 1.0 / Math.sqrt(r / 2.0));
          // Lorentz factor for that speed, floored to avoid div-by-zero.
          const gamma = 1.0 / Math.sqrt(Math.max(0.02, 1 - vOrb * vOrb));
          // Schwarzschild-style gravitational redshift sqrt(1 - 2/r), floored.
          const grav = Math.sqrt(Math.max(0.02, 1 - 2 / r));
          // NOTE(review): tDilFactor is computed but never used below — either
          // wire it into the telemetry payload or delete it.
          const tDilFactor = grav * (1.0 / gamma);
          // Heuristic lensing magnification — spikes as r approaches ~2.5.
          const mag = 1.0 + Math.exp(-Math.max(0, r - 2.5) * 1.5) * 4.0;
          // Frame-dragging estimate ~ 2a / r^3, denominator floored at 1.
          const drag = (2 * s.spin) / Math.max(1, r * r * r);
          setTelemetry({
            r,
            vOrb,
            tDisk: s.diskTemp,
            gamma,
            magnification: mag,
            frameDrag: drag,
            fps: fpsSample,
            steps: liveQuality === 0 ? 90 : liveQuality === 1 ? 180 : 280,
          });
        }
        // look-target detection
        const feat = pickFeatureFromCamera(s);
        if (feat !== s.stuckOnFeature) {
          // Camera moved onto a different feature (or off one): reset the
          // dwell timer, clear the label, and play that zone's narration once.
          s.stuckOnFeature = feat;
          s.stuckSince = now;
          setLookLabel(null);
          if (window.__narration && feat) {
            const zoneFile = {
              photonSphere: "game_blackhole_photon_sphere.mp3",
              ergosphere: "game_blackhole_ergosphere.mp3",
              jet: "game_blackhole_jet.mp3",
              horizon: "game_blackhole_event_horizon.mp3",
            }[feat];
            // NOTE(review): "isco" has no narration clip mapped — confirm
            // that is intentional (it does get a dwell label below).
            if (zoneFile) window.__narration.play(zoneFile);
          }
        } else if (feat && now - s.stuckSince > 1500) {
          // Dwelled on the same feature for 1.5 s: show its HUD label.
          setLookLabel(BH_LABELS[feat] || null);
        }
        // (Zone narration is now handled above via window.__narration with
        //  pre-recorded Grok TTS clips; the procedural s.audio.speak path
        //  is intentionally disabled to avoid double-narrating each zone.)
        // stable orbit detector for easter egg
        // Low variance over the last 90 samples of r means the player is
        // holding a steady orbit — reveal the easter egg.
        s.rHistory.push(r);
        if (s.rHistory.length > 90) s.rHistory.shift();
        if (s.rHistory.length === 90) {
          const mean = s.rHistory.reduce((a, b) => a + b, 0) / 90;
          const variance =
            s.rHistory.reduce((a, b) => a + (b - mean) * (b - mean), 0) / 90;
          if (variance < 0.04 && !eggVisible) setEggVisible(true);
        }
      }

      // Hide the "OPENING SPACE VIEW..." splash once the scene has been
      // rendering for half a second (t appears to be loop time in seconds —
      // defined earlier in this effect; confirm there).
      if (!loadingHidden && t > 0.5) {
        loadingHidden = true;
        if (loadingRef.current) loadingRef.current.classList.add("hide");
      }
      // Schedule the next frame from inside the loop...
      raf = requestAnimationFrame(frame);
    }
    // ...and kick the loop off once here.
    raf = requestAnimationFrame(frame);

    // ---- cleanup ----
    // Effect teardown: stop the loop, detach every listener registered during
    // setup, cancel timers, shut down audio, then free all GL objects.
    return () => {
      disposed = true;
      cancelAnimationFrame(raf);
      window.removeEventListener("resize", resize);
      window.removeEventListener("keydown", onKey);
      window.removeEventListener("keyup", onKeyUp);
      canvas.removeEventListener("pointerdown", onDown);
      canvas.removeEventListener("pointermove", onMove);
      canvas.removeEventListener("pointerup", onUp);
      canvas.removeEventListener("pointercancel", onUp);
      canvas.removeEventListener("click", onClick);
      canvas.removeEventListener("wheel", onWheel);
      canvas.removeEventListener("contextmenu", onContextMenu);
      canvas.removeEventListener("pointerdown", onParticlePointerDown);
      canvas.removeEventListener("touchstart", onTouchStart);
      canvas.removeEventListener("touchmove", onTouchMove);
      canvas.removeEventListener("touchend", onTouchEnd);
      canvas.removeEventListener("webglcontextlost", onContextLost);
      canvas.removeEventListener("webglcontextrestored", onContextRestored);
      canvas.removeEventListener("pointerdown", unlockAudio);
      if (audioEl) audioEl.removeEventListener("ended", onAudioEnd);
      if (window.__narration) window.__narration.stop();
      // Pending timers would otherwise fire against an unmounted component.
      clearInterval(factTimer);
      for (const id of pendingTimeouts) clearTimeout(id);
      pendingTimeouts.clear();
      // Best-effort: audio may never have been unlocked/initialized.
      try {
        audio.shutdown();
      } catch (_) {}
      try {
        if (skyTex) gl.deleteTexture(skyTex);
        if (hdrFBO) {
          gl.deleteTexture(hdrFBO.tex);
          gl.deleteFramebuffer(hdrFBO.fbo);
        }
        if (bloomA) {
          gl.deleteTexture(bloomA.tex);
          gl.deleteFramebuffer(bloomA.fbo);
        }
        if (bloomB) {
          gl.deleteTexture(bloomB.tex);
          gl.deleteFramebuffer(bloomB.fbo);
        }
        if (buf) gl.deleteBuffer(buf);
        // Detach shaders from every program they're attached to before
        // deleting the program — otherwise the shader object lives on as long
        // as the program is referenced, which compounds across re-mounts.
        for (const prog of [progMain, progBlur, progComp]) {
          if (!prog) continue;
          for (const sh of allShaders) {
            try {
              gl.detachShader(prog, sh);
            } catch (_) {}
          }
          gl.deleteProgram(prog);
        }
        for (const sh of allShaders) {
          try {
            gl.deleteShader(sh);
          } catch (_) {}
        }
        // Note: do NOT call WEBGL_lose_context.loseContext() here. React 18
        // StrictMode runs effects twice on mount (mount -> cleanup -> remount),
        // and a forced context loss during the first cleanup leaves the canvas
        // permanently dead — every subsequent shaderSource(null, ...) throws
        // "Argument 1 ('shader') must be an instance of WebGLShader". The
        // browser releases GPU resources automatically on canvas unmount.
      } catch (_) {}
    };
    // No deps: the effect intentionally runs once for the canvas lifetime.
    // Live values (quality, scienceMode, telemetryVisible) are read through
    // bhLiveRef inside the render loop so changing them does NOT rebuild
    // the WebGL pipeline.
  }, []);

  // ---------------- screenshot / share ----------------
  // Save the current canvas frame as a PNG via a temporary <a download> link.
  const onScreenshot = () => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    canvas.toBlob((blob) => {
      if (!blob) return;
      const objectUrl = URL.createObjectURL(blob);
      const link = document.createElement("a");
      link.href = objectUrl;
      link.download = `gargantua-${Date.now()}.png`;
      document.body.appendChild(link);
      link.click();
      link.remove();
      // Revoke after the download has had a chance to start.
      setTimeout(() => URL.revokeObjectURL(objectUrl), 1000);
    }, "image/png");
  };

  // ring buffer of last 8 seconds via MediaRecorder
  const recRef = useRef(null);
  // Toggle an up-to-8-second canvas capture. Starting grabs a 60 fps stream
  // from the WebGL canvas, records WebM, and downloads the clip when the
  // recorder stops (manually via a second click, or via the 8 s auto-stop).
  const onRecord = () => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    if (recording) {
      // Second click while recording: stop early; onstop handles the download.
      try {
        recRef.current && recRef.current.stop();
      } catch (_) {}
      setRecording(false);
      return;
    }
    try {
      const stream = canvas.captureStream(60);
      // FIX: vp9 was hard-coded, which throws NotSupportedError on browsers
      // without a VP9 encoder (e.g. Safari). Probe for the best supported
      // WebM variant; an undefined mimeType lets the browser pick a default.
      const candidates = [
        "video/webm;codecs=vp9",
        "video/webm;codecs=vp8",
        "video/webm",
      ];
      const mimeType =
        typeof MediaRecorder.isTypeSupported === "function"
          ? candidates.find((t) => MediaRecorder.isTypeSupported(t))
          : candidates[0];
      const mr = new MediaRecorder(stream, {
        mimeType,
        videoBitsPerSecond: 8_000_000,
      });
      const chunks = [];
      mr.ondataavailable = (e) => {
        if (e.data.size > 0) chunks.push(e.data);
      };
      mr.onstop = () => {
        // FIX: stop the capture tracks so the canvas stream stops feeding a
        // finished recorder (previously the tracks were never released).
        stream.getTracks().forEach((track) => track.stop());
        const blob = new Blob(chunks, { type: "video/webm" });
        const url = URL.createObjectURL(blob);
        const a = document.createElement("a");
        a.href = url;
        a.download = `gargantua-${Date.now()}.webm`;
        document.body.appendChild(a);
        a.click();
        a.remove();
        setTimeout(() => URL.revokeObjectURL(url), 2000);
      };
      mr.start();
      recRef.current = mr;
      setRecording(true);
      // auto-stop at 8 seconds
      setTimeout(() => {
        try {
          if (mr.state === "recording") mr.stop();
        } catch (_) {}
        // FIX: only clear the flag if THIS recorder is still the active one —
        // previously a stale timeout from a stopped clip would clobber the
        // "recording" state of a newer capture started within the 8 s window.
        if (recRef.current === mr) setRecording(false);
      }, 8000);
    } catch (e) {
      console.warn("recording unsupported", e);
    }
  };

  // Share the current frame: native share sheet when the platform supports
  // sharing files, otherwise clipboard caption + PNG download fallback.
  const onShare = async () => {
    const canvas = canvasRef.current;
    if (!canvas) return;
    canvas.toBlob(async (blob) => {
      if (!blob) return;
      const file = new File([blob], "gargantua.png", { type: "image/png" });
      const text = `r=${telemetry.r.toFixed(2)} M · v_orb=${telemetry.vOrb.toFixed(2)}c · γ=${telemetry.gamma.toFixed(2)} · gargantua`;
      // Native path — only when the platform reports it can share files.
      try {
        const nativeShareOk =
          navigator.share &&
          navigator.canShare &&
          navigator.canShare({ files: [file] });
        if (nativeShareOk) {
          await navigator.share({ files: [file], text, title: "Gargantua" });
          return;
        }
      } catch (_) {}
      // fallback: copy text + download
      try {
        await navigator.clipboard.writeText(text);
      } catch (_) {}
      onScreenshot();
    }, "image/png");
  };

  // Easter-egg click: hide the egg, boost the jets, play Grok's line, then
  // settle the jets back to their default strength after the clip runs out.
  const onEgg = () => {
    setEggVisible(false);
    const s = stateRef.current;
    s.jetStrength = 1.4;
    if (s.audio) {
      s.audio.speak("egg", "Grok was here. Don't tell the singularity.", {
        repeat: true,
        durationMs: 5500,
      });
      s.audio.triggerFlare();
    }
    setTimeout(() => {
      stateRef.current.jetStrength = 0.9;
    }, 6000);
  };

  // Failure path: WebGL2 (or the float-texture support the pipeline needs)
  // was unavailable — render the captured error text and an exit button
  // instead of the canvas stack.
  if (webglFailed) {
    return (
      <div className="bh-level">
        <div className="bh-fallback">
          <p>WebGL2 with float textures is required for Gargantua v2.</p>
          {webglErrorMsg && (
            <p
              style={{
                fontFamily: "monospace",
                fontSize: "11px",
                opacity: 0.85,
                maxWidth: "720px",
                wordBreak: "break-word",
                whiteSpace: "pre-wrap",
                textAlign: "left",
                margin: "12px auto",
                padding: "12px",
                background: "rgba(255,255,255,0.05)",
                border: "1px solid rgba(255,255,255,0.1)",
                borderRadius: "6px",
              }}
            >
              {webglErrorMsg}
            </p>
          )}
          <button className="bh-btn-primary" onClick={() => onExit()}>
            Back
          </button>
        </div>
      </div>
    );
  }

  // mirror state into HUD-friendly object
  // (stateRef is mutated by the render loop without triggering React renders;
  // this snapshot means the HUD shows values as of whatever state change
  // caused the current render, not live per-frame values.)
  const hudState = {
    spin: stateRef.current.spin,
    accretion: stateRef.current.accretion,
    diskTemp: stateRef.current.diskTemp,
    targetDist: stateRef.current.targetDist,
    timeDilation: stateRef.current.timeDilation,
    jetStrength: stateRef.current.jetStrength,
  };

  // Main scene: WebGL canvas + 2D particle overlay underneath the HUD chrome
  // (control panel, telemetry, captions, share bar, easter egg, end card).
  return (
    <div className="bh-level">
      <canvas ref={canvasRef} className="bh-canvas" />
      <canvas ref={overlayRef} className="bh-particle-overlay" />
      <div ref={loadingRef} className="bh-loading">
        OPENING SPACE VIEW...
      </div>
      <div className="bh-overlay">
        <div className="bh-title">
          Gargantua
          <div className="bh-subtitle">
            Drag gently to look around. Tap the buttons to visit bright space
            places.
          </div>
        </div>
        <div className="bh-stats">
          <div>
            <b>SCHWARZSCHILD RADIUS</b> 12 million km
          </div>
          <div>
            <b>EVENT HORIZON TEMP</b> ~ 0 K
          </div>
          <div>
            <b>ACCRETION DISK</b> 10 million °C
          </div>
          <div>
            <b>SPIN</b> {hudState.spin.toFixed(3)} a/M
          </div>
        </div>
      </div>
      <BHControlPanel
        state={hudState}
        setState={setState}
        presets={BH_PRESETS}
        onPreset={(k) => {
          const p = BH_PRESETS.find((x) => x.key === k);
          if (!p) return;
          const s = stateRef.current;
          s.targetYaw = p.yaw;
          s.targetPitch = p.pitch;
          s.targetDist = p.dist;
          s.targetFov = p.fov;
          s.autoOrbit = false;
          if (k === "wormhole") {
            s.targetFov = 1.3;
            if (s.audio) s.audio.triggerChirp();
            setTimeout(() => {
              stateRef.current.targetFov = 1.0;
            }, 700);
          }
        }}
        quality={quality}
        setQuality={setQuality}
        scienceMode={scienceMode}
        setScienceMode={setScienceMode}
      />
      <BHTelemetry telemetry={telemetry} visible={telemetryVisible} />
      <BHCaption caption={caption} />
      <BHLookLabel label={lookLabel} />
      <BHFactPopup fact={fact} />
      <BHShareBar
        onScreenshot={onScreenshot}
        onRecord={onRecord}
        onShare={onShare}
        recording={recording}
      />
      {eggVisible && <BHEasterEgg onClick={onEgg} />}
      <button className="bh-btn-back-floating" onClick={() => onExit()}>
        ← Back
      </button>
      <div className="bh-watermark">xAI · Gargantua v2</div>
      {done && (
        <div className="bh-finished">
          <div className="bh-finished-card">
            <div className="bh-finished-emoji">🕳️</div>
            <h1>Singularity achieved.</h1>
            <p>Gravity always wins — but you can still run.</p>
            <div className="bh-finished-actions">
              <button
                className="bh-btn-primary"
                onClick={() => onExit({ replay: true })}
              >
                Hear it again
              </button>
              <button
                className="bh-btn-secondary"
                onClick={() => setDone(false)}
              >
                Keep orbiting
              </button>
              <button className="bh-btn-secondary" onClick={() => onExit()}>
                Back to system
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

// ---- particle integrator ---------------------------------------------------
// Cheap leapfrog using same approximate-Kerr potential as the shader.
/**
 * Advance every live particle by `dt` seconds through the approximate-Kerr
 * field (same potential shape as the shader), then sweep dead particles,
 * shrinking their trails one point per call so they fade instead of popping.
 * @param {object} state - owns `particles` (array) and `spin` (a/M).
 * @param {number} dt - timestep in seconds (caller applies timeScale).
 */
function stepParticles(state, dt) {
  if (!state.particles.length) return;
  const HORIZON = 2.0;
  const SUBSTEPS = 4;
  for (const particle of state.particles) {
    if (!particle.alive) continue;
    particle.age += dt;
    const pos = particle.p;
    const vel = particle.v;
    const h = dt / SUBSTEPS;
    for (let step = 0; step < SUBSTEPS; step++) {
      const r = Math.hypot(pos[0], pos[1], pos[2]);
      // Crossing the horizon kills the particle immediately.
      if (r < HORIZON) {
        particle.alive = false;
        break;
      }
      const r2 = r * r;
      const invR = 1.0 / r;
      const ux = pos[0] * invR;
      const uy = pos[1] * invR;
      const uz = pos[2] * invR;
      // Inverse-square pull, boosted close-in to mimic strong-field bending.
      const bend =
        (1.55 / Math.max(0.36, r2)) * (1.0 + 1.4 * smoothstep(3.5, 1.5, r));
      let ax = -ux * bend;
      const ay = -uy * bend;
      let az = -uz * bend;
      // Frame dragging: tangential kick around the Y (spin) axis.
      const planar = Math.hypot(pos[0], pos[2]);
      if (planar > 0.001 && state.spin > 0.001) {
        const drag = (2.0 * state.spin) / Math.max(0.5, r2 * r);
        ax += (-pos[2] / planar) * drag * 0.55;
        az += (pos[0] / planar) * drag * 0.55;
      }
      vel[0] += ax * h;
      vel[1] += ay * h;
      vel[2] += az * h;
      // very mild damping so particles don't explode numerically near horizon
      const speed = Math.hypot(vel[0], vel[1], vel[2]);
      if (speed > 1.2) {
        vel[0] *= 1.2 / speed;
        vel[1] *= 1.2 / speed;
        vel[2] *= 1.2 / speed;
      }
      pos[0] += vel[0] * h;
      pos[1] += vel[1] * h;
      pos[2] += vel[2] * h;
    }
    if (particle.alive) {
      particle.trail.push([pos[0], pos[1], pos[2]]);
      if (particle.trail.length > 80) particle.trail.shift();
      if (particle.age > 12) particle.alive = false;
    }
  }
  // Sweep: dead particles lose one trail point per call; once the trail is
  // empty they are dropped entirely.
  const survivors = [];
  for (const particle of state.particles) {
    if (particle.alive) {
      survivors.push(particle);
      continue;
    }
    if (particle.trail.length === 0) continue;
    particle.trail.shift();
    if (particle.trail.length > 0) survivors.push(particle);
  }
  state.particles = survivors;
}

/**
 * Clamped Hermite interpolation: 0 at edge `a`, 1 at edge `b`.
 * Also valid with a > b (decreasing edges), as used by the particle
 * integrator's smoothstep(3.5, 1.5, r) call.
 */
function smoothstep(a, b, x) {
  let t = (x - a) / (b - a);
  if (t < 0) t = 0;
  else if (t > 1) t = 1;
  return t * t * (3 - 2 * t);
}

/**
 * Project a world-space point through the camera into canvas pixel coords.
 * @param {number[]} p - world point [x, y, z].
 * @param {number[]} cam - camera position [x, y, z].
 * @param {number[]} camRot - 3x3 column-major camera basis (right, up, -fwd).
 * @param {number} fov - field-of-view scale factor.
 * @param {number} w - canvas width in pixels.
 * @param {number} h - canvas height in pixels.
 * @returns {{x:number, y:number, z:number}|null} pixel coords plus view-space
 *   depth, or null when the point is behind (or too near) the camera plane.
 */
function projectToCanvas(p, cam, camRot, fov, w, h) {
  // Offset from the camera position.
  const ox = p[0] - cam[0];
  const oy = p[1] - cam[1];
  const oz = p[2] - cam[2];
  // For an orthonormal basis the inverse is the transpose, so view-space
  // coordinates are just dot products against the basis columns.
  const vx = camRot[0] * ox + camRot[1] * oy + camRot[2] * oz;
  const vy = camRot[3] * ox + camRot[4] * oy + camRot[5] * oz;
  const vz = camRot[6] * ox + camRot[7] * oy + camRot[8] * oz;
  if (vz >= -0.05) return null; // behind the camera / inside the near guard
  const aspect = w / h;
  const ndcX = vx / -vz / fov / aspect;
  const ndcY = vy / -vz / fov;
  return {
    x: (ndcX * 0.5 + 0.5) * w,
    y: (1.0 - (ndcY * 0.5 + 0.5)) * h,
    z: -vz,
  };
}

// Render particle trails and head dots onto the 2D overlay canvas, projecting
// each world-space trail point through the current camera.
function drawOverlay(canvas, state, cam, camRot, w, h) {
  if (!canvas) return;
  const ctx = canvas.getContext("2d");
  ctx.clearRect(0, 0, w, h);
  ctx.lineWidth = 2;
  ctx.lineCap = "round";
  for (const particle of state.particles) {
    const { trail } = particle;
    if (trail.length < 2) continue;
    const hue = Math.floor(particle.hue * 360);
    // Stroke segment-by-segment so opacity can ramp up toward the head.
    for (let i = 1; i < trail.length; i++) {
      const from = projectToCanvas(trail[i - 1], cam, camRot, state.fov, w, h);
      const to = projectToCanvas(trail[i], cam, camRot, state.fov, w, h);
      if (!from || !to) continue;
      const fade = i / trail.length;
      ctx.strokeStyle = `hsla(${hue}, 90%, 65%, ${fade * 0.85})`;
      ctx.beginPath();
      ctx.moveTo(from.x, from.y);
      ctx.lineTo(to.x, to.y);
      ctx.stroke();
    }
    // Bright dot at the newest trail point.
    const head = projectToCanvas(
      trail[trail.length - 1],
      cam,
      camRot,
      state.fov,
      w,
      h,
    );
    if (head) {
      ctx.fillStyle = `hsla(${hue}, 95%, 80%, 0.95)`;
      ctx.beginPath();
      ctx.arc(head.x, head.y, 4, 0, Math.PI * 2);
      ctx.fill();
    }
  }
}

// ---- look-target picker (very rough) --------------------------------------
/**
 * Very rough look-target classifier based on camera distance and pitch.
 * @param {object} s - camera state; reads `dist` and `pitch`.
 * @returns {string|null} "jet" | "horizon" | "photonSphere" | "isco" |
 *   "ergosphere", or null when nothing notable is in view.
 */
function pickFeatureFromCamera(s) {
  const r = s.dist;
  const tilt = Math.abs(s.pitch);
  // Looking steeply up/down the spin axis: the jet dominates regardless of r.
  if (tilt > 0.8) return "jet";
  // Near-equatorial view: classify by distance band.
  if (tilt < 0.15) {
    if (r < 3.2) return "horizon";
    if (r < 4.5) return "photonSphere";
    if (r < 7.5) return "isco";
    return null;
  }
  // Mid-tilt: only the ergosphere reads as a feature, and only up close.
  return r < 3.5 ? "ergosphere" : null;
}

window.BlackHoleLevel = BlackHoleLevel;
