/* global React, VT_HERO, VT_HERO_EQUIPMENTS, VT_HERO_TRANSCRIPT, VT_CATEGORIES, Badge, Btn, Dot, Icon, Waveform, EquipIcon, VideoFrame, formatTS, formatDuration */
// Technician mobile capture screen — iPad orientation

const { useState: _tcUseS, useEffect: _tcUseE, useRef: _tcUseR } = React;

function TechCaptureScreen({ onClose, visit }) {
  const [phase, setPhase] = _tcUseS('consent'); // consent | live | paused | ending
  const [elapsed, setElapsed] = _tcUseS(0);
  const [detections, setDetections] = _tcUseS([]);
  const [captions, setCaptions] = _tcUseS([]);
  const [audioLevel, setAudioLevel] = _tcUseS(0);
  const [currentScene, setCurrentScene] = _tcUseS('facade');
  // Real-mode state (quand visit est passée)
  const isRealMode = !!(visit && (visit._backendId || visit.id));
  const visitBackendId = visit && (visit._backendId || visit.id);
  const videoRef = _tcUseR(null);
  const streamRef = _tcUseR(null);
  const recorderRef = _tcUseR(null);
  const audioCtxRef = _tcUseR(null);
  const analyserRef = _tcUseR(null);
  const [uploadStatus, setUploadStatus] = _tcUseS({ sent: 0, pending: 0, errors: 0, queued: 0 });

  // Auto-flush la queue offline IndexedDB quand le réseau revient
  _tcUseE(() => {
    if (!window.AE_PWA_DB) return;
    const onFlushed = (e) => {
      const { sent, pending } = e.detail || {};
      setUploadStatus(s => ({ ...s, sent: s.sent + (sent || 0), queued: pending || 0 }));
    };
    window.addEventListener('ae:queue-flushed', onFlushed);
    // Initial : count items pending in queue
    window.AE_PWA_DB.queue.list().then(items => {
      setUploadStatus(s => ({ ...s, queued: items.length }));
    });
    return () => window.removeEventListener('ae:queue-flushed', onFlushed);
  }, []);
  const [realError, setRealError] = _tcUseS('');

  _tcUseE(() => {
    if (phase !== 'live') return;
    const t = setInterval(() => setElapsed(e => e + 1), 1000);
    const lvl = setInterval(() => setAudioLevel(Math.random() * 0.7 + 0.1), 150);
    return () => { clearInterval(t); clearInterval(lvl); };
  }, [phase]);

  // Scene progression based on elapsed
  _tcUseE(() => {
    if (elapsed < 30) setCurrentScene('facade');
    else if (elapsed < 80) setCurrentScene('utility_room');
    else if (elapsed < 140) setCurrentScene('meter_box');
    else if (elapsed < 200) setCurrentScene('boiler');
    else setCurrentScene('roof_drone');
  }, [elapsed]);

  // ── MODE RÉEL : getUserMedia + MediaRecorder + audio analyser ───────
  _tcUseE(() => {
    if (!isRealMode) return;
    if (phase !== 'live') {
      if (recorderRef.current && recorderRef.current.state !== 'inactive') {
        try { recorderRef.current.stop(); } catch(_) {}
      }
      if (streamRef.current) {
        streamRef.current.getTracks().forEach(t => t.stop());
        streamRef.current = null;
      }
      if (audioCtxRef.current) {
        try { audioCtxRef.current.close(); } catch(_) {}
        audioCtxRef.current = null;
      }
      return;
    }
    // Démarrer capture réelle
    let cancelled = false;
    const API_BASE = (window.AE_API && window.AE_API.BASE) || '';
    (async () => {
      try {
        // Appel POST /api/visite/:id/start
        fetch(`${API_BASE}/api/visite/${visitBackendId}/start`, { method: 'POST' }).catch(() => {});

        // getUserMedia
        const constraints = visit.type === 'audio'
          ? { audio: true, video: false }
          : { audio: true, video: { facingMode: 'environment', width: { ideal: 1280 }, height: { ideal: 720 } } };
        const stream = await navigator.mediaDevices.getUserMedia(constraints);
        if (cancelled) { stream.getTracks().forEach(t => t.stop()); return; }
        streamRef.current = stream;
        if (videoRef.current) {
          videoRef.current.srcObject = stream;
          videoRef.current.play().catch(() => {});
        }

        // Analyser audio pour visualisation
        try {
          const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
          audioCtxRef.current = audioCtx;
          const src = audioCtx.createMediaStreamSource(stream);
          const analyser = audioCtx.createAnalyser();
          analyser.fftSize = 128;
          src.connect(analyser);
          analyserRef.current = analyser;
          const data = new Uint8Array(analyser.frequencyBinCount);
          const tick = () => {
            if (cancelled || !analyserRef.current) return;
            analyser.getByteFrequencyData(data);
            const avg = data.reduce((a, b) => a + b, 0) / data.length;
            setAudioLevel(avg / 255);
            requestAnimationFrame(tick);
          };
          tick();
        } catch (_) {}

        // MediaRecorder avec chunks toutes les 30 secondes
        const mime = visit.type === 'audio'
          ? (MediaRecorder.isTypeSupported('audio/webm;codecs=opus') ? 'audio/webm;codecs=opus' : 'audio/webm')
          : (MediaRecorder.isTypeSupported('video/webm;codecs=vp9,opus') ? 'video/webm;codecs=vp9,opus' : 'video/webm');
        const rec = new MediaRecorder(stream, { mimeType: mime, videoBitsPerSecond: 2_500_000 });
        recorderRef.current = rec;
        rec.ondataavailable = async (e) => {
          if (!e.data || !e.data.size) return;
          setUploadStatus(s => ({ ...s, pending: s.pending + 1 }));
          const kind = visit.type === 'audio' ? 'audio' : 'video';
          const capturedAt = new Date().toISOString();

          // P3.2 : si offline, on stocke directement dans IndexedDB sans tenter le fetch
          if (!navigator.onLine && window.AE_PWA_DB) {
            await window.AE_PWA_DB.queue.add({
              visitId: visitBackendId, kind, blob: e.data, mimeType: e.data.type || mime,
              size: e.data.size, capturedAt,
            });
            setUploadStatus(s => ({ sent: s.sent, pending: s.pending - 1, errors: s.errors, queued: (s.queued || 0) + 1 }));
            return;
          }

          const fd = new FormData();
          const fname = `chunk-${Date.now()}.webm`;
          fd.append('file', e.data, fname);
          fd.append('type', kind);
          fd.append('capturedAt', capturedAt);
          try {
            const r = await fetch(`${API_BASE}/api/visite/${visitBackendId}/media`, { method: 'POST', body: fd });
            if (!r.ok) throw new Error('HTTP ' + r.status);
            setUploadStatus(s => ({ sent: s.sent + 1, pending: s.pending - 1, errors: s.errors, queued: s.queued || 0 }));
          } catch (err) {
            setUploadStatus(s => ({ sent: s.sent, pending: s.pending - 1, errors: s.errors + 1, queued: (s.queued || 0) + 1 }));
            // Fallback offline IndexedDB (via pwa-storage.jsx) — supporte des blobs de plusieurs Mo
            if (window.AE_PWA_DB) {
              await window.AE_PWA_DB.queue.add({
                visitId: visitBackendId, kind, blob: e.data, mimeType: e.data.type || mime,
                size: e.data.size, capturedAt,
              });
            }
          }
        };
        rec.onerror = (ev) => setRealError('MediaRecorder error: ' + (ev.error?.name || 'unknown'));
        rec.start(30_000); // chunk every 30s
      } catch (err) {
        setRealError('Caméra/micro inaccessible: ' + (err.message || err.name));
      }
    })();
    return () => { cancelled = true; };
  }, [phase, isRealMode, visitBackendId]);

  // Au démontage du composant, arrêter proprement
  _tcUseE(() => {
    return () => {
      if (recorderRef.current && recorderRef.current.state !== 'inactive') {
        try { recorderRef.current.stop(); } catch(_) {}
      }
      if (streamRef.current) streamRef.current.getTracks().forEach(t => t.stop());
      if (audioCtxRef.current) { try { audioCtxRef.current.close(); } catch(_) {} }
    };
  }, []);

  // Appel /stop quand on termine en mode réel
  _tcUseE(() => {
    if (!isRealMode || phase !== 'ending') return;
    const API_BASE = (window.AE_API && window.AE_API.BASE) || '';
    fetch(`${API_BASE}/api/visite/${visitBackendId}/stop`, { method: 'POST' }).catch(() => {});
    // Déclenche le pipeline IA en arrière-plan
    fetch(`${API_BASE}/api/visite/${visitBackendId}/analyze`, { method: 'POST' }).catch(() => {});
  }, [phase, isRealMode, visitBackendId]);

  // Trigger fake detections + captions over time (MODE DÉMO uniquement, pas réel)
  _tcUseE(() => {
    if (phase !== 'live' || isRealMode) return;
    const TRIGGERS = [
      { at: 3,  type: 'caption', speaker: 'tech',   text: 'Bonjour Madame, je suis Karim d\'Audits-Énergies.' },
      { at: 6,  type: 'caption', speaker: 'client', text: 'Bonjour, entrez, je vous en prie.' },
      { at: 10, type: 'caption', speaker: 'tech',   text: 'Je vais commencer par une vue d\'ensemble extérieure.' },
      { at: 16, type: 'detection', eq: { id: 'd1', category: 'pv_panels', brand: 'Toiture visible', confidence: 0.76, note: 'Potentiel PV détecté' } },
      { at: 22, type: 'caption', speaker: 'tech',   text: 'Orientation plein sud, ombrage limité — parfait pour du photovoltaïque.' },
      { at: 28, type: 'caption', speaker: 'client', text: 'On y pense depuis deux ans.' },
      { at: 34, type: 'caption', speaker: 'tech',   text: 'Montrez-moi le local technique maintenant.' },
      { at: 42, type: 'detection', eq: { id: 'd2', category: 'pac_air_eau', brand: 'Daikin', model: 'Altherma 3 R ECH²O', confidence: 0.94, note: 'PAC air/eau 16 kW · R32' } },
      { at: 46, type: 'caption', speaker: 'tech',   text: 'Ah, une Daikin Altherma, excellente installation.' },
      { at: 52, type: 'detection', eq: { id: 'd3', category: 'ballon_thermo', brand: 'Atlantic', model: 'Calypso VS 250L', confidence: 0.88, note: 'Ballon thermodynamique' } },
      { at: 58, type: 'caption', speaker: 'client', text: 'Il date de 2019, on n\'a jamais eu de souci.' },
      { at: 66, type: 'caption', speaker: 'tech',   text: 'Parfait. Le tableau électrique est où ?' },
      { at: 74, type: 'caption', speaker: 'client', text: 'Juste là dans l\'entrée.' },
      { at: 82, type: 'detection', eq: { id: 'd4', category: 'compteur', brand: 'Enedis', model: 'Linky G3', confidence: 0.97, note: 'OCR PRM complet' } },
      { at: 88, type: 'caption', speaker: 'tech',   text: 'Linky G3 — je récupère le PRM directement.' },
      { at: 96, type: 'caption', speaker: 'client', text: 'On voudrait aussi isoler les combles.' },
      { at: 102, type: 'caption', speaker: 'tech',  text: 'Bonne idée, on va monter voir.' },
    ];
    const todo = TRIGGERS.filter(t => t.at === elapsed);
    if (!todo.length) return;
    todo.forEach(t => {
      if (t.type === 'detection') {
        setDetections(prev => [{ ...t.eq, at: elapsed, fadeIn: Date.now() }, ...prev.slice(0, 4)]);
      } else if (t.type === 'caption') {
        setCaptions(prev => [...prev.slice(-3), { ...t, at: elapsed, id: Date.now() }]);
      }
    });
  }, [elapsed, phase]);

  return (
    <div style={{
      position: 'fixed', inset: 0, zIndex: 90,
      background: '#000',
      display: 'grid', placeItems: 'center',
      padding: 20,
    }}>
      {/* Close button outside bezel */}
      <button onClick={onClose} style={{
        position: 'absolute', top: 16, right: 16, zIndex: 100,
        padding: '6px 10px', background: 'rgba(255,255,255,0.1)', color: '#fff',
        border: '1px solid rgba(255,255,255,0.2)', borderRadius: 5, fontSize: 11,
        fontFamily: 'var(--font-mono)',
      }}>
        <svg width="10" height="10" viewBox="0 0 16 16" stroke="currentColor" strokeWidth="1.8" fill="none" style={{ display: 'inline-block', verticalAlign: 'middle', marginRight: 5 }}><path d="m4 4 8 8M12 4l-8 8"/></svg>
        Fermer l'aperçu terrain
      </button>

      {/* iPad bezel */}
      <div style={{
        width: 900, height: 640,
        background: '#1a1a1a',
        borderRadius: 32,
        padding: 14,
        boxShadow: '0 0 0 3px #333, 0 40px 80px rgba(0,0,0,0.6)',
        position: 'relative',
      }}>
        {/* Camera */}
        <div style={{ position: 'absolute', top: 24, left: '50%', transform: 'translateX(-50%)', width: 6, height: 6, borderRadius: '50%', background: '#0a0a0a', border: '1px solid #333' }}/>

        <div style={{
          width: '100%', height: '100%',
          background: 'var(--paper)', borderRadius: 22, overflow: 'hidden', position: 'relative',
        }}>
          {phase === 'consent' && (
            <ConsentGate
              visit={visit}
              onAccept={async () => {
                // En mode réel : enregistrer le consentement RGPD avant de démarrer
                if (isRealMode) {
                  const API_BASE = (window.AE_API && window.AE_API.BASE) || '';
                  try {
                    await fetch(`${API_BASE}/api/visite/${visitBackendId}/audio/consent`, {
                      method: 'POST',
                      headers: { 'Content-Type': 'application/json' },
                      body: JSON.stringify({ consentRecorded: true, consentTimestamp: new Date().toISOString() }),
                    });
                  } catch (_) { /* non-bloquant */ }
                }
                setPhase('live');
              }}
              onClose={onClose}
            />
          )}
          {(phase === 'live' || phase === 'paused' || phase === 'ending') && (
            <LiveCapture
              elapsed={elapsed}
              phase={phase}
              setPhase={setPhase}
              detections={detections}
              captions={captions}
              audioLevel={audioLevel}
              currentScene={currentScene}
              isRealMode={isRealMode}
              videoRef={videoRef}
              visit={visit}
              uploadStatus={uploadStatus}
              realError={realError}
              onEnd={() => { setPhase('ending'); setTimeout(onClose, 2400); }}
            />
          )}
        </div>
      </div>

      {/* Device label */}
      <div style={{ position: 'absolute', bottom: 16, left: '50%', transform: 'translateX(-50%)', fontSize: 10, color: 'rgba(255,255,255,0.4)', fontFamily: 'var(--font-mono)', letterSpacing: 1 }}>
        TABLETTE TECHNICIEN · iPad 11" · App Audits Énergies Capture
      </div>
    </div>
  );
}

// ─── Consent gate ────────────────────────────────
function ConsentGate({ onAccept, visit }) {
  const clientName = visit?.client || visit?.metadata?.clientName || 'Marie-Anne Dupuis';
  const address = visit?.clientAddress || '42 rue des Camélias, 77300 Fontainebleau';
  const ref = visit?.ref || 'DOS-2026-024';
  return (
    <div style={{ height: '100%', display: 'grid', gridTemplateRows: '48px 1fr auto', background: 'var(--paper)' }}>
      {/* iOS-ish status bar */}
      <div style={{ display: 'flex', alignItems: 'center', padding: '0 24px', fontSize: 12, fontFamily: 'var(--font-mono)', color: 'var(--ink-2)', borderBottom: '1px solid var(--hairline)' }}>
        <span style={{ fontWeight: 600 }}>14:32</span>
        <span style={{ flex: 1 }}/>
        <span style={{ display: 'inline-flex', gap: 10 }}>
          <span>4G ●●●●</span>
          <span>85%</span>
        </span>
      </div>

      <div style={{ padding: '32px 48px', overflowY: 'auto' }}>
        <div style={{ display: 'flex', alignItems: 'center', gap: 10, marginBottom: 20 }}>
          <div style={{ width: 28, height: 28, borderRadius: 6, background: 'var(--ink)', color: 'var(--signal)', display: 'grid', placeItems: 'center', fontSize: 14, fontWeight: 700 }}>DC</div>
          <div style={{ fontSize: 11, letterSpacing: 2, color: 'var(--ink-3)', fontFamily: 'var(--font-mono)' }}>DISCOVERY · TERRAIN</div>
          <span style={{ flex: 1 }}/>
          <div style={{ padding: '3px 8px', background: 'var(--signal-tint)', color: 'var(--signal-deep)', border: '1px solid var(--signal-soft)', borderRadius: 3, fontSize: 10, fontFamily: 'var(--font-mono)', fontWeight: 600 }}>{ref}</div>
        </div>

        <div style={{ fontSize: 11, color: 'var(--ink-4)', textTransform: 'uppercase', letterSpacing: 0.8, fontWeight: 600, marginBottom: 6 }}>Consentement bénéficiaire</div>
        <h1 style={{ fontSize: 26, fontWeight: 600, letterSpacing: '-0.02em', lineHeight: 1.15, marginBottom: 14 }}>Avant de commencer, l'accord explicite de {clientName}.</h1>

        <div style={{ padding: 14, background: 'var(--paper-2)', borderRadius: 6, border: '1px solid var(--hairline)', marginBottom: 16, fontSize: 13, lineHeight: 1.6, color: 'var(--ink-2)' }}>
          « Je soussigné(e) <strong>{clientName}</strong>, {address && <>situé(e) au {address}, </>}autorise <strong>Audits-Énergies</strong> à <strong>enregistrer les échanges audio et vidéo</strong> lors de cette visite technique, à fins d'analyse automatisée pour la constitution d'un dossier CEE. Les données sont conservées 3 ans, accessibles sur demande. Je peux retirer mon consentement à tout moment. »
        </div>

        <div style={{ display: 'grid', gridTemplateColumns: 'repeat(3, 1fr)', gap: 8, marginBottom: 18 }}>
          {[
            { icon: Icon.lock, label: 'Chiffrement AES-256', sub: 'bout-en-bout' },
            { icon: Icon.doc, label: 'RGPD + CNIL', sub: 'Art. 6-1-a' },
            { icon: Icon.refresh, label: 'Retrait possible', sub: 'à tout moment' },
          ].map((p, i) => (
            <div key={i} style={{ padding: 10, background: 'var(--paper-2)', borderRadius: 5, display: 'flex', alignItems: 'center', gap: 8 }}>
              <p.icon style={{ color: 'var(--signal-deep)' }}/>
              <div>
                <div style={{ fontSize: 11, fontWeight: 600, color: 'var(--ink-2)' }}>{p.label}</div>
                <div style={{ fontSize: 10, color: 'var(--ink-4)' }}>{p.sub}</div>
              </div>
            </div>
          ))}
        </div>

        <div style={{ display: 'flex', alignItems: 'flex-start', gap: 10, padding: 12, background: 'var(--signal-tint)', border: '1px solid var(--signal-soft)', borderRadius: 6 }}>
          <svg width="16" height="16" viewBox="0 0 16 16" stroke="var(--signal-deep)" strokeWidth="1.5" fill="none"><path d="M8 2a6 6 0 1 1 0 12 6 6 0 0 1 0-12"/><path d="M8 6v4M8 12v.01"/></svg>
          <div style={{ flex: 1, fontSize: 12, color: 'var(--ink-2)' }}>
            L'enregistrement se fait en local puis chiffré avant envoi. Le hash SHA-256 du consentement est inscrit dans le ledger avec horodatage.
          </div>
        </div>
      </div>

      <div style={{ padding: '16px 48px 20px', borderTop: '1px solid var(--hairline)', background: 'var(--paper-2)', display: 'flex', gap: 10 }}>
        <button style={{ flex: '0 0 auto', padding: '14px 18px', background: 'transparent', color: 'var(--ink-3)', border: '1px solid var(--line-2)', borderRadius: 7, fontSize: 14, fontWeight: 500 }}>
          Refuser · reporter
        </button>
        <button onClick={onAccept} style={{
          flex: 1, padding: '14px 18px',
          background: 'var(--ink)', color: 'var(--signal)',
          border: 'none', borderRadius: 7, fontSize: 14, fontWeight: 600,
          display: 'flex', alignItems: 'center', justifyContent: 'center', gap: 10,
          boxShadow: '0 1px 0 rgba(255,255,255,0.1) inset, 0 4px 10px rgba(0,0,0,0.3)',
        }}>
          <svg width="16" height="16" viewBox="0 0 16 16" stroke="currentColor" strokeWidth="2" fill="none"><path d="m3 8 3 3 7-7"/></svg>
          J'accepte — démarrer la visite
        </button>
      </div>
    </div>
  );
}

// ─── Live capture ────────────────────────────────
/**
 * Live recording view: video viewport (real stream, audio-only placeholder,
 * or demo VideoFrame), HUD overlays (REC timer, upload status, scene ticker,
 * detection rail, captions) and the transport controls.
 *
 * Props come straight from TechCaptureScreen; `setPhase` drives pause/resume
 * and `onEnd` closes the visit. `uploadStatus` is the chunk-upload counter
 * object ({ sent, pending, errors, queued }); `realError` is a capture error
 * string shown as a banner in real mode.
 */
function LiveCapture({ elapsed, phase, setPhase, detections, captions, audioLevel, currentScene, onEnd, isRealMode, videoRef, visit, uploadStatus, realError }) {
  // Only the most recent caption segment is displayed.
  const lastCaption = captions[captions.length - 1];
  return (
    <div style={{ height: '100%', position: 'relative', background: '#0a0a0a' }}>
      {/* Video viewport — three variants: real audio-only placeholder, real
          camera <video>, or the simulated demo frame */}
      <div style={{ position: 'absolute', inset: 0 }}>
        {isRealMode ? (
          visit && visit.type === 'audio' ? (
            <div style={{ height: '100%', display: 'grid', placeItems: 'center', background: 'linear-gradient(135deg, #0a0a0a, #1a1a2e)' }}>
              <div style={{ textAlign: 'center', color: '#fff' }}>
                <svg width="80" height="80" viewBox="0 0 24 24" stroke="var(--signal)" strokeWidth="1.2" fill="none" style={{ marginBottom: 16 }}>
                  <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"/>
                  <path d="M19 10v2a7 7 0 0 1-14 0v-2"/>
                  <line x1="12" y1="19" x2="12" y2="23"/>
                  <line x1="8" y1="23" x2="16" y2="23"/>
                </svg>
                <div style={{ fontSize: 14, color: 'rgba(255,255,255,0.7)', fontFamily: 'var(--font-mono)', letterSpacing: 2, textTransform: 'uppercase' }}>Capture audio</div>
                <div style={{ fontSize: 24, fontWeight: 600, marginTop: 8 }}>{formatDuration(elapsed)}</div>
                <div style={{ fontSize: 11, color: 'rgba(255,255,255,0.5)', marginTop: 12 }}>Niveau : {Math.round(audioLevel * 100)}%</div>
              </div>
            </div>
          ) : (
            <video ref={videoRef} playsInline muted autoPlay style={{ width: '100%', height: '100%', objectFit: 'cover', background: '#000' }} />
          )
        ) : (
          <VideoFrame scene={currentScene} timestamp={elapsed}/>
        )}
        {/* Capture error banner (real mode only) */}
        {isRealMode && realError && (
          <div style={{ position: 'absolute', top: 60, left: '50%', transform: 'translateX(-50%)', padding: '8px 14px', background: 'rgba(230,107,92,0.95)', color: '#fff', fontSize: 12, borderRadius: 6, maxWidth: 400, textAlign: 'center' }}>
            ⚠ {realError}
          </div>
        )}
      </div>

      {/* Top HUD — REC/PAUSE pill, visit ref, upload/analysis status, network */}
      <div style={{ position: 'absolute', top: 0, left: 0, right: 0, padding: '14px 18px', display: 'flex', alignItems: 'center', gap: 10, background: 'linear-gradient(to bottom, rgba(0,0,0,0.6), transparent)', fontFamily: 'var(--font-mono)' }}>
        <div style={{ padding: '4px 10px', background: phase === 'paused' ? 'rgba(245,200,80,0.2)' : 'rgba(230,107,92,0.2)', color: phase === 'paused' ? '#f5c850' : 'var(--rouge)', border: `1px solid ${phase === 'paused' ? '#f5c850' : 'var(--rouge)'}`, fontSize: 11, letterSpacing: 1, fontWeight: 700, display: 'flex', alignItems: 'center', gap: 6 }}>
          {phase === 'paused'
            ? <>⏸ PAUSE</>
            : <>
                <span style={{ width: 8, height: 8, borderRadius: '50%', background: 'var(--rouge)', animation: 'dot-pulse 1s infinite' }}/>
                REC · {formatDuration(elapsed)}
              </>
          }
        </div>
        {/* Visit reference: real ref when available, else a pseudo-ref
            derived from the clock (demo) */}
        <div style={{ padding: '4px 10px', background: 'rgba(0,0,0,0.55)', color: '#fff', fontSize: 11 }}>
          {isRealMode && visit?.ref ? visit.ref : `VT-2026-${String(Math.floor(Date.now() / 1000) % 1000).padStart(3, '0')}`}
        </div>
        <span style={{ flex: 1 }}/>
        {/* Status pill: dot tone prioritizes errors > offline queue > uploading > idle */}
        {isRealMode ? (
          <div style={{ padding: '4px 10px', background: 'rgba(0,0,0,0.55)', color: '#fff', fontSize: 11, display: 'flex', alignItems: 'center', gap: 6 }}>
            <Dot tone={uploadStatus?.errors > 0 ? 'rouge' : uploadStatus?.queued > 0 ? 'amber' : uploadStatus?.pending > 0 ? 'plasma' : 'signal'} size={6}/>
            {uploadStatus?.pending > 0
              ? `Upload en cours · ${uploadStatus.pending}`
              : uploadStatus?.queued > 0
                ? `📵 ${uploadStatus.queued} en attente (offline)`
                : `Chunks envoyés · ${uploadStatus?.sent || 0}`}
            {uploadStatus?.errors > 0 && <span style={{ color: 'var(--rouge)' }}> · {uploadStatus.errors} err</span>}
          </div>
        ) : (
          <div style={{ padding: '4px 10px', background: 'rgba(0,0,0,0.55)', color: '#fff', fontSize: 11, display: 'flex', alignItems: 'center', gap: 6 }}>
            <Dot tone="signal" size={6}/>
            Claude · analyse live
          </div>
        )}
        <div style={{ padding: '4px 10px', background: 'rgba(0,0,0,0.55)', color: '#fff', fontSize: 11 }}>
          {isRealMode ? (navigator.onLine ? 'En ligne' : 'Hors ligne') : '4G · 42 Mbps'}
        </div>
      </div>

      {/* Scene label ticker */}
      <div style={{ position: 'absolute', top: 60, left: 18, padding: '3px 9px', background: 'rgba(0,0,0,0.55)', color: 'var(--signal)', fontSize: 10, fontFamily: 'var(--font-mono)', letterSpacing: 1, textTransform: 'uppercase', fontWeight: 700, display: 'flex', alignItems: 'center', gap: 6 }}>
        <span style={{ width: 4, height: 4, background: 'var(--signal)', borderRadius: '50%' }}/>
        SCÈNE · {({ facade: 'Façade extérieure', utility_room: 'Local technique', meter_box: 'Tableau électrique', boiler: 'Chaudière', roof_drone: 'Toiture · drone' })[currentScene]}
      </div>

      {/* Live AI detections rail — left side; newest first, older cards fade */}
      <div style={{ position: 'absolute', left: 16, top: 100, bottom: 160, width: 240, display: 'flex', flexDirection: 'column', gap: 6, overflow: 'hidden' }}>
        {detections.slice(0, 4).map((d, i) => (
          <DetectionCard key={d.at} d={d} age={elapsed - d.at} opacity={1 - i * 0.15}/>
        ))}
      </div>

      {/* Transcript captions — bottom center */}
      {lastCaption && (
        <div style={{ position: 'absolute', bottom: 118, left: 60, right: 60, display: 'flex', justifyContent: 'center' }}>
          <div style={{
            maxWidth: 620, padding: '10px 16px',
            background: 'rgba(0,0,0,0.78)', color: '#fff',
            fontSize: 15, lineHeight: 1.4, borderRadius: 6,
            borderLeft: `3px solid ${lastCaption.speaker === 'tech' ? 'var(--signal)' : '#f5c850'}`,
          }}>
            <span style={{ fontSize: 10, color: lastCaption.speaker === 'tech' ? 'var(--signal)' : '#f5c850', fontFamily: 'var(--font-mono)', textTransform: 'uppercase', letterSpacing: 0.5, fontWeight: 700, marginRight: 8 }}>
              {lastCaption.speaker === 'tech' ? 'KARIM' : 'Mme DUPUIS'}
            </span>
            {lastCaption.text}
          </div>
        </div>
      )}

      {/* Bottom control bar */}
      <div style={{ position: 'absolute', bottom: 0, left: 0, right: 0, padding: '14px 18px 16px', background: 'linear-gradient(to top, rgba(0,0,0,0.75), transparent)' }}>
        {/* Audio level + waveform */}
        <div style={{ display: 'flex', alignItems: 'center', gap: 10, marginBottom: 10 }}>
          <Icon.mic style={{ color: '#fff' }}/>
          <div style={{ flex: 1, height: 22, background: 'rgba(255,255,255,0.08)', borderRadius: 3, overflow: 'hidden', position: 'relative' }}>
            {/* Seed changes every 2 s so the waveform appears to scroll */}
            <Waveform seed={`live-${Math.floor(elapsed / 2)}`} width={600} height={22} bars={80} color="rgba(127,255,183,0.7)" bg="transparent" progress={1}/>
            {/* Live peak */}
            <div style={{ position: 'absolute', right: 8, top: '50%', transform: 'translateY(-50%)', display: 'flex', alignItems: 'center', gap: 2 }}>
              {Array.from({ length: 5 }).map((_, i) => {
                const active = audioLevel > i * 0.15;
                return <span key={i} style={{ width: 2, height: 6 + i * 2, background: active ? 'var(--signal)' : 'rgba(255,255,255,0.15)' }}/>;
              })}
            </div>
          </div>
          <span className="mono" style={{ fontSize: 11, color: 'rgba(255,255,255,0.7)' }}>
            {detections.length} détections · {captions.length} segments
          </span>
        </div>

        {/* Main controls */}
        <div style={{ display: 'flex', alignItems: 'center', gap: 10 }}>
          <TechActionButton icon="🎙️" label="Marquer" sub="moment clé" onClick={() => {}}/>
          <TechActionButton icon="📍" label="GPS" sub="position"/>
          <TechActionButton icon="📷" label="Photo HD"/>
          <span style={{ flex: 1 }}/>

          {phase === 'live' && (
            <>
              {/* Pause + stop */}
              <button onClick={() => setPhase('paused')} style={roundBtn('#fff', 'rgba(255,255,255,0.15)')}>
                <svg width="18" height="18" viewBox="0 0 16 16" fill="currentColor"><rect x="4" y="3" width="3" height="10"/><rect x="9" y="3" width="3" height="10"/></svg>
              </button>
              <button onClick={onEnd} style={roundBtn('#0B1F12', 'var(--rouge)', 70)}>
                <svg width="22" height="22" viewBox="0 0 16 16" fill="currentColor"><rect x="4" y="4" width="8" height="8" rx="1"/></svg>
              </button>
            </>
          )}
          {phase === 'paused' && (
            <>
              {/* Resume + stop */}
              <button onClick={() => setPhase('live')} style={roundBtn('#0B1F12', 'var(--signal)', 70)}>
                <svg width="22" height="22" viewBox="0 0 16 16" fill="currentColor"><path d="M4 3l9 5-9 5z"/></svg>
              </button>
              <button onClick={onEnd} style={roundBtn('#fff', 'rgba(230,107,92,0.25)')}>
                <svg width="18" height="18" viewBox="0 0 16 16" fill="currentColor"><rect x="4" y="4" width="8" height="8" rx="1"/></svg>
              </button>
            </>
          )}
          {phase === 'ending' && (
            <div style={{ padding: '14px 20px', background: 'var(--signal)', color: '#0B1F12', fontWeight: 700, borderRadius: 10, display: 'flex', alignItems: 'center', gap: 10 }}>
              <svg width="16" height="16" viewBox="0 0 16 16" stroke="currentColor" strokeWidth="2" fill="none"><path d="m3 8 3 3 7-7"/></svg>
              Upload sécurisé · Analyse Claude en cours…
            </div>
          )}
        </div>
      </div>

      {/* Ending overlay */}
      {phase === 'ending' && (
        <div style={{ position: 'absolute', inset: 0, background: 'rgba(0,0,0,0.85)', display: 'grid', placeItems: 'center', animation: 'fadein 0.3s' }}>
          <div style={{ textAlign: 'center', color: '#fff' }}>
            <svg width="48" height="48" viewBox="0 0 48 48" fill="none" stroke="var(--signal)" strokeWidth="2.5" style={{ marginBottom: 16 }}>
              <circle cx="24" cy="24" r="20"/>
              <path d="m14 24 7 7 13-14"/>
            </svg>
            <div style={{ fontSize: 20, fontWeight: 600, marginBottom: 6 }}>Visite clôturée</div>
            <div style={{ fontSize: 13, color: 'rgba(255,255,255,0.6)', fontFamily: 'var(--font-mono)' }}>
              {formatDuration(elapsed)} · {detections.length} détections · rapport dans ~2 min
            </div>
          </div>
        </div>
      )}
    </div>
  );
}

function DetectionCard({ d, age, opacity }) {
  const cat = VT_CATEGORIES[d.category] || { label: d.category, tone: 'signal' };
  const conf = Math.round((d.confidence || 0.85) * 100);
  return (
    <div style={{
      padding: 8, background: 'rgba(11,31,18,0.85)', backdropFilter: 'blur(8px)',
      border: '1px solid rgba(127,255,183,0.3)', borderLeft: '3px solid var(--signal)',
      borderRadius: 4, color: '#fff',
      opacity, transform: age < 1 ? 'translateX(-20px)' : 'translateX(0)', transition: 'all 300ms',
      animation: age < 1 ? 'slide-in-left 400ms' : 'none',
    }}>
      <div style={{ display: 'flex', alignItems: 'center', gap: 6 }}>
        <div style={{ width: 24, height: 24, borderRadius: 3, background: 'rgba(127,255,183,0.15)', color: 'var(--signal)', display: 'grid', placeItems: 'center' }}>
          <EquipIcon cat={d.category} size={14}/>
        </div>
        <div style={{ flex: 1, minWidth: 0 }}>
          <div style={{ fontSize: 10, color: 'var(--signal)', fontFamily: 'var(--font-mono)', textTransform: 'uppercase', letterSpacing: 0.5, fontWeight: 700 }}>
            {cat.label}
          </div>
          <div style={{ fontSize: 12, fontWeight: 600, whiteSpace: 'nowrap', overflow: 'hidden', textOverflow: 'ellipsis' }}>{d.brand}</div>
        </div>
        <div style={{ textAlign: 'right' }}>
          <div className="mono" style={{ fontSize: 11, fontWeight: 700, color: conf > 85 ? 'var(--signal)' : '#f5c850' }}>{conf}%</div>
          <div className="mono" style={{ fontSize: 8, color: 'rgba(255,255,255,0.4)' }}>+{age}s</div>
        </div>
      </div>
      {d.note && <div style={{ marginTop: 4, fontSize: 10, color: 'rgba(255,255,255,0.7)' }}>{d.note}</div>}
    </div>
  );
}

function TechActionButton({ icon, label, sub, onClick }) {
  return (
    <button onClick={onClick} style={{
      padding: '8px 10px', background: 'rgba(255,255,255,0.1)', color: '#fff',
      border: '1px solid rgba(255,255,255,0.15)', borderRadius: 6,
      display: 'flex', flexDirection: 'column', alignItems: 'flex-start',
      minWidth: 70, backdropFilter: 'blur(6px)',
    }}>
      <div style={{ fontSize: 14 }}>{icon} <span style={{ fontSize: 11, fontWeight: 600, marginLeft: 2 }}>{label}</span></div>
      {sub && <div style={{ fontSize: 9, color: 'rgba(255,255,255,0.5)', fontFamily: 'var(--font-mono)' }}>{sub}</div>}
    </button>
  );
}

/**
 * Builds the inline style for a circular transport button (pause/stop/resume).
 * @param {string} color - foreground (icon) color
 * @param {string} bg - background fill
 * @param {number} [size=48] - diameter in px
 * @returns {object} React inline-style object
 */
function roundBtn(color, bg, size = 48) {
  const base = {
    display: 'grid',
    placeItems: 'center',
    border: 'none',
    cursor: 'pointer',
    boxShadow: '0 4px 10px rgba(0,0,0,0.4)',
  };
  return {
    width: size,
    height: size,
    borderRadius: '50%',
    background: bg,
    color,
    ...base,
  };
}

// Inject the keyframe animations this screen relies on, exactly once per
// page (guarded by the style tag id; the document check keeps non-browser
// environments from crashing at load time).
if (typeof document !== 'undefined' && !document.getElementById('tech-capture-anim')) {
  const s = document.createElement('style');
  s.id = 'tech-capture-anim';
  s.textContent = `
    @keyframes slide-in-left { from { transform: translateX(-30px); opacity: 0 } to { transform: translateX(0); opacity: 1 } }
    @keyframes fadein { from { opacity: 0 } to { opacity: 1 } }
  `;
  document.head.appendChild(s);
}

// Expose the screen as a global — this app loads components as plain
// scripts (see the /* global */ header), not ES modules.
Object.assign(window, { TechCaptureScreen });
