// Hero variants for Premura landing page
// Three visual treatments — selectable via Tweaks.

// Tiny inline SVG glyphs shared across the hero variants. Every stroke uses
// `currentColor`, so each usage site tints the icon via CSS `color`.

// Handset glyph — signals "phone call" in footers and eyebrows.
const PHONE_ICON = (
  <svg width="11" height="11" viewBox="0 0 20 20" fill="none" stroke="currentColor" strokeWidth="1.6" strokeLinecap="round" strokeLinejoin="round">
    <path d="M4 4.5a1.5 1.5 0 0 1 1.5-1.5h2l1.5 4-2 1a11 11 0 0 0 5 5l1-2 4 1.5v2a1.5 1.5 0 0 1-1.5 1.5A13 13 0 0 1 4 4.5Z"/>
  </svg>
);

// Checkmark — success/"booked" states.
const CHECK_ICON = (
  <svg width="11" height="11" viewBox="0 0 20 20" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
    <path d="M4 10.5 8 14l8-9"/>
  </svg>
);

// Calendar — scheduling-related states.
const CAL_ICON = (
  <svg width="12" height="12" viewBox="0 0 20 20" fill="none" stroke="currentColor" strokeWidth="1.6" strokeLinecap="round" strokeLinejoin="round">
    <rect x="3" y="4.5" width="14" height="12" rx="1.5"/>
    <path d="M3 8h14M7 3v3M13 3v3"/>
  </svg>
);

// ---------- Variant A: Live VOICE call ----------
// A phone-call visual: live waveform + closed-captioning so the audio
// nature of the product is unmistakable.
//
// `t` is the start time (seconds) of each line in assets/sample-call.mp3 —
// keep these in sync with the recording if it is ever re-cut.
// `who` must be "caller" or "agent": it selects the caption styling and
// which speaker tile lights up in HeroTranscript.
const TIMINGS = [
  { who: "caller", text: "Hi, my crown popped off this morning. Can someone see me today?", t: 0.000 },
  { who: "agent",  text: "Oh no — that sounds uncomfortable. Let me check the schedule. Are you a patient with us already?", t: 3.833 },
  { who: "caller", text: "Yeah, my name's Sarah Chen.", t: 9.752 },
  { who: "agent",  text: "Got it. Dr. Patel has a two-fifteen today, or nine-forty tomorrow morning. Which works better?", t: 11.849 },
  { who: "caller", text: "Today, two-fifteen — definitely today.", t: 18.733 },
  { who: "agent",  text: "Booked. I'm sending a confirmation to the number on file.", t: 21.846 },
];
// Timing/visual constants for the sample-call hero.
const AUDIO_DURATION = 25.846;
const AUDIO_SRC = "assets/sample-call.mp3";
const WAVE_BAR_COUNT = 28;
// A line counts as "speaking" until SPEAKING_TAIL_GAP seconds before the
// next line starts — in that short gap the speaker tile goes idle while
// the caption stays on screen.
const SPEAKING_TAIL_GAP = 0.35;

// Deterministic pseudo-random bar heights so the idle/preview waveform
// looks organic without needing an actual randomness source (and renders
// identically on every load).
const WAVE_BARS = [];
for (let i = 0; i < WAVE_BAR_COUNT; i++) {
  const organic = Math.abs(Math.sin(i * 1.7) * Math.cos(i * 0.6));
  WAVE_BARS.push({
    h: 14 + Math.round(34 * organic),
    delay: (i % 7) * 0.08,
  });
}

// Transport glyphs for the small footer pause/resume control.
const PLAY_ICON = (
  <svg width="16" height="16" viewBox="0 0 20 20" fill="currentColor" aria-hidden="true">
    <path d="M6.5 4.5v11l9-5.5z"/>
  </svg>
);
const PAUSE_ICON = (
  <svg width="16" height="16" viewBox="0 0 20 20" fill="currentColor" aria-hidden="true">
    <rect x="5.5" y="4.5" width="3.2" height="11" rx="1"/>
    <rect x="11.3" y="4.5" width="3.2" height="11" rx="1"/>
  </svg>
);
// Speaker glyph used on the big primary overlay — communicates "audio
// available, click for sound" rather than the more ambiguous play triangle.
const SPEAKER_ICON = (
  <svg width="22" height="22" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="1.9" strokeLinecap="round" strokeLinejoin="round" aria-hidden="true">
    <path d="M5 9h3l5-4v14l-5-4H5z" fill="currentColor" stroke="none"/>
    <path d="M16.5 8.5a5 5 0 0 1 0 7"/>
    <path d="M19 6a8 8 0 0 1 0 12"/>
  </svg>
);

// Map a playback time `t` (seconds) to the index of the transcript line
// that should be shown: the last TIMINGS entry whose start time has been
// reached, with a 1ms tolerance for floating-point jitter in
// `audio.currentTime`. Times before the first cue map to index 0.
function findActiveIndex(t) {
  let active = 0;
  for (let i = 0; i < TIMINGS.length && TIMINGS[i].t <= t + 1e-3; i++) {
    active = i;
  }
  return active;
}

function HeroTranscript() {
  // Variant A card: a simulated live phone call. The <audio> clip autoplays
  // muted so the hero feels alive on load; clicking "Hear this call"
  // unmutes, restarts from t=0, and lazily builds a Web Audio analyser that
  // drives the waveform bars with real amplitude data.
  //
  // Imperative handles: the audio element, the lazily-built audio graph
  // (context / analyser / media-element source / frequency buffer), the
  // active requestAnimationFrame id, the bar DOM nodes, and the card root
  // (observed so the call pauses when scrolled off-screen).
  const audioRef = React.useRef(null);
  const audioCtxRef = React.useRef(null);
  const analyserRef = React.useRef(null);
  const sourceRef = React.useRef(null);
  const freqDataRef = React.useRef(null);
  const rafRef = React.useRef(0);
  const barRefs = React.useRef([]);
  const cardRef = React.useRef(null);

  const [currentTime, setCurrentTime] = React.useState(0);
  // playState: "idle" (autoplay not yet started) | "playing" | "paused" | "ended"
  const [playState, setPlayState] = React.useState("idle");
  // userOptedIn flips true the first time the user clicks "Hear this call".
  // It's the single switch that controls (a) whether audio is muted and
  // (b) whether the big primary overlay is shown vs the small footer button.
  const [userOptedIn, setUserOptedIn] = React.useState(false);
  // Flips true once the analyser is built (lazily, on first user gesture).
  // The waveform RAF loop re-evaluates which tick variant to use when this
  // changes, so amplitude bars kick in once the user opts into sound.
  const [analyserReady, setAnalyserReady] = React.useState(false);

  // Wire audio element events + belt-and-suspenders autoplay kickoff.
  //
  // The `muted autoPlay preload="auto"` attributes on the <audio> element are
  // what browsers actually honour for autoplay heuristics — but on some
  // setups (slow networks, certain Safari configs) the autoplay-attribute
  // path silently doesn't fire. So we also imperatively call .play() once
  // the element reports it has enough data buffered (`canplay`). Both paths
  // are no-ops if the audio is already playing.
  React.useEffect(() => {
    const a = audioRef.current;
    if (!a) return;
    const onTime = () => setCurrentTime(a.currentTime);
    const onPlay = () => setPlayState("playing");
    const onPause = () => setPlayState(a.ended ? "ended" : "paused");
    const onEnded = () => { setPlayState("ended"); setCurrentTime(AUDIO_DURATION); };
    a.addEventListener("timeupdate", onTime);
    a.addEventListener("play", onPlay);
    a.addEventListener("pause", onPause);
    a.addEventListener("ended", onEnded);

    // Imperative fallback: try to start the muted call once data is ready,
    // in case the autoplay attribute alone didn't kick it off.
    a.muted = true;
    const tryAutoplay = () => { a.play().catch(() => { /* blocked — user can click "Hear" */ }); };
    if (a.readyState >= 2) {
      tryAutoplay();
    } else {
      a.addEventListener("canplay", tryAutoplay, { once: true });
    }

    return () => {
      a.removeEventListener("timeupdate", onTime);
      a.removeEventListener("play", onPlay);
      a.removeEventListener("pause", onPause);
      a.removeEventListener("ended", onEnded);
      // Harmless if the { once: true } listener already fired and removed itself.
      a.removeEventListener("canplay", tryAutoplay);
    };
  }, []);

  // Pause audio when the hero scrolls out of view (no auto-resume).
  React.useEffect(() => {
    const el = cardRef.current;
    if (!el || typeof IntersectionObserver === "undefined") return;
    const obs = new IntersectionObserver((entries) => {
      for (const e of entries) {
        if (!e.isIntersecting && audioRef.current && !audioRef.current.paused) {
          audioRef.current.pause();
        }
      }
    }, { threshold: 0.25 });
    obs.observe(el);
    return () => obs.disconnect();
  }, []);

  // Drive waveform bar heights — analyser amplitude while playing, gentle
  // breathing sine otherwise. Heights are written directly to the DOM via
  // refs (no React state) so the 60fps loop never triggers re-renders.
  React.useEffect(() => {
    function tickAmplitude() {
      rafRef.current = requestAnimationFrame(tickAmplitude);
      const analyser = analyserRef.current;
      const data = freqDataRef.current;
      const bars = barRefs.current;
      if (!analyser || !data) return;
      analyser.getByteFrequencyData(data);
      for (let i = 0; i < bars.length; i++) {
        if (!bars[i]) continue;
        // Skip the DC bin; fold from both sides of the spectrum so the
        // waveform reads as roughly symmetric.
        const mid = Math.floor(bars.length / 2);
        const bin = i < mid
          ? 1 + (mid - i)
          : 1 + (i - mid);
        const v = data[Math.min(bin, data.length - 1)] || 0;
        const h = 3 + Math.round((v / 255) * 56);
        bars[i].style.height = `${h}px`;
      }
    }
    function tickIdle() {
      rafRef.current = requestAnimationFrame(tickIdle);
      const bars = barRefs.current;
      const now = performance.now() / 700;
      for (let i = 0; i < bars.length; i++) {
        if (!bars[i]) continue;
        // Low, slow "breathing" motion — phase-shifted per bar.
        const h = 5 + Math.round(3 * Math.abs(Math.sin(now + i * 0.42)));
        bars[i].style.height = `${h}px`;
      }
    }
    // (Re)start whichever loop matches the current state; the cleanup
    // below cancels the previous one when deps change or on unmount.
    cancelAnimationFrame(rafRef.current);
    if (playState === "playing" && analyserRef.current) {
      tickAmplitude();
    } else {
      tickIdle();
    }
    return () => cancelAnimationFrame(rafRef.current);
  }, [playState, analyserReady]);

  // Lazily build the Web Audio graph the first time the user presses play.
  // createMediaElementSource must be called once per audio element for its
  // lifetime; we guard with the ref.
  async function ensureAudioGraph() {
    if (audioCtxRef.current || !audioRef.current) return;
    const Ctx = window.AudioContext || window.webkitAudioContext;
    if (!Ctx) return;
    const ctx = new Ctx();
    const analyser = ctx.createAnalyser();
    analyser.fftSize = 128; // 64 frequency bins — plenty for 28 bars
    analyser.smoothingTimeConstant = 0.78;
    const src = ctx.createMediaElementSource(audioRef.current);
    src.connect(analyser);
    analyser.connect(ctx.destination);
    audioCtxRef.current = ctx;
    analyserRef.current = analyser;
    sourceRef.current = src;
    freqDataRef.current = new Uint8Array(analyser.frequencyBinCount);
    setAnalyserReady(true);
  }

  // Primary action — "Hear this call". Single click should:
  //   1. unmute the audio
  //   2. restart from t=0 so the listener gets the full demo, not a mid-call
  //      cut-in (the muted autoplay was just for the page to feel alive)
  //   3. start the audio (covers the case where autoplay was blocked entirely)
  //   4. lazily build the analyser so the waveform switches to real amplitude
  async function handleHearClick() {
    const a = audioRef.current;
    if (!a) return;
    try { await ensureAudioGraph(); } catch (_) {}
    // Contexts created outside a user gesture start suspended; this click
    // IS a gesture, so resume before playing.
    if (audioCtxRef.current && audioCtxRef.current.state === "suspended") {
      try { await audioCtxRef.current.resume(); } catch (_) {}
    }
    a.currentTime = 0;
    setCurrentTime(0);
    a.muted = false;
    setUserOptedIn(true);
    try {
      await a.play();
    } catch (_) {
      // Unlikely after an explicit click, but don't blow up if play rejects.
    }
  }

  // Secondary action — only visible after the user has opted in. Toggles
  // pause/resume in the middle of a call.
  async function handlePauseResumeClick() {
    const a = audioRef.current;
    if (!a) return;
    if (playState === "playing") {
      a.pause();
      return;
    }
    try { await a.play(); } catch (_) {}
  }

  // Derive what to render from currentTime + playState
  const ended = playState === "ended";
  const idx = findActiveIndex(currentTime);
  const current = TIMINGS[idx] || TIMINGS[0];
  const nextLine = TIMINGS[idx + 1];
  // Last line has no successor — treat just before the clip tail as its end.
  const lineEnd = nextLine ? nextLine.t - SPEAKING_TAIL_GAP : AUDIO_DURATION - 0.15;
  const inGap = currentTime >= lineEnd && !ended;
  const isSpeaking = playState === "playing" && !inGap && !ended;
  const activeSpeaker = isSpeaking ? current.who : "idle";

  const captionWho = ended ? null : current.who;
  const captionText = ended
    ? "Confirmation sent. Have a good day."
    : current.text;

  // Elapsed-time readout, mm:ss.
  const elapsed = ended ? Math.ceil(AUDIO_DURATION) : Math.floor(currentTime);
  const mm = String(Math.floor(elapsed / 60)).padStart(2, "0");
  const ss = String(elapsed % 60).padStart(2, "0");

  // NOTE: hard-coded cue indices — TIMINGS[5] is the "Booked." line and
  // TIMINGS[3] the scheduling offer. Keep in sync if the script changes.
  const reachedBooking = ended || currentTime >= TIMINGS[5].t;
  const reachedScheduling = currentTime >= TIMINGS[3].t;

  // Show the big primary "Hear this call" overlay until the user opts in.
  // When the call ends — even after they've heard it — invite a replay with
  // the same prominent affordance, since the secondary footer button hides
  // at the ended state.
  const showHearOverlay = !userOptedIn || ended;
  const hearLabel = ended
    ? (userOptedIn ? "Hear it again" : "Hear this call")
    : "Hear this call";
  // Footer secondary control only after opt-in, and only mid-call.
  const showFooterButton = userOptedIn && !ended;

  return (
    <div className="call-card" ref={cardRef}>
      {/* `muted autoPlay preload="auto"` is what browsers honour for autoplay
          — these have to be on the element at parse time, not set imperatively
          afterwards. The `muted` prop flips when the user clicks "Hear this
          call" and React re-renders the element with the new attribute. */}
      <audio
        ref={audioRef}
        src={AUDIO_SRC}
        preload="auto"
        playsInline
        muted={!userOptedIn}
        autoPlay
      />

      {/* Header: caller ID + live status */}
      <div className="call-head">
        <div className="call-head-left">
          <div className="call-avatar">SC</div>
          <div>
            <div className="call-meta-l1">
              <svg className="phone-icon" width="13" height="13" viewBox="0 0 20 20" fill="none" stroke="currentColor" strokeWidth="1.7" strokeLinecap="round" strokeLinejoin="round">
                <path d="M4 4.5a1.5 1.5 0 0 1 1.5-1.5h2l1.5 4-2 1a11 11 0 0 0 5 5l1-2 4 1.5v2a1.5 1.5 0 0 1-1.5 1.5A13 13 0 0 1 4 4.5Z"/>
              </svg>
              Sarah Chen
            </div>
            <div className="call-meta-l2">
              <span>Incoming voice call</span>
              <span className="sep"></span>
              <span>+1 (415) 555-0148</span>
            </div>
          </div>
        </div>
        <div className={`call-status ${playState === "playing" ? "is-live" : ""}`}>
          <span className="live-dot"></span>
          {ended ? `Call ended · ${mm}:${ss}` : playState === "playing" ? `On call · ${mm}:${ss}` : `Sample call · ${mm}:${ss}`}
        </div>
      </div>

      {/* Voice stage: who's talking + waveform + captions */}
      <div className="voice-stage">
        <div className="speaker-row">
          <div className={`speaker-tile agent ${activeSpeaker === "agent" ? "active" : ""}`}>
            <div className="speaker-avatar">P</div>
            <div className="speaker-meta">
              <div className="name">Premura</div>
              <div className="role">AI receptionist</div>
            </div>
            <div className="mic-state" aria-hidden="true"></div>
          </div>
          {/* Bidirectional arrow between the two tiles; dims when idle. */}
          <svg className="speaker-arrow" width="16" height="16" viewBox="0 0 20 20" fill="none" stroke="currentColor" strokeWidth="1.6" strokeLinecap="round" strokeLinejoin="round">
            <path d="M4 10h12M11 6l4 4-4 4M9 14l-4-4 4-4" opacity={activeSpeaker === "idle" ? 0.4 : 1}/>
          </svg>
          <div className={`speaker-tile caller ${activeSpeaker === "caller" ? "active" : ""}`}>
            <div className="speaker-avatar">SC</div>
            <div className="speaker-meta">
              <div className="name">Sarah Chen</div>
              <div className="role">Caller</div>
            </div>
            <div className="mic-state" aria-hidden="true"></div>
          </div>
        </div>

        <div className="waveform-stage">
          <div
            className={`waveform ${playState === "playing" && analyserReady ? "amplitude" : ""}`}
            data-speaker={activeSpeaker}
            aria-label="Audio waveform"
          >
            {WAVE_BARS.map((b, i) => (
              <div
                key={i}
                ref={(el) => { barRefs.current[i] = el; }}
                className="bar"
                style={{
                  height: `${b.h}px`,
                  animationDelay: `${b.delay}s`,
                  animationDuration: `${0.7 + (i % 5) * 0.08}s`,
                }}
              />
            ))}
          </div>
          {showHearOverlay && (
            <button
              type="button"
              className="hear-btn"
              onClick={handleHearClick}
              aria-label={hearLabel}
            >
              <span className="hear-btn-icon">{SPEAKER_ICON}</span>
              <span className="hear-btn-label">{hearLabel}</span>
            </button>
          )}
        </div>

        <div className={`caption-strip ${captionWho === "caller" ? "caller" : ""}`}>
          <div className={`caption-eyebrow ${captionWho === "caller" ? "caller" : ""}`}>
            <svg width="11" height="11" viewBox="0 0 20 20" fill="none" stroke="currentColor" strokeWidth="1.7" strokeLinecap="round" strokeLinejoin="round">
              <rect x="8" y="3" width="4" height="10" rx="2"/>
              <path d="M5 9a5 5 0 0 0 10 0M10 14v3"/>
            </svg>
            Live captions ·{" "}
            <span className="who-tag">
              {ended ? "Call ended" : captionWho === "agent" ? "Premura speaking" : "Caller speaking"}
            </span>
          </div>
          <div className="caption-text">
            {captionText}
            {isSpeaking && <span className="cursor"></span>}
          </div>
        </div>
      </div>

      {/* Footer: small pause/resume control (only after the user has opted
          in and only mid-call), plus appointment outcome. */}
      <div className="call-foot">
        <div className="call-controls">
          {showFooterButton && (
            <button
              type="button"
              className={`call-ctrl call-ctrl-play ${playState === "playing" ? "is-playing" : ""}`}
              onClick={handlePauseResumeClick}
              aria-label={playState === "playing" ? "Pause sample call" : "Resume sample call"}
            >
              {playState === "playing" ? PAUSE_ICON : PLAY_ICON}
            </button>
          )}
        </div>
        <div className="call-action-row">
          {reachedBooking ? (
            <span className="booked">{CHECK_ICON} Appointment booked · 2:15 PM</span>
          ) : reachedScheduling ? (
            <span style={{display:'inline-flex',alignItems:'center',gap:6}}>
              {CAL_ICON} Checking openings…
            </span>
          ) : (
            <span style={{display:'inline-flex',alignItems:'center',gap:6, color:'var(--text-muted)'}}>
              {PHONE_ICON} {playState === "idle" ? "Sample of a real Premura call" : "Listening"}
            </span>
          )}
        </div>
      </div>
    </div>
  );
}

// ---------- Variant B: Calls log mini ----------
// Static sample data for the morning call log. `outcome` doubles as a CSS
// modifier class on the pill ("booked" | "info" | "routed"); the header
// stats in HeroCallsLog (6 calls / 3 booked) must agree with these rows.
const CALL_ROWS = [
  { time: "9:42 AM",  name: "Marcus Webb",      reason: "New patient · cleaning",     outcome: "booked", dur: "1:48" },
  { time: "9:31 AM",  name: "Priya Anand",       reason: "Insurance — Delta Dental",    outcome: "info",   dur: "0:54" },
  { time: "9:08 AM",  name: "Diane Foster",      reason: "Reschedule · crown follow-up", outcome: "booked", dur: "1:12" },
  { time: "8:51 AM",  name: "Roberto Silva",     reason: "Toothache — emergency",      outcome: "routed", dur: "2:31" },
  { time: "8:33 AM",  name: "Kelsey Moore",      reason: "Whitening pricing",          outcome: "info",   dur: "1:05" },
  { time: "8:14 AM",  name: "Jonathan Park",     reason: "New patient · exam",         outcome: "booked", dur: "1:36" },
];

function HeroCallsLog() {
  const [count, setCount] = React.useState(0);
  React.useEffect(() => {
    if (count >= CALL_ROWS.length) return;
    const t = setTimeout(() => setCount(c => c + 1), 380);
    return () => clearTimeout(t);
  }, [count]);

  return (
    <div className="calls-mini">
      <div className="calls-mini-head">
        <h4>Today · 8:00 – 9:45 AM</h4>
        <div className="stat-row">
          <span><b>6</b> calls</span>
          <span><b>3</b> booked</span>
          <span><b>0:00</b> on hold</span>
        </div>
      </div>
      {CALL_ROWS.slice(0, count).map((r, i) => (
        <div className="call-row" key={i} style={{animationDelay: `${i*60}ms`}}>
          <span className="time">{r.time}</span>
          <span className="who-line">
            <div className="name">{r.name}</div>
            <div className="reason">{r.reason}</div>
          </span>
          <span className={`outcome-pill ${r.outcome}`}>
            <span className="dot"></span>
            {r.outcome === "booked" ? "Booked" : r.outcome === "routed" ? "Routed" : "Info"}
          </span>
          <span className="dur">{r.dur}</span>
        </div>
      ))}
    </div>
  );
}

// ---------- Variant C: Orbit / waveform ----------
function HeroOrbit() {
  const chips = [
    { text: "Schedule a cleaning",    pos: { top: '14%',  left: '8%'  }, kind: "" },
    { text: "Check insurance",         pos: { top: '22%',  right: '6%' }, kind: "accent" },
    { text: "Toothache — emergency",   pos: { bottom: '18%', left: '6%' }, kind: "accent" },
    { text: "Booked · Dr. Patel",       pos: { bottom: '22%', right: '8%' }, kind: "success" },
    { text: "Confirm appointment",      pos: { top: '52%',  left: '4%' }, kind: "" },
  ];
  return (
    <div className="orbit-card">
      <div className="orbit-stage">
        <div className="ring r1"></div>
        <div className="ring r2"></div>
        <div className="ring r3"></div>
        <div className="orbit-pulse"></div>
        <div className="orbit-pulse"></div>
        <div className="orbit-pulse"></div>
        <div className="orbit-core" aria-hidden="true">
          <div className="heart"></div>
        </div>
        {chips.map((c, i) => (
          <div key={i} className={`float-chip ${c.kind}`} style={{...c.pos, animationDelay: `${i * 0.6}s`}}>
            <span className="iconwrap">
              {c.kind === "success" ? CHECK_ICON : c.kind === "accent" ? PHONE_ICON : CAL_ICON}
            </span>
            {c.text}
          </div>
        ))}
      </div>
      <div className="orbit-foot">
        <span style={{display:'inline-flex',alignItems:'center',gap:8}}>
          <span style={{width:6,height:6,borderRadius:'50%',background:'var(--success)',display:'inline-block'}}></span>
          Agent online · Relaxation Dental
        </span>
        <span style={{fontFamily:'var(--font-mono)', fontSize:11}}>24 / 7</span>
      </div>
    </div>
  );
}

// This file is loaded as a plain script (no module system) — expose the
// hero variants on `window` so the Tweaks selector can pick between them.
Object.assign(window, { HeroTranscript, HeroCallsLog, HeroOrbit });
