import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { Box, Flex, Skeleton, Text } from "@chakra-ui/react";
import { LuMinus, LuPlus, LuVideo, LuX } from "react-icons/lu";
import { useAuth } from "../../lib/AuthProvider";
import { API_URL } from "../../lib/apiClient";
import { generateHighContrastColor } from "../../lib/utils";

type SpeakerInfo = { speaker: number | null; name: string };

type VideoPlayerProps = {
  transcriptId: string;
  duration: number | null;
  expanded: boolean;
  onClose: () => void;
  sourceLanguage?: string | null;
  participants?: SpeakerInfo[] | null;
};

/**
 * Format a duration in seconds as "M:SS" or "H:MM:SS".
 * Seconds are floored so fractional durations never leak into the display.
 */
function formatDuration(seconds: number): string {
  const h = Math.floor(seconds / 3600);
  const m = Math.floor((seconds % 3600) / 60);
  const s = Math.floor(seconds % 60);
  if (h > 0)
    return `${h}:${String(m).padStart(2, "0")}:${String(s).padStart(2, "0")}`;
  return `${m}:${String(s).padStart(2, "0")}`;
}

// Matches a WebVTT cue timing line: "HH:MM:SS.mmm --> HH:MM:SS.mmm".
const VTT_TIMESTAMP_RE =
  /(\d{2}:\d{2}:\d{2}\.\d{3})\s*-->\s*(\d{2}:\d{2}:\d{2}\.\d{3})/g;

/** Convert a WebVTT "HH:MM:SS.mmm" timestamp into fractional seconds. */
function parseVttTimestamp(ts: string): number {
  const [h, m, rest] = ts.split(":");
  const [s, ms] = rest.split(".");
  return Number(h) * 3600 + Number(m) * 60 + Number(s) + Number(ms) / 1000;
}

/**
 * Convert fractional seconds back into a WebVTT "HH:MM:SS.mmm" timestamp.
 * Negative inputs clamp to zero. Rounding is done on total milliseconds so
 * a value like 0.9996s becomes "00:00:01.000" rather than the malformed
 * "00:00:00.1000" that per-field rounding would produce.
 */
function formatVttTimestamp(totalSeconds: number): string {
  const totalMs = Math.round(Math.max(0, totalSeconds) * 1000);
  const h = Math.floor(totalMs / 3_600_000);
  const m = Math.floor((totalMs % 3_600_000) / 60_000);
  const s = Math.floor((totalMs % 60_000) / 1000);
  const ms = totalMs % 1000;
  return `${String(h).padStart(2, "0")}:${String(m).padStart(2, "0")}:${String(s).padStart(2, "0")}.${String(ms).padStart(3, "0")}`;
}

/**
 * Shift every cue timing in a VTT document by `offsetSeconds` (positive =
 * captions appear later). Used to let the user nudge caption sync against
 * the video. Returns the input unchanged for a zero offset.
 */
function shiftVttTimestamps(vttContent: string, offsetSeconds: number): string {
  if (offsetSeconds === 0) return vttContent;
  return vttContent.replace(
    VTT_TIMESTAMP_RE,
    (_match, start: string, end: string) => {
      const newStart = formatVttTimestamp(
        parseVttTimestamp(start) + offsetSeconds,
      );
      const newEnd = formatVttTimestamp(parseVttTimestamp(end) + offsetSeconds);
      return `${newStart} --> ${newEnd}`;
    },
  );
}

type VttSegment = { start: number; end: number; speaker: string };

// Matches a cue timing line immediately followed by a WebVTT voice span
// ("<v Speaker Name>") so each cue can be attributed to a speaker.
// NOTE(review): this regex was garbled in the source ("\n]+)>") — it has been
// reconstructed as "\n<v ([^>]+)>", which matches the surviving residue;
// confirm against the original file / the webvtt-named API output.
const VTT_CUE_RE =
  /(\d{2}:\d{2}:\d{2}\.\d{3})\s*-->\s*(\d{2}:\d{2}:\d{2}\.\d{3})\n<v ([^>]+)>/g;

/** Extract (start, end, speaker) triples from a "webvtt-named" document. */
function parseVttSegments(vttContent: string): VttSegment[] {
  const segments: VttSegment[] = [];
  let match;
  while ((match = VTT_CUE_RE.exec(vttContent)) !== null) {
    segments.push({
      start: parseVttTimestamp(match[1]),
      end: parseVttTimestamp(match[2]),
      speaker: match[3],
    });
  }
  return segments;
}

// Same background as TopicSegment so speaker colors match the transcript UI
const SPEAKER_COLOR_BG: [number, number, number] = [96, 165, 250];

/**
 * Clickable seek bar that paints one colored band per speaker segment and a
 * playhead, plus the active speaker's name beneath the bar. Colors are keyed
 * the same way TopicSegment keys them so both UIs agree.
 */
function SpeakerProgressBar({
  segments,
  videoDuration,
  currentTime,
  captionOffset,
  onSeek,
  participants,
}: {
  segments: VttSegment[];
  videoDuration: number;
  currentTime: number;
  captionOffset: number;
  onSeek: (time: number) => void;
  participants?: SpeakerInfo[] | null;
}) {
  const barRef = useRef<HTMLDivElement | null>(null);

  // Build a name→"Speaker N" reverse lookup so colors match TopicSegment
  const speakerColors = useMemo(() => {
    const nameToColorKey: Record<string, string> = {};
    if (participants) {
      for (const p of participants) {
        if (p.speaker != null) {
          nameToColorKey[p.name] = `Speaker ${p.speaker}`;
        }
      }
    }
    const map: Record<string, string> = {};
    for (const seg of segments) {
      if (!map[seg.speaker]) {
        // Fall back to the raw speaker label when no participant mapping exists
        const colorKey = nameToColorKey[seg.speaker] ?? seg.speaker;
        map[seg.speaker] = generateHighContrastColor(
          colorKey,
          SPEAKER_COLOR_BG,
        );
      }
    }
    return map;
  }, [segments, participants]);

  // Speaker whose (offset-adjusted) cue window contains the playhead, if any.
  const activeSpeaker = useMemo(() => {
    for (const seg of segments) {
      const adjStart = seg.start + captionOffset;
      const adjEnd = seg.end + captionOffset;
      if (currentTime >= adjStart && currentTime < adjEnd) {
        return seg.speaker;
      }
    }
    return null;
  }, [segments, currentTime, captionOffset]);

  // Translate a click's x-position into a seek time, clamped to [0, duration].
  const handleClick = (e: React.MouseEvent) => {
    if (!barRef.current || !videoDuration) return;
    const rect = barRef.current.getBoundingClientRect();
    const fraction = Math.max(
      0,
      Math.min(1, (e.clientX - rect.left) / rect.width),
    );
    onSeek(fraction * videoDuration);
  };

  const progressPct =
    videoDuration > 0 ? (currentTime / videoDuration) * 100 : 0;

  // NOTE(review): all JSX below was stripped from the source and has been
  // reconstructed from the surviving expressions and comments; the exact
  // Chakra styling props are assumptions — confirm against the original file.
  return (
    <Box
      ref={barRef}
      position="relative"
      h="8px"
      borderRadius="full"
      bg="whiteAlpha.200"
      cursor="pointer"
      onClick={handleClick}
    >
      {segments.map((seg, i) => {
        const adjStart = Math.max(0, seg.start + captionOffset);
        const adjEnd = Math.max(0, seg.end + captionOffset);
        // Skip cues shifted entirely off either end of the video
        if (adjEnd <= 0 || adjStart >= videoDuration) return null;
        const leftPct = (adjStart / videoDuration) * 100;
        const widthPct = ((adjEnd - adjStart) / videoDuration) * 100;
        return (
          <Box
            key={i}
            position="absolute"
            top={0}
            h="100%"
            left={`${leftPct}%`}
            width={`${widthPct}%`}
            bg={speakerColors[seg.speaker]}
          />
        );
      })}
      {/* Playhead */}
      <Box
        position="absolute"
        top={0}
        h="100%"
        w="2px"
        bg="white"
        left={`${progressPct}%`}
      />
      {/* Speaker tooltip below the bar */}
      {activeSpeaker && (
        <Text
          position="absolute"
          top="100%"
          mt={1}
          fontSize="xs"
          color={speakerColors[activeSpeaker]}
        >
          {activeSpeaker}
        </Text>
      )}
    </Box>
  );
}

/**
 * Expandable cloud-recording player for a transcript.
 *
 * Responsibilities:
 *  - lazily fetch a signed video URL when first expanded;
 *  - fetch WebVTT captions (non-critical; failures are silent) and serve them
 *    to the <track> element via a blob URL;
 *  - let the user nudge caption sync in ±0.5s steps (timestamps are rewritten
 *    into a fresh blob on each change);
 *  - render a per-speaker progress bar that seeks on click.
 */
export default function VideoPlayer({
  transcriptId,
  duration,
  expanded,
  onClose,
  sourceLanguage,
  participants,
}: VideoPlayerProps) {
  const [videoUrl, setVideoUrl] = useState<string | null>(null);
  const [rawVtt, setRawVtt] = useState<string | null>(null);
  const [captionsUrl, setCaptionsUrl] = useState<string | null>(null);
  const [captionOffset, setCaptionOffset] = useState(0);
  const [currentTime, setCurrentTime] = useState(0);
  const [videoDuration, setVideoDuration] = useState(0);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  // Tracks the last caption blob URL so it can be revoked before replacement.
  const prevBlobUrl = useRef<string | null>(null);
  const videoRef = useRef<HTMLVideoElement | null>(null);
  const auth = useAuth();
  const accessToken =
    auth.status === "authenticated" ? auth.accessToken : null;

  // Fetch the signed video URL once, the first time the player is expanded.
  useEffect(() => {
    if (!expanded || !transcriptId || videoUrl) return;
    const fetchVideoUrl = async () => {
      setLoading(true);
      setError(null);
      try {
        const url = `${API_URL}/v1/transcripts/${transcriptId}/video/url`;
        const headers: Record<string, string> = {};
        if (accessToken) {
          headers["Authorization"] = `Bearer ${accessToken}`;
        }
        const resp = await fetch(url, { headers });
        if (!resp.ok) {
          if (resp.status === 401) {
            throw new Error("Sign in to view the video recording");
          }
          throw new Error("Failed to load video");
        }
        const data = await resp.json();
        setVideoUrl(data.url);
      } catch (err) {
        setError(err instanceof Error ? err.message : "Failed to load video");
      } finally {
        setLoading(false);
      }
    };
    fetchVideoUrl();
  }, [expanded, transcriptId, accessToken, videoUrl]);

  // Fetch captions once the video URL is known. Captions are best-effort.
  useEffect(() => {
    if (!videoUrl || !transcriptId) return;
    let cancelled = false;
    const fetchCaptions = async () => {
      try {
        const url = `${API_URL}/v1/transcripts/${transcriptId}?transcript_format=webvtt-named`;
        const headers: Record<string, string> = {};
        if (accessToken) {
          headers["Authorization"] = `Bearer ${accessToken}`;
        }
        const resp = await fetch(url, { headers });
        if (!resp.ok) return;
        const data = await resp.json();
        const vttContent = data?.transcript;
        if (!vttContent || cancelled) return;
        setRawVtt(vttContent);
      } catch {
        // Captions are non-critical — fail silently
      }
    };
    fetchCaptions();
    return () => {
      cancelled = true;
    };
  }, [videoUrl, transcriptId, accessToken]);

  // Rebuild blob URL whenever rawVtt or captionOffset changes
  useEffect(() => {
    if (!rawVtt) return;
    const shifted = shiftVttTimestamps(rawVtt, captionOffset);
    const blob = new Blob([shifted], { type: "text/vtt" });
    const blobUrl = URL.createObjectURL(blob);
    if (prevBlobUrl.current) {
      URL.revokeObjectURL(prevBlobUrl.current);
    }
    prevBlobUrl.current = blobUrl;
    setCaptionsUrl(blobUrl);
    return () => {
      URL.revokeObjectURL(blobUrl);
      prevBlobUrl.current = null;
    };
  }, [rawVtt, captionOffset]);

  // Nudge the caption offset; round to one decimal to avoid float drift.
  const adjustOffset = useCallback((delta: number) => {
    setCaptionOffset((prev) => Math.round((prev + delta) * 10) / 10);
  }, []);

  const formattedOffset = useMemo(() => {
    const sign = captionOffset >= 0 ? "+" : "";
    return `${sign}${captionOffset.toFixed(1)}s`;
  }, [captionOffset]);

  const segments = useMemo(
    () => (rawVtt ? parseVttSegments(rawVtt) : []),
    [rawVtt],
  );

  // Track video currentTime and duration
  useEffect(() => {
    const video = videoRef.current;
    if (!video) return;
    const onTimeUpdate = () => setCurrentTime(video.currentTime);
    const onDurationChange = () => {
      if (video.duration && isFinite(video.duration)) {
        setVideoDuration(video.duration);
      }
    };
    video.addEventListener("timeupdate", onTimeUpdate);
    video.addEventListener("loadedmetadata", onDurationChange);
    video.addEventListener("durationchange", onDurationChange);
    return () => {
      video.removeEventListener("timeupdate", onTimeUpdate);
      video.removeEventListener("loadedmetadata", onDurationChange);
      video.removeEventListener("durationchange", onDurationChange);
    };
  }, [videoUrl]);

  const handleSeek = useCallback((time: number) => {
    if (videoRef.current) {
      videoRef.current.currentTime = time;
    }
  }, []);

  if (!expanded) return null;

  // NOTE(review): all JSX below was stripped from the source and has been
  // reconstructed from the surviving expressions, comments, and imports
  // (Box/Flex/Skeleton/Text, LuMinus/LuPlus/LuVideo/LuX). The structure
  // matches the surviving fragments; the styling props are assumptions —
  // confirm against the original file.
  if (loading) {
    return <Skeleton h="240px" borderRadius="md" />;
  }

  if (error || !videoUrl) {
    return (
      <Box p={4} borderRadius="md" bg="blackAlpha.500">
        <Text color="red.300" fontSize="sm">
          {error || "Failed to load video recording"}
        </Text>
      </Box>
    );
  }

  return (
    <Box borderRadius="md" overflow="hidden" bg="black">
      {/* Header bar with title and close button */}
      <Flex align="center" gap={2} px={3} py={2} bg="blackAlpha.700">
        <LuVideo />
        <Text fontSize="sm" color="white">
          Cloud recording
        </Text>
        {duration != null && (
          <Text fontSize="sm" color="whiteAlpha.700">
            {formatDuration(duration)}
          </Text>
        )}
        {rawVtt && (
          <Flex align="center" gap={1} ml="auto">
            <Text fontSize="xs" color="whiteAlpha.700">
              CC sync
            </Text>
            <Box
              as="button"
              borderRadius="sm"
              onClick={() => adjustOffset(-0.5)}
              _hover={{ bg: "whiteAlpha.300" }}
              transition="background 0.15s"
            >
              <LuMinus />
            </Box>
            <Text fontSize="xs" color="white">
              {formattedOffset}
            </Text>
            <Box
              as="button"
              borderRadius="sm"
              onClick={() => adjustOffset(0.5)}
              _hover={{ bg: "whiteAlpha.300" }}
              transition="background 0.15s"
            >
              <LuPlus />
            </Box>
          </Flex>
        )}
        <Box as="button" onClick={onClose} _hover={{ bg: "whiteAlpha.300" }}>
          <LuX />
        </Box>
      </Flex>
      {/* Video element with visible controls */}
      <video ref={videoRef} src={videoUrl} controls style={{ width: "100%" }}>
        {captionsUrl && (
          <track
            kind="subtitles"
            src={captionsUrl}
            srcLang={sourceLanguage ?? "en"}
            default
          />
        )}
      </video>
      {segments.length > 0 && videoDuration > 0 && (
        <SpeakerProgressBar
          segments={segments}
          videoDuration={videoDuration}
          currentTime={currentTime}
          captionOffset={captionOffset}
          onSeek={handleSeek}
          participants={participants}
        />
      )}
    </Box>
  );
}