diff --git a/www/app/[domain]/transcripts/[transcriptId]/record/page.tsx b/www/app/[domain]/transcripts/[transcriptId]/record/page.tsx
index 073b5bf8..fd767e08 100644
--- a/www/app/[domain]/transcripts/[transcriptId]/record/page.tsx
+++ b/www/app/[domain]/transcripts/[transcriptId]/record/page.tsx
@@ -2,21 +2,16 @@
 import React, { useEffect, useState } from "react";
 import Recorder from "../../recorder";
 import { TopicList } from "../../topicList";
-import useWebRTC from "../../useWebRTC";
 import useTranscript from "../../useTranscript";
 import { useWebSockets } from "../../useWebSockets";
-import useAudioDevice from "../../useAudioDevice";
 import "../../../../styles/button.css";
 import { Topic } from "../../webSocketTypes";
-import LiveTrancription from "../../liveTranscription";
-import DisconnectedIndicator from "../../disconnectedIndicator";
-import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
-import { faGear } from "@fortawesome/free-solid-svg-icons";
 import { lockWakeState, releaseWakeState } from "../../../../lib/wakeLock";
 import { useRouter } from "next/navigation";
 import Player from "../../player";
 import useMp3 from "../../useMp3";
 import WaveformLoading from "../../waveformLoading";
+import { Box, Grid } from "@chakra-ui/react";

 type TranscriptDetails = {
   params: {
@@ -25,59 +20,36 @@
 };

 const TranscriptRecord = (details: TranscriptDetails) => {
-  const [stream, setStream] = useState(null);
-  const [disconnected, setDisconnected] = useState(false);
+  const transcript = useTranscript(details.params.transcriptId);
+  const useActiveTopic = useState(null);

-  useEffect(() => {
-    if (process.env.NEXT_PUBLIC_ENV === "development") {
-      document.onkeyup = (e) => {
-        if (e.key === "d") {
-          setDisconnected((prev) => !prev);
-        }
-      };
-    }
-  }, []);
-
-  const transcript = useTranscript(details.params.transcriptId);
-  const webRTC = useWebRTC(stream, details.params.transcriptId);
   const webSockets = useWebSockets(details.params.transcriptId);
-  const { audioDevices, getAudioStream } = useAudioDevice();
-
-  const [recordedTime, setRecordedTime] = useState(0);
-  const [startTime, setStartTime] = useState(0);
-  const [transcriptStarted, setTranscriptStarted] = useState(false);

   let mp3 = useMp3(details.params.transcriptId, true);
   const router = useRouter();

-  useEffect(() => {
-    if (!transcriptStarted && webSockets.transcriptText.length !== 0)
-      setTranscriptStarted(true);
-  }, [webSockets.transcriptText]);
+  const [status, setStatus] = useState(
+    webSockets.status.value || transcript.response?.status || "idle",
+  );

   useEffect(() => {
-    const statusToRedirect = ["ended", "error"];
+    // TODO: handle the error status better
+    const newStatus =
+      webSockets.status.value || transcript.response?.status || "idle";
+    setStatus(newStatus);
+    if (newStatus && (newStatus == "ended" || newStatus == "error")) {
+      console.log(newStatus, "redirecting");

-    //TODO if has no topic and is error, get back to new
-    if (
-      transcript.response?.status &&
-      (statusToRedirect.includes(transcript.response?.status) ||
-        statusToRedirect.includes(webSockets.status.value))
-    ) {
       const newUrl = "/transcripts/" + details.params.transcriptId;
-      // Shallow redirection does not work on NextJS 13
-      // https://github.com/vercel/next.js/discussions/48110
-      // https://github.com/vercel/next.js/discussions/49540
       router.replace(newUrl);
-      // history.replaceState({}, "", newUrl);
-    }
+    }
   }, [webSockets.status.value, transcript.response?.status]);

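+  // Fetch the final mp3 once the websocket has reported both waveform data
+  // and a duration for the recording.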
   useEffect(() => {
-    if (transcript.response?.status === "ended") mp3.getNow();
-  }, [transcript.response]);
+    if (webSockets.waveform && webSockets.duration) mp3.getNow();
+  }, [webSockets.waveform, webSockets.duration]);

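+  // Keep the screen awake for the whole recording session; the lock is
+  // released again when the component unmounts.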
   useEffect(() => {
     lockWakeState();
@@ -87,8 +59,31 @@ const TranscriptRecord = (details: TranscriptDetails) => {
   }, []);

   return (
-
-      {webSockets.waveform && webSockets.duration && mp3?.media ? (
+
+
+
+
+        {status == "processing" && // TODO: send an event when the mp3 is ready
+        webSockets.waveform &&
+        webSockets.duration &&
+        mp3?.media ? (
           <Player
             media={mp3.media}
             mediaDuration={webSockets.duration}
           />
-        ) : recordedTime ? (
+        ) : status == "processing" ? (
           <WaveformLoading />
         ) : (
-          <Recorder
-            onStop={() => {
-              setStream(null);
-              setRecordedTime(Date.now() - startTime);
-              webRTC?.send(JSON.stringify({ cmd: "STOP" }));
-            }}
-            onRecord={() => {
-              setStartTime(Date.now());
-            }}
-            getAudioStream={getAudioStream}
-            audioDevices={audioDevices}
-            transcriptId={details.params.transcriptId}
-          />
+          // TODO: only start the recording animation once the "recorded" status arrives
+          <Recorder transcriptId={details.params.transcriptId} status={status} />
         )}
-
-
-
-
-
-        {!recordedTime ? (
-          <>
-            {transcriptStarted && (
-
-
-                Transcription
-
-            )}
-
-
-              {!transcriptStarted ? (
-
-                  The conversation transcript will appear here shortly after
-                  you start recording.
-
-              ) : (
-
-              )}
-
-
-
-        ) : (
-
-
-
-
-              We are generating the final summary for you. This may take a
-              couple of minutes. Please do not navigate away from the page
-              during this time.
-
-            {/* NTH If login required remove last sentence */}
-
-        )}
-
-
-      {disconnected && <DisconnectedIndicator />}
-
+
   );
 };

diff --git a/www/app/[domain]/transcripts/audioInputsDropdown.tsx b/www/app/[domain]/transcripts/audioInputsDropdown.tsx
index 56204286..4a6f9630 100644
--- a/www/app/[domain]/transcripts/audioInputsDropdown.tsx
+++ b/www/app/[domain]/transcripts/audioInputsDropdown.tsx
@@ -1,4 +1,4 @@
-import React, { useEffect, useState } from "react";
+import React from "react";
 import Dropdown, { Option } from "react-dropdown";

 import "react-dropdown/style.css";

diff --git a/www/app/[domain]/transcripts/fileUploadButton.tsx b/www/app/[domain]/transcripts/fileUploadButton.tsx
index 7cfaf19e..b4225e09 100644
--- a/www/app/[domain]/transcripts/fileUploadButton.tsx
+++ b/www/app/[domain]/transcripts/fileUploadButton.tsx
@@ -1,9 +1,10 @@
 import React from "react";
 import useApi from "../../lib/useApi";
-import { Body_transcript_record_upload_v1_transcripts__transcript_id__record_upload_post } from "../../api";
+import { Button } from "@chakra-ui/react";

 type FileUploadButton = {
   transcriptId: string;
+  disabled?: boolean;
 };

 export default function FileUploadButton(props: FileUploadButton) {
@@ -32,12 +33,14 @@ export default function FileUploadButton(props: FileUploadButton) {
   return (
     <>
-
+

diff --git a/www/app/[domain]/transcripts/recorder.tsx b/www/app/[domain]/transcripts/recorder.tsx
--- a/www/app/[domain]/transcripts/recorder.tsx
+++ b/www/app/[domain]/transcripts/recorder.tsx
 type RecorderProps = {
-  >;
-  onStop: () => void;
-  onRecord?: () => void;
-  getAudioStream: (deviceId) => Promise;
-  audioDevices: Option[];
   transcriptId: string;
+  status: string;
 };

 export default function Recorder(props: RecorderProps) {
   const waveformRef = useRef(null);
-  const [wavesurfer, setWavesurfer] = useState(null);
   const [record, setRecord] = useState(null);
   const [isRecording, setIsRecording] = useState(false);
-  const [hasRecorded, setHasRecorded] = useState(false);
-  const [isPlaying, setIsPlaying] = useState(false);
-  const [currentTime, setCurrentTime] = useState(0);
-  const [timeInterval, setTimeInterval] = useState(null);
+  const [duration, setDuration] = useState(0);
   const [deviceId, setDeviceId] = useState(null);
-  const [recordStarted, setRecordStarted] = useState(false);
-  const [showDevices, setShowDevices] = useState(false);
   const { setError } = useError();
+  const [stream, setStream] = useState(null);
+
+  // Time tracking: the displayed time was drifting without this; to be tested again.
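+  // currentTime is recomputed from Date.now() - startTime on every tick
+  // instead of being incremented, so a delayed interval cannot skew the clock.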
+  const [startTime, setStartTime] = useState(0);
+  const [currentTime, setCurrentTime] = useState(0);
+  const [timeInterval, setTimeInterval] = useState(null);
+
+  const webRTC = useWebRTC(stream, props.transcriptId);
+
+  const { audioDevices, getAudioStream } = useAudioDevice();

   // Function used to setup keyboard shortcuts for the streamdeck
   const setupProjectorKeys = (): (() => void) => {
@@ -106,22 +117,13 @@ export default function Recorder(props: RecorderProps) {
         waveSurferStyles.playerStyle.backgroundColor;
       wsWrapper.style.borderRadius =
         waveSurferStyles.playerStyle.borderRadius;

-      _wavesurfer.on("play", () => {
-        setIsPlaying(true);
-      });
-      _wavesurfer.on("pause", () => {
-        setIsPlaying(false);
-      });
       _wavesurfer.on("timeupdate", setCurrentTime);

       setRecord(_wavesurfer.registerPlugin(RecordPlugin.create()));

-      setWavesurfer(_wavesurfer);
-
       return () => {
         _wavesurfer.destroy();
         setIsRecording(false);
-        setIsPlaying(false);
         setCurrentTime(0);
       };
     }
@@ -130,7 +132,7 @@ export default function Recorder(props: RecorderProps) {
   useEffect(() => {
     if (isRecording) {
       const interval = window.setInterval(() => {
-        setCurrentTime((prev) => prev + 1);
+        setCurrentTime(Date.now() - startTime);
       }, 1000);
       setTimeInterval(interval);
       return () => clearInterval(interval);
@@ -147,20 +149,20 @@ export default function Recorder(props: RecorderProps) {
     if (!record) return console.log("no record");

     if (record.isRecording()) {
-      if (props.onStop) props.onStop();
+      setStream(null);
+      webRTC?.send(JSON.stringify({ cmd: "STOP" }));
       record.stopRecording();
       if (screenMediaStream) {
         screenMediaStream.getTracks().forEach((t) => t.stop());
       }
       setIsRecording(false);
-      setHasRecorded(true);
       setScreenMediaStream(null);
       setDestinationStream(null);
     } else {
-      if (props.onRecord) props.onRecord();
-      const stream = await getCurrentStream();
+      const stream = await getMicrophoneStream();
+      setStartTime(Date.now());

-      if (props.setStream) props.setStream(stream);
+      setStream(stream);
       if (stream) {
         await record.startRecording(stream);
         setIsRecording(true);
@@ -198,7 +200,7 @@ export default function Recorder(props: RecorderProps) {
     if (destinationStream !== null) return console.log("already recording");

     // connect mic audio (microphone)
-    const micStream = await getCurrentStream();
+    const micStream = await getMicrophoneStream();
     if (!micStream) {
       console.log("no microphone audio");
       return;
@@ -227,7 +229,7 @@ export default function Recorder(props: RecorderProps) {
   useEffect(() => {
     if (!record) return;
     if (!destinationStream) return;
-    if (props.setStream) props.setStream(destinationStream);
+    setStream(destinationStream);
     if (destinationStream) {
       record.startRecording(destinationStream);
       setIsRecording(true);
@@ -238,115 +240,87 @@ export default function Recorder(props: RecorderProps) {
     startTabRecording();
   }, [record, screenMediaStream]);

-  const handlePlayClick = () => {
-    wavesurfer?.playPause();
-  };
-
   const timeLabel = () => {
     if (isRecording) return formatTime(currentTime);
     if (duration) return `${formatTime(currentTime)}/${formatTime(duration)}`;
     return "";
   };
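+  // Resolve the currently selected input device into a MediaStream; returns
+  // null until a device has been chosen.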
-  const getCurrentStream = async () => {
-    setRecordStarted(true);
-    return deviceId && props.getAudioStream
-      ? await props.getAudioStream(deviceId)
-      : null;
+  const getMicrophoneStream = async () => {
+    return deviceId && getAudioStream ? await getAudioStream(deviceId) : null;
   };

   useEffect(() => {
-    if (props.audioDevices && props.audioDevices.length > 0) {
-      setDeviceId(props.audioDevices[0].value);
+    if (audioDevices && audioDevices.length > 0) {
+      setDeviceId(audioDevices[0].value);
     }
-  }, [props.audioDevices]);
+  }, [audioDevices]);

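+  // Toolbar: record/stop toggle, tab-capture button (Chrome only), input
+  // device menu, and the elapsed time label.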
   return (
-
-
-
-
-      {isRecording && (
-      )}
+
+          : }
+          variant={"ghost"}
+          colorScheme={"blue"}
+          mr={2}
+          onClick={handleRecClick}
+        />
+
+        {!isRecording && (window as any).chrome && (
+
+            }
+            variant={"ghost"}
+            colorScheme={"blue"}
+            disabled={isRecording}
+            mr={2}
+            onClick={handleRecordTabClick}
+          />
+        )}
+        {audioDevices && audioDevices?.length > 0 && deviceId && !isRecording && (
+
+
+              }
+              variant={"ghost"}
+              disabled={isRecording}
+              colorScheme={"blue"}
+              mr={2}
+            />
+
+
+              {audioDevices.map((device) => (
+                  setDeviceId(device.value)}
+                >
+                  {device.label}
+
+              ))}
+
+
+
+        )}
+
+
+        {timeLabel()}
-
-
-        {hasRecorded && (
-          <>
-
-
-        )}
-        {!hasRecorded && (
-          <>
-
-
-
-
-            {!isRecording && (
-
-            )}
-            {props.audioDevices && props.audioDevices?.length > 0 && deviceId && (
-              <>
-
-
-                  setShowDevices(false)}
-                  deviceId={deviceId}
-                />
-
-            )}
-
-        )}
-
+
+
+
   );
 }

diff --git a/www/app/[domain]/transcripts/topicList.tsx b/www/app/[domain]/transcripts/topicList.tsx
index e72b9bba..64266721 100644
--- a/www/app/[domain]/transcripts/topicList.tsx
+++ b/www/app/[domain]/transcripts/topicList.tsx
@@ -14,6 +14,7 @@ import {
   Flex,
   Text,
 } from "@chakra-ui/react";
+import { featureEnabled } from "../domainContext";

 type TopicListProps = {
   topics: Topic[];
@@ -23,6 +24,8 @@ type TopicListProps = {
   ];
   autoscroll: boolean;
   transcriptId: string;
+  status: string;
+  currentTranscriptText: string;
 };

 export function TopicList({
@@ -30,6 +33,8 @@ export function TopicList({
   useActiveTopic,
   autoscroll,
   transcriptId,
+  status,
+  currentTranscriptText,
 }: TopicListProps) {
   const [activeTopic, setActiveTopic] = useActiveTopic;
   const [autoscrollEnabled, setAutoscrollEnabled] = useState(true);
@@ -72,7 +77,7 @@ export function TopicList({

   useEffect(() => {
     if (autoscroll) {
-      const topicsDiv = document.getElementById("topics-div");
+      const topicsDiv = document.getElementById("scroll-div");

       topicsDiv && toggleScroll(topicsDiv);
     }
@@ -80,10 +85,10 @@ export function TopicList({

   useEffect(() => {
     if (autoscroll && autoscrollEnabled) scrollToBottom();
-  }, [topics.length]);
+  }, [topics.length, currentTranscriptText]);

   const scrollToBottom = () => {
-    const topicsDiv = document.getElementById("topics-div");
+    const topicsDiv = document.getElementById("scroll-div");
     if (topicsDiv) topicsDiv.scrollTop = topicsDiv.scrollHeight;
   };

@@ -97,30 +102,41 @@ export function TopicList({
     );
   };

+  const requireLogin = featureEnabled("requireLogin");
+
+  // Keep the newest topic selected as topics stream in.
+  useEffect(() => {
+    setActiveTopic(topics[topics.length - 1]);
+  }, [topics]);
+
+  // Collapse the active topic again as soon as new live transcript text arrives.
+  useEffect(() => {
+    if (activeTopic && currentTranscriptText) setActiveTopic(null);
+  }, [activeTopic, currentTranscriptText]);
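+  // Render order: autoscroll toggle, topic accordion, then status-dependent
+  // placeholders (live text, idle hint, processing notice, ended/error states).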
   return (
-      {topics.length > 0 ? (
-        <>
-          {autoscroll && (
-
-          )}
+      {autoscroll && (
+
+      )}
+
+      {topics.length > 0 && (
         <Accordion
           index={topics.findIndex((topic) => topic.id == activeTopic?.id)}
           variant="custom"
           allowToggle
@@ -200,18 +216,47 @@ export function TopicList({
           ))}
-
-      ) : (
-
-
-            Discussion topics will appear here after you start recording.
-
-
-            It may take up to 5 minutes of conversation for the first topic to
-            appear.
-
-
-      )}
+      )}
+
+      {status == "recording" && (
+
+          {currentTranscriptText}
+
+      )}
+      {(status == "recording" || status == "idle") &&
+        currentTranscriptText.length == 0 &&
+        topics.length == 0 && (
+
+
+              The discussion transcript will appear here after you start
+              recording.
+
+
+              It may take up to 5 minutes of conversation for it to first
+              appear.
+
+
+        )}
+      {status == "processing" && (
+
+          We are processing the recording; please wait.
+          {!requireLogin && (
+
+              Please do not navigate away from the page during this time.
+
+          )}
+
+      )}
+      {status == "ended" && topics.length == 0 && (
+
+          The recording ended without any topics being found.
+
+      )}
+      {status == "error" && (
+
+          There was an error while processing your recording.
+
+      )}
+
   );
 }

diff --git a/www/app/[domain]/transcripts/useWebSockets.ts b/www/app/[domain]/transcripts/useWebSockets.ts
index 585f383c..94b988e9 100644
--- a/www/app/[domain]/transcripts/useWebSockets.ts
+++ b/www/app/[domain]/transcripts/useWebSockets.ts
@@ -6,8 +6,9 @@ import { AudioWaveform, GetTranscriptSegmentTopic } from "../../api";
 import useApi from "../../lib/useApi";

 export type UseWebSockets = {
-  transcriptText: string;
+  transcriptTextLive: string;
   translateText: string;
+  accumulatedText: string;
   title: string;
   topics: Topic[];
   finalSummary: FinalSummary;
@@ -17,7 +18,7 @@ export type UseWebSockets = {
 };

 export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
-  const [transcriptText, setTranscriptText] = useState("");
+  const [transcriptTextLive, setTranscriptTextLive] = useState("");
   const [translateText, setTranslateText] = useState("");
   const [title, setTitle] = useState("");
   const [textQueue, setTextQueue] = useState([]);
@@ -29,12 +30,14 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
   const [finalSummary, setFinalSummary] = useState({
     summary: "",
   });
-  const [status, setStatus] = useState({ value: "initial" });
+  const [status, setStatus] = useState({ value: "" });

   const { setError } = useError();
   const { websocket_url } = useContext(DomainContext);
   const api = useApi();

+  const [accumulatedText, setAccumulatedText] = useState("");
+
   useEffect(() => {
     if (isProcessing || textQueue.length === 0) {
       return;
@@ -42,13 +45,12 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
     setIsProcessing(true);

     const text = textQueue[0];
-    setTranscriptText(text);
+    setTranscriptTextLive(text);
     setTranslateText(translationQueue[0]);

     const WPM_READING = 200 + textQueue.length * 10; // words per minute to read
     const wordCount = text.split(/\s+/).length;
     const delay = (wordCount / WPM_READING) * 60 * 1000;
-    console.log(`displaying "${text}" for ${delay}ms`);
     setTimeout(() => {
       setIsProcessing(false);
       setTextQueue((prevQueue) => prevQueue.slice(1));
@@ -92,7 +94,7 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
       },
     ];

-    setTranscriptText("Lorem Ipsum");
+    setTranscriptTextLive("Lorem Ipsum");
     setTopics([
       {
         id: "1",
@@ -190,9 +192,13 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
       setFinalSummary({ summary: "This is the final summary" });
     }
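+      // Dev-only shortcut: "z" seeds live text, accumulated text, topics, and
+      // a "recording" status so the recording view can be exercised manually.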
     if (e.key === "z" && process.env.NEXT_PUBLIC_ENV === "development") {
-      setTranscriptText(
+      setTranscriptTextLive(
         "This text is in English, and it is a pretty long sentence to test the limits",
       );
+      setAccumulatedText(
+        "This text is in English, and it is a pretty long sentence to test the limits. This text is in English, and it is a pretty long sentence to test the limits",
+      );
+      setStatus({ value: "recording" });
       setTopics([
         {
           id: "1",
@@ -333,6 +339,8 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
           console.debug("TRANSCRIPT event:", newText);
           setTextQueue((prevQueue) => [...prevQueue, newText]);
           setTranslationQueue((prevQueue) => [...prevQueue, newTranslation]);
+
+          setAccumulatedText((prevText) => prevText + " " + newText);
           break;

         case "TOPIC":
@@ -345,6 +353,10 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
               prevTopics[index] = topic;
               return prevTopics;
             }
+            // Drop the prefix of the accumulated live text that this new topic now covers.
+            setAccumulatedText((prevText) =>
+              prevText.slice(topic.transcript.length),
+            );
+
             return [...prevTopics, topic];
           });
           console.debug("TOPIC event:", message.data);
@@ -419,18 +431,18 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
           break;
         case 1005: // Closure by client FF
           break;
+        case 1001: // Navigating away
+          break;
         default:
           setError(
             new Error(`WebSocket closed unexpectedly with code: ${event.code}`),
-            "Disconnected",
+            "Disconnected from the server. Please refresh the page.",
           );
           console.log(
             "Socket is closed. Reconnect will be attempted in 1 second.",
             event.reason,
           );
-          setTimeout(function () {
-            ws = new WebSocket(url);
-          }, 1000);
+          // TODO: handle reconnection with socket.io
       }
     };

@@ -440,8 +452,9 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
   }, [transcriptId, !api]);

   return {
-    transcriptText,
+    transcriptTextLive,
     translateText,
+    accumulatedText,
     topics,
     finalSummary,
     title,

diff --git a/www/app/styles/icons/stopRecord.tsx b/www/app/styles/icons/stopRecord.tsx
new file mode 100644
index 00000000..146d15a5
--- /dev/null
+++ b/www/app/styles/icons/stopRecord.tsx
@@ -0,0 +1,9 @@
+import { Icon } from "@chakra-ui/react";
+
+export default function StopRecordIcon(props) {
+  return (
+
+
+  );
+}