Mirror of https://github.com/Monadical-SAS/reflector.git (synced 2025-12-22 21:29:05 +00:00)
Merge branch 'main' into reenable-non-latin-languages
154
www/app/[domain]/transcripts/[transcriptId]/page.tsx
Normal file
@@ -0,0 +1,154 @@
"use client";
import Modal from "../modal";
import useTranscript from "../useTranscript";
import useTopics from "../useTopics";
import useWaveform from "../useWaveform";
import useMp3 from "../useMp3";
import { TopicList } from "../topicList";
import { Topic } from "../webSocketTypes";
import React, { useEffect, useState } from "react";
import "../../../styles/button.css";
import FinalSummary from "../finalSummary";
import ShareLink from "../shareLink";
import QRCode from "react-qr-code";
import TranscriptTitle from "../transcriptTitle";
import ShareModal from "./shareModal";
import Player from "../player";
import WaveformLoading from "../waveformLoading";
import { useRouter } from "next/navigation";
import { featureEnabled } from "../../domainContext";
import { toShareMode } from "../../../lib/shareMode";

type TranscriptDetails = {
  params: {
    transcriptId: string;
  };
};

export default function TranscriptDetails(details: TranscriptDetails) {
  const transcriptId = details.params.transcriptId;
  const router = useRouter();

  const transcript = useTranscript(transcriptId);
  const topics = useTopics(transcriptId);
  const waveform = useWaveform(transcriptId);
  const useActiveTopic = useState<Topic | null>(null);
  const mp3 = useMp3(transcriptId);
  const [showModal, setShowModal] = useState(false);

  useEffect(() => {
    const statusToRedirect = ["idle", "recording", "processing"];
    if (statusToRedirect.includes(transcript.response?.status)) {
      const newUrl = "/transcripts/" + details.params.transcriptId + "/record";
      // Shallow redirection does not work on NextJS 13
      // https://github.com/vercel/next.js/discussions/48110
      // https://github.com/vercel/next.js/discussions/49540
      router.push(newUrl, undefined);
      // history.replaceState({}, "", newUrl);
    }
  }, [transcript.response?.status]);

  const fullTranscript =
    topics.topics
      ?.map((topic) => topic.transcript)
      .join("\n\n")
      .replace(/ +/g, " ")
      .trim() || "";

  if (transcript && transcript.response) {
    if (transcript.error || topics?.error) {
      return (
        <Modal
          title="Transcription Not Found"
          text="A transcription with this ID does not exist."
        />
      );
    }

    if (!transcriptId || transcript?.loading || topics?.loading) {
      return <Modal title="Loading" text={"Loading transcript..."} />;
    }

    return (
      <>
        {featureEnabled("sendToZulip") && (
          <ShareModal
            transcript={transcript.response}
            topics={topics ? topics.topics : null}
            show={showModal}
            setShow={(v) => setShowModal(v)}
          />
        )}
        <div className="flex flex-col">
          {transcript?.response?.title && (
            <TranscriptTitle
              title={transcript.response.title}
              transcriptId={transcript.response.id}
            />
          )}
          {waveform.waveform && mp3.media ? (
            <Player
              topics={topics?.topics || []}
              useActiveTopic={useActiveTopic}
              waveform={waveform.waveform}
              media={mp3.media}
              mediaDuration={transcript.response.duration}
            />
          ) : waveform.error ? (
            <div>Error loading this recording</div>
          ) : (
            <WaveformLoading />
          )}
        </div>
        <div className="grid grid-cols-1 lg:grid-cols-2 grid-rows-2 lg:grid-rows-1 gap-2 lg:gap-4 h-full">
          <TopicList
            topics={topics.topics || []}
            useActiveTopic={useActiveTopic}
            autoscroll={false}
          />

          <div className="w-full h-full grid grid-rows-layout-one grid-cols-1 gap-2 lg:gap-4">
            <section className=" bg-blue-400/20 rounded-lg md:rounded-xl p-2 md:px-4 h-full">
              {transcript.response.long_summary ? (
                <FinalSummary
                  fullTranscript={fullTranscript}
                  summary={transcript.response.long_summary}
                  transcriptId={transcript.response.id}
                  openZulipModal={() => setShowModal(true)}
                />
              ) : (
                <div className="flex flex-col h-full justify-center content-center">
                  {transcript.response.status == "processing" ? (
                    <p>Loading Transcript</p>
                  ) : (
                    <p>
                      There was an error generating the final summary. Please
                      come back later.
                    </p>
                  )}
                </div>
              )}
            </section>

            <section className="flex items-center">
              <div className="mr-4 hidden md:block h-auto">
                <QRCode
                  value={`${location.origin}/transcripts/${details.params.transcriptId}`}
                  level="L"
                  size={98}
                />
              </div>
              <div className="flex-grow max-w-full">
                <ShareLink
                  transcriptId={transcript?.response?.id}
                  userId={transcript?.response?.user_id}
                  shareMode={toShareMode(transcript?.response?.share_mode)}
                />
              </div>
            </section>
          </div>
        </div>
      </>
    );
  }
}
172
www/app/[domain]/transcripts/[transcriptId]/record/page.tsx
Normal file
@@ -0,0 +1,172 @@
"use client";
import React, { useEffect, useState } from "react";
import Recorder from "../../recorder";
import { TopicList } from "../../topicList";
import useWebRTC from "../../useWebRTC";
import useTranscript from "../../useTranscript";
import { useWebSockets } from "../../useWebSockets";
import useAudioDevice from "../../useAudioDevice";
import "../../../../styles/button.css";
import { Topic } from "../../webSocketTypes";
import LiveTrancription from "../../liveTranscription";
import DisconnectedIndicator from "../../disconnectedIndicator";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faGear } from "@fortawesome/free-solid-svg-icons";
import { lockWakeState, releaseWakeState } from "../../../../lib/wakeLock";
import { useRouter } from "next/navigation";
import Player from "../../player";
import useMp3 from "../../useMp3";
import WaveformLoading from "../../waveformLoading";

type TranscriptDetails = {
  params: {
    transcriptId: string;
  };
};

const TranscriptRecord = (details: TranscriptDetails) => {
  const [stream, setStream] = useState<MediaStream | null>(null);
  const [disconnected, setDisconnected] = useState<boolean>(false);
  const useActiveTopic = useState<Topic | null>(null);

  useEffect(() => {
    if (process.env.NEXT_PUBLIC_ENV === "development") {
      document.onkeyup = (e) => {
        if (e.key === "d") {
          setDisconnected((prev) => !prev);
        }
      };
    }
  }, []);

  const transcript = useTranscript(details.params.transcriptId);
  const webRTC = useWebRTC(stream, details.params.transcriptId);
  const webSockets = useWebSockets(details.params.transcriptId);

  const { audioDevices, getAudioStream } = useAudioDevice();

  const [recordedTime, setRecordedTime] = useState(0);
  const [startTime, setStartTime] = useState(0);
  const [transcriptStarted, setTranscriptStarted] = useState(false);
  let mp3 = useMp3(details.params.transcriptId, true);

  const router = useRouter();

  useEffect(() => {
    if (!transcriptStarted && webSockets.transcriptText.length !== 0)
      setTranscriptStarted(true);
  }, [webSockets.transcriptText]);

  useEffect(() => {
    const statusToRedirect = ["ended", "error"];

    // TODO if it has no topic and is in error, go back to new
    if (
      statusToRedirect.includes(transcript.response?.status) ||
      statusToRedirect.includes(webSockets.status.value)
    ) {
      const newUrl = "/transcripts/" + details.params.transcriptId;
      // Shallow redirection does not work on NextJS 13
      // https://github.com/vercel/next.js/discussions/48110
      // https://github.com/vercel/next.js/discussions/49540
      router.replace(newUrl);
      // history.replaceState({}, "", newUrl);
    }
  }, [webSockets.status.value, transcript.response?.status]);

  useEffect(() => {
    if (transcript.response?.status === "ended") mp3.getNow();
  }, [transcript.response]);

  useEffect(() => {
    lockWakeState();
    return () => {
      releaseWakeState();
    };
  }, []);

  return (
    <>
      {webSockets.waveform && webSockets.duration && mp3?.media ? (
        <Player
          topics={webSockets.topics || []}
          useActiveTopic={useActiveTopic}
          waveform={webSockets.waveform}
          media={mp3.media}
          mediaDuration={webSockets.duration}
        />
      ) : recordedTime ? (
        <WaveformLoading />
      ) : (
        <Recorder
          setStream={setStream}
          onStop={() => {
            setStream(null);
            setRecordedTime(Date.now() - startTime);
            webRTC?.send(JSON.stringify({ cmd: "STOP" }));
          }}
          onRecord={() => {
            setStartTime(Date.now());
          }}
          getAudioStream={getAudioStream}
          audioDevices={audioDevices}
          transcriptId={details.params.transcriptId}
        />
      )}

      <div className="grid grid-cols-1 lg:grid-cols-2 grid-rows-mobile-inner lg:grid-rows-1 gap-2 lg:gap-4 h-full">
        <TopicList
          topics={webSockets.topics}
          useActiveTopic={useActiveTopic}
          autoscroll={true}
        />

        <section
          className={`w-full h-full bg-blue-400/20 rounded-lg md:rounded-xl p-2 md:px-4`}
        >
          {!recordedTime ? (
            <>
              {transcriptStarted && (
                <h2 className="md:text-lg font-bold">Transcription</h2>
              )}
              <div className="flex flex-col justify-center align center text-center h-full">
                <div className="py-2 h-auto">
                  {!transcriptStarted ? (
                    <div className="text-center text-gray-500">
                      The conversation transcript will appear here shortly after
                      you start recording.
                    </div>
                  ) : (
                    <LiveTrancription
                      text={webSockets.transcriptText}
                      translateText={webSockets.translateText}
                    />
                  )}
                </div>
              </div>
            </>
          ) : (
            <div className="flex flex-col justify-center align center text-center h-full text-gray-500">
              <div className="p-2 md:p-4">
                <FontAwesomeIcon
                  icon={faGear}
                  className="animate-spin-slow h-14 w-14 md:h-20 md:w-20"
                />
              </div>
              <p>
                We are generating the final summary for you. This may take a
                couple of minutes. Please do not navigate away from the page
                during this time.
              </p>
              {/* NTH If login required remove last sentence */}
            </div>
          )}
        </section>
      </div>

      {disconnected && <DisconnectedIndicator />}
    </>
  );
};

export default TranscriptRecord;
159
www/app/[domain]/transcripts/[transcriptId]/shareModal.tsx
Normal file
@@ -0,0 +1,159 @@
import React, { useContext, useState, useEffect } from "react";
import SelectSearch from "react-select-search";
import { getZulipMessage, sendZulipMessage } from "../../../lib/zulip";
import { GetTranscript, GetTranscriptTopic } from "../../../api";
import "react-select-search/style.css";
import { DomainContext } from "../../domainContext";

type ShareModal = {
  show: boolean;
  setShow: (show: boolean) => void;
  transcript: GetTranscript | null;
  topics: GetTranscriptTopic[] | null;
};

interface Stream {
  id: number;
  name: string;
  topics: string[];
}

interface SelectSearchOption {
  name: string;
  value: string;
}

const ShareModal = (props: ShareModal) => {
  const [stream, setStream] = useState<string | undefined>(undefined);
  const [topic, setTopic] = useState<string | undefined>(undefined);
  const [includeTopics, setIncludeTopics] = useState(false);
  const [isLoading, setIsLoading] = useState(true);
  const [streams, setStreams] = useState<Stream[]>([]);
  const { zulip_streams } = useContext(DomainContext);

  useEffect(() => {
    fetch(zulip_streams + "/streams.json")
      .then((response) => {
        if (!response.ok) {
          throw new Error("Network response was not ok");
        }
        return response.json();
      })
      .then((data) => {
        data = data.sort((a: Stream, b: Stream) =>
          a.name.localeCompare(b.name),
        );
        setStreams(data);
        setIsLoading(false);
        // data now contains the JavaScript object decoded from JSON
      })
      .catch((error) => {
        console.error("There was a problem with your fetch operation:", error);
      });
  }, []);

  const handleSendToZulip = () => {
    if (!props.transcript) return;

    const msg = getZulipMessage(props.transcript, props.topics, includeTopics);

    if (stream && topic) sendZulipMessage(stream, topic, msg);
  };

  if (props.show && isLoading) {
    return <div>Loading...</div>;
  }

  let streamOptions: SelectSearchOption[] = [];
  if (streams) {
    streams.forEach((stream) => {
      const value = stream.name;
      streamOptions.push({ name: value, value: value });
    });
  }

  return (
    <div className="absolute">
      {props.show && (
        <div className="fixed inset-0 bg-gray-600 bg-opacity-50 overflow-y-auto h-full w-full z-50">
          <div className="relative top-20 mx-auto p-5 w-96 shadow-lg rounded-md bg-white">
            <div className="mt-3 text-center">
              <h3 className="font-bold text-xl">Send to Zulip</h3>

              {/* Checkbox for 'Include Topics' */}
              <div className="mt-4 text-left ml-5">
                <label className="flex items-center">
                  <input
                    type="checkbox"
                    className="form-checkbox rounded border-gray-300 text-indigo-600 shadow-sm focus:border-indigo-300 focus:ring focus:ring-indigo-200 focus:ring-opacity-50"
                    checked={includeTopics}
                    onChange={(e) => setIncludeTopics(e.target.checked)}
                  />
                  <span className="ml-2">Include topics</span>
                </label>
              </div>

              <div className="flex items-center mt-4">
                <span className="mr-2">#</span>
                <SelectSearch
                  search={true}
                  options={streamOptions}
                  value={stream}
                  onChange={(val) => {
                    setTopic(undefined);
                    setStream(val.toString());
                  }}
                  placeholder="Pick a stream"
                />
              </div>

              {stream && (
                <>
                  <div className="flex items-center mt-4">
                    <span className="mr-2 invisible">#</span>
                    <SelectSearch
                      search={true}
                      options={
                        streams
                          .find((s) => s.name == stream)
                          ?.topics.sort((a: string, b: string) =>
                            a.localeCompare(b),
                          )
                          .map((t) => ({ name: t, value: t })) || []
                      }
                      value={topic}
                      onChange={(val) => setTopic(val.toString())}
                      placeholder="Pick a topic"
                    />
                  </div>
                </>
              )}

              <button
                className={`bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded py-2 px-4 mr-3 ${
                  !stream || !topic ? "opacity-50 cursor-not-allowed" : ""
                }`}
                disabled={!stream || !topic}
                onClick={() => {
                  handleSendToZulip();
                  props.setShow(false);
                }}
              >
                Send to Zulip
              </button>

              <button
                className="bg-red-500 hover:bg-red-700 focus-visible:bg-red-700 text-white rounded py-2 px-4 mt-4"
                onClick={() => props.setShow(false)}
              >
                Close
              </button>
            </div>
          </div>
        </div>
      )}
    </div>
  );
};

export default ShareModal;
28
www/app/[domain]/transcripts/audioInputsDropdown.tsx
Normal file
@@ -0,0 +1,28 @@
import React, { useEffect, useState } from "react";
import Dropdown, { Option } from "react-dropdown";
import "react-dropdown/style.css";

const AudioInputsDropdown: React.FC<{
  audioDevices: Option[];
  disabled: boolean;
  hide: () => void;
  deviceId: string;
  setDeviceId: React.Dispatch<React.SetStateAction<string | null>>;
}> = (props) => {
  const handleDropdownChange = (option: Option) => {
    props.setDeviceId(option.value);
    props.hide();
  };

  return (
    <Dropdown
      options={props.audioDevices}
      onChange={handleDropdownChange}
      value={props.deviceId}
      className="flex-grow w-full"
      disabled={props.disabled}
    />
  );
};

export default AudioInputsDropdown;
44
www/app/[domain]/transcripts/createTranscript.ts
Normal file
@@ -0,0 +1,44 @@
import { useState } from "react";
import { useError } from "../../(errors)/errorContext";
import { GetTranscript, CreateTranscript } from "../../api";
import useApi from "../../lib/useApi";

type UseTranscript = {
  transcript: GetTranscript | null;
  loading: boolean;
  error: Error | null;
  create: (transcriptCreationDetails: CreateTranscript) => void;
};

const useCreateTranscript = (): UseTranscript => {
  const [transcript, setTranscript] = useState<GetTranscript | null>(null);
  const [loading, setLoading] = useState<boolean>(false);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();
  const api = useApi();

  const create = (transcriptCreationDetails: CreateTranscript) => {
    if (loading || !api) return;

    setLoading(true);

    api
      .v1TranscriptsCreate(transcriptCreationDetails)
      .then((transcript) => {
        setTranscript(transcript);
        setLoading(false);
      })
      .catch((err) => {
        setError(
          err,
          "There was an issue creating a transcript, please try again.",
        );
        setErrorState(err);
        setLoading(false);
      });
  };

  return { transcript, loading, error, create };
};

export default useCreateTranscript;
13
www/app/[domain]/transcripts/disconnectedIndicator.tsx
Normal file
@@ -0,0 +1,13 @@
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faLinkSlash } from "@fortawesome/free-solid-svg-icons";

export default function DisconnectedIndicator() {
  return (
    <div className="absolute top-0 left-0 w-full h-full bg-black opacity-50 flex justify-center items-center">
      <div className="text-white text-2xl">
        <FontAwesomeIcon icon={faLinkSlash} className="mr-2" />
        Disconnected
      </div>
    </div>
  );
}
50
www/app/[domain]/transcripts/fileUploadButton.tsx
Normal file
@@ -0,0 +1,50 @@
import React from "react";
import useApi from "../../lib/useApi";
import { Body_transcript_record_upload_v1_transcripts__transcript_id__record_upload_post } from "../../api";

type FileUploadButton = {
  transcriptId: string;
};

export default function FileUploadButton(props: FileUploadButton) {
  const fileInputRef = React.useRef<HTMLInputElement>(null);
  const api = useApi();

  const triggerFileUpload = () => {
    fileInputRef.current?.click();
  };

  const handleFileUpload = (event: React.ChangeEvent<HTMLInputElement>) => {
    const file = event.target.files?.[0];

    if (file) {
      console.log("Calling api.v1TranscriptRecordUpload()...");

      // Create an object of the expected type
      const uploadData = {
        file: file,
        // Add other properties if required by the type definition
      };

      api?.v1TranscriptRecordUpload(props.transcriptId, uploadData);
    }
  };

  return (
    <>
      <button
        className="bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white ml-2 md:ml-4 md:h-[78px] md:min-w-[100px] text-lg"
        onClick={triggerFileUpload}
      >
        Upload File
      </button>

      <input
        type="file"
        ref={fileInputRef}
        style={{ display: "none" }}
        onChange={handleFileUpload}
      />
    </>
  );
}
186
www/app/[domain]/transcripts/finalSummary.tsx
Normal file
@@ -0,0 +1,186 @@
import { useRef, useState } from "react";
import React from "react";
import Markdown from "react-markdown";
import "../../styles/markdown.css";
import { featureEnabled } from "../domainContext";
import { UpdateTranscript } from "../../api";
import useApi from "../../lib/useApi";

type FinalSummaryProps = {
  summary: string;
  fullTranscript: string;
  transcriptId: string;
  openZulipModal: () => void;
};

export default function FinalSummary(props: FinalSummaryProps) {
  const finalSummaryRef = useRef<HTMLParagraphElement>(null);
  const [isCopiedSummary, setIsCopiedSummary] = useState(false);
  const [isCopiedTranscript, setIsCopiedTranscript] = useState(false);
  const [isEditMode, setIsEditMode] = useState(false);
  const [preEditSummary, setPreEditSummary] = useState(props.summary);
  const [editedSummary, setEditedSummary] = useState(props.summary);
  // Hooks must be called at the top level of the component, not inside callbacks
  const api = useApi();

  const updateSummary = async (newSummary: string, transcriptId: string) => {
    try {
      const requestBody: UpdateTranscript = {
        long_summary: newSummary,
      };
      const updatedTranscript = await api?.v1TranscriptUpdate(
        transcriptId,
        requestBody,
      );
      console.log("Updated long summary:", updatedTranscript);
    } catch (err) {
      console.error("Failed to update long summary:", err);
    }
  };

  const onCopySummaryClick = () => {
    let text_to_copy = finalSummaryRef.current?.innerText;

    text_to_copy &&
      navigator.clipboard.writeText(text_to_copy).then(() => {
        setIsCopiedSummary(true);
        // Reset the copied state after 2 seconds
        setTimeout(() => setIsCopiedSummary(false), 2000);
      });
  };

  const onCopyTranscriptClick = () => {
    let text_to_copy = props.fullTranscript;

    text_to_copy &&
      navigator.clipboard.writeText(text_to_copy).then(() => {
        setIsCopiedTranscript(true);
        // Reset the copied state after 2 seconds
        setTimeout(() => setIsCopiedTranscript(false), 2000);
      });
  };

  const onEditClick = () => {
    setPreEditSummary(editedSummary);
    setIsEditMode(true);
  };

  const onDiscardClick = () => {
    setEditedSummary(preEditSummary);
    setIsEditMode(false);
  };

  const onSaveClick = () => {
    updateSummary(editedSummary, props.transcriptId);
    setIsEditMode(false);
  };

  const handleTextAreaKeyDown = (e: React.KeyboardEvent) => {
    if (e.key === "Escape") {
      onDiscardClick();
    }

    if (e.key === "Enter" && e.shiftKey) {
      onSaveClick();
      e.preventDefault(); // prevent the default action of adding a new line
    }
  };

  return (
    <div
      className={
        (isEditMode ? "overflow-y-none" : "overflow-y-auto") +
        " max-h-full flex flex-col h-full"
      }
    >
      <div className="flex flex-row flex-wrap-reverse justify-between items-center">
        <h2 className="text-lg sm:text-xl md:text-2xl font-bold">
          Final Summary
        </h2>

        <div className="ml-auto flex space-x-2 mb-2">
          {isEditMode && (
            <>
              <button
                onClick={onDiscardClick}
                className={"text-gray-500 text-sm hover:underline"}
              >
                Discard Changes
              </button>
              <button
                onClick={onSaveClick}
                className={
                  "bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded p-2"
                }
              >
                Save Changes
              </button>
            </>
          )}

          {!isEditMode && (
            <>
              {featureEnabled("sendToZulip") && (
                <button
                  className={
                    "bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded p-2 sm:text-base"
                  }
                  onClick={() => props.openZulipModal()}
                >
                  <span className="text-xs">➡️ Zulip</span>
                </button>
              )}

              <button
                onClick={onEditClick}
                className={
                  "bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded p-2 sm:text-base"
                }
              >
                <span className="text-xs">✏️ Summary</span>
              </button>
              <button
                onClick={onCopyTranscriptClick}
                className={
                  (isCopiedTranscript ? "bg-blue-500" : "bg-blue-400") +
                  " hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded p-2 sm:text-base"
                }
                style={{ minHeight: "30px" }}
              >
                <span className="text-xs">
                  {isCopiedTranscript ? "Copied!" : "Copy Transcript"}
                </span>
              </button>
              <button
                onClick={onCopySummaryClick}
                className={
                  (isCopiedSummary ? "bg-blue-500" : "bg-blue-400") +
                  " hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded p-2 sm:text-base"
                }
                style={{ minHeight: "30px" }}
              >
                <span className="text-xs">
                  {isCopiedSummary ? "Copied!" : "Copy Summary"}
                </span>
              </button>
            </>
          )}
        </div>
      </div>

      {isEditMode ? (
        <div className="flex-grow overflow-y-none">
          <textarea
            value={editedSummary}
            onChange={(e) => setEditedSummary(e.target.value)}
            className="markdown w-full h-full d-block p-2 border rounded shadow-sm"
            onKeyDown={(e) => handleTextAreaKeyDown(e)}
          />
        </div>
      ) : (
        <p ref={finalSummaryRef} className="markdown">
          <Markdown>{editedSummary}</Markdown>
        </p>
      )}
    </div>
  );
}
23
www/app/[domain]/transcripts/liveTranscription.tsx
Normal file
@@ -0,0 +1,23 @@
type LiveTranscriptionProps = {
  text: string;
  translateText: string;
};

export default function LiveTrancription(props: LiveTranscriptionProps) {
  return (
    <div className="text-center p-4">
      <p
        className={`text-lg md:text-xl lg:text-2xl font-bold ${
          props.translateText ? "line-clamp-2 lg:line-clamp-5" : "line-clamp-4"
        }`}
      >
        {props.text}
      </p>
      {props.translateText && (
        <p className="text-base md:text-lg lg:text-xl font-bold line-clamp-2 lg:line-clamp-4 mt-4">
          {props.translateText}
        </p>
      )}
    </div>
  );
}
15
www/app/[domain]/transcripts/modal.tsx
Normal file
@@ -0,0 +1,15 @@
type ModalProps = {
  title: string;
  text: string;
};

export default function Modal(props: ModalProps) {
  return (
    <>
      <div className="w-full flex flex-col items-center justify-center bg-white px-6 py-8 mt-8 rounded-xl">
        <h1 className="text-2xl font-bold text-blue-500">{props.title}</h1>
        <p className="text-gray-500 text-center mt-5">{props.text}</p>
      </div>
    </>
  );
}
150
www/app/[domain]/transcripts/new/page.tsx
Normal file
@@ -0,0 +1,150 @@
"use client";
import React, { useEffect, useState } from "react";
import useAudioDevice from "../useAudioDevice";
import "react-select-search/style.css";
import "../../../styles/button.css";
import "../../../styles/form.scss";
import About from "../../../(aboutAndPrivacy)/about";
import Privacy from "../../../(aboutAndPrivacy)/privacy";
import { useRouter } from "next/navigation";
import useCreateTranscript from "../createTranscript";
import SelectSearch from "react-select-search";
import { supportedLanguages } from "../../supportedLanguages";
import { useFiefIsAuthenticated } from "@fief/fief/nextjs/react";
import { featureEnabled } from "../../domainContext";

const TranscriptCreate = () => {
  const router = useRouter();
  const isAuthenticated = useFiefIsAuthenticated();
  const requireLogin = featureEnabled("requireLogin");

  const [name, setName] = useState<string>("");
  const nameChange = (event: React.ChangeEvent<HTMLInputElement>) => {
    setName(event.target.value);
  };
  const [targetLanguage, setTargetLanguage] = useState<string>();

  const onLanguageChange = (newval) => {
    (!newval || typeof newval === "string") && setTargetLanguage(newval);
  };

  const createTranscript = useCreateTranscript();

  const [loadingSend, setLoadingSend] = useState(false);

  const send = () => {
    if (loadingSend || createTranscript.loading || permissionDenied) return;
    setLoadingSend(true);
    createTranscript.create({ name, target_language: targetLanguage });
  };

  useEffect(() => {
    createTranscript.transcript &&
      router.push(`/transcripts/${createTranscript.transcript.id}/record`);
  }, [createTranscript.transcript]);

  useEffect(() => {
    if (createTranscript.error) setLoadingSend(false);
  }, [createTranscript.error]);

  const { loading, permissionOk, permissionDenied, requestPermission } =
    useAudioDevice();

  return (
    <>
      <div className="hidden lg:block"></div>
      <div className="lg:grid lg:grid-cols-2 lg:grid-rows-1 lg:gap-4 lg:h-full h-auto flex flex-col">
        <section className="flex flex-col w-full lg:h-full items-center justify-evenly p-4 md:px-6 md:py-8">
          <div className="flex flex-col max-w-xl items-center justify-center">
            <h1 className="text-2xl font-bold mb-2">
              Welcome to reflector.media
            </h1>
            <button>Test upload</button>
            <p>
              Reflector is a transcription and summarization pipeline that
              transforms audio into knowledge.
              <span className="hidden md:block">
                The output is meeting minutes and topic summaries enabling
                topic-specific analyses stored in your systems of record. This
                is accomplished on your infrastructure – without 3rd parties –
                keeping your data private, secure, and organized.
              </span>
            </p>
            <About buttonText="Learn more" />
            <p className="mt-6">
              In order to use Reflector, we kindly request permission to access
              your microphone during meetings and events.
            </p>
            {featureEnabled("privacy") && (
              <Privacy buttonText="Privacy policy" />
            )}
          </div>
        </section>
        <section className="flex flex-col justify-center items-center w-full h-full">
          {requireLogin && !isAuthenticated ? (
            <button
              className="mt-4 bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white font-bold py-2 px-4 rounded"
              onClick={() => router.push("/login")}
            >
              Log in
            </button>
          ) : (
            <div className="rounded-xl md:bg-blue-200 md:w-96 p-4 lg:p-6 flex flex-col mb-4 md:mb-10">
              <h2 className="text-2xl font-bold mt-2 mb-2">Try Reflector</h2>
              <label className="mb-3">
                <p>Recording name</p>
                <div className="select-search-container">
                  <input
                    className="select-search-input"
                    type="text"
                    onChange={nameChange}
                    placeholder="Optional"
                  />
                </div>
              </label>

              <label className="mb-3">
                <p>Do you want to enable live translation?</p>
                <SelectSearch
                  search
                  options={supportedLanguages}
                  value={targetLanguage}
                  onChange={onLanguageChange}
                  placeholder="Choose your language"
                />
              </label>

              {loading ? (
                <p className="">Checking permissions...</p>
              ) : permissionOk ? (
                <p className=""> Microphone permission granted </p>
              ) : permissionDenied ? (
                <p className="">
                  Permission to use your microphone was denied. Please change
                  the permission setting in your browser and refresh this page.
                </p>
              ) : (
                <button
                  className="mt-4 bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white font-bold py-2 px-4 rounded"
                  onClick={requestPermission}
                  disabled={permissionDenied}
                >
                  Request Microphone Permission
                </button>
              )}
              <button
                className="mt-4 bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500 text-white font-bold py-2 px-4 rounded"
                onClick={send}
                disabled={!permissionOk || loadingSend}
              >
                {loadingSend ? "Loading..." : "Confirm"}
              </button>
            </div>
          )}
        </section>
      </div>
    </>
  );
};

export default TranscriptCreate;
166
www/app/[domain]/transcripts/player.tsx
Normal file
@@ -0,0 +1,166 @@
import React, { useRef, useEffect, useState } from "react";

import WaveSurfer from "wavesurfer.js";
import CustomRegionsPlugin from "../../lib/custom-plugins/regions";

import { formatTime } from "../../lib/time";
import { Topic } from "./webSocketTypes";
import { AudioWaveform } from "../../api";
import { waveSurferStyles } from "../../styles/recorder";

type PlayerProps = {
  topics: Topic[];
  useActiveTopic: [
    Topic | null,
    React.Dispatch<React.SetStateAction<Topic | null>>,
  ];
  waveform: AudioWaveform;
  media: HTMLMediaElement;
  mediaDuration: number;
};

export default function Player(props: PlayerProps) {
  const waveformRef = useRef<HTMLDivElement>(null);
  const [wavesurfer, setWavesurfer] = useState<WaveSurfer | null>(null);
  const [isPlaying, setIsPlaying] = useState<boolean>(false);
  const [currentTime, setCurrentTime] = useState<number>(0);
  const [waveRegions, setWaveRegions] = useState<CustomRegionsPlugin | null>(
    null,
  );
  const [activeTopic, setActiveTopic] = props.useActiveTopic;
  const topicsRef = useRef(props.topics);

  // Waveform setup
  useEffect(() => {
    if (waveformRef.current) {
      // XXX duration is required to prevent recomputing peaks from audio.
      // However, the current waveform returns only the peaks and no duration,
      // and the backend does not save duration properly.
      // So at the moment, we deduct the duration from the topics.
      // This is not ideal, but it works for now.
      const _wavesurfer = WaveSurfer.create({
        container: waveformRef.current,
        peaks: props.waveform,
        hideScrollbar: true,
        autoCenter: true,
        barWidth: 2,
        height: "auto",
        duration: props.mediaDuration,

        ...waveSurferStyles.player,
      });

      // styling
      const wsWrapper = _wavesurfer.getWrapper();
      wsWrapper.style.cursor = waveSurferStyles.playerStyle.cursor;
      wsWrapper.style.backgroundColor =
        waveSurferStyles.playerStyle.backgroundColor;
      wsWrapper.style.borderRadius = waveSurferStyles.playerStyle.borderRadius;

      _wavesurfer.on("play", () => {
        setIsPlaying(true);
      });
      _wavesurfer.on("pause", () => {
        setIsPlaying(false);
      });
      _wavesurfer.on("timeupdate", setCurrentTime);

      setWaveRegions(_wavesurfer.registerPlugin(CustomRegionsPlugin.create()));

      _wavesurfer.toggleInteraction(true);

      _wavesurfer.setMediaElement(props.media);

      setWavesurfer(_wavesurfer);

      return () => {
        _wavesurfer.destroy();
        setIsPlaying(false);
        setCurrentTime(0);
      };
    }
  }, []);

  useEffect(() => {
    if (!wavesurfer) return;
    if (!props.media) return;
    wavesurfer.setMediaElement(props.media);
  }, [props.media, wavesurfer]);

  useEffect(() => {
    topicsRef.current = props.topics;
    renderMarkers();
  }, [props.topics, waveRegions]);

  const renderMarkers = () => {
    if (!waveRegions) return;

    waveRegions.clearRegions();

    for (let topic of topicsRef.current) {
      const content = document.createElement("div");
      content.setAttribute("style", waveSurferStyles.marker);
      content.onmouseover = () => {
        content.style.backgroundColor =
          waveSurferStyles.markerHover.backgroundColor;
        content.style.zIndex = "999";
        content.style.width = "300px";
      };
      content.onmouseout = () => {
        content.setAttribute("style", waveSurferStyles.marker);
      };
      content.textContent = topic.title;

      const region = waveRegions.addRegion({
        start: topic.timestamp,
        content,
        color: "f00",
        drag: false,
      });
      region.on("click", (e) => {
        e.stopPropagation();
        setActiveTopic(topic);
        wavesurfer?.setTime(region.start);
      });
    }
  };

  useEffect(() => {
    if (activeTopic) {
      wavesurfer?.setTime(activeTopic.timestamp);
    }
  }, [activeTopic]);

  const handlePlayClick = () => {
    wavesurfer?.playPause();
  };

  const timeLabel = () => {
    if (props.mediaDuration)
      return `${formatTime(currentTime)}/${formatTime(props.mediaDuration)}`;
    return "";
  };

  return (
    <div className="flex items-center w-full relative">
      <div className="flex-grow items-end relative">
        <div
          ref={waveformRef}
          className="flex-grow rounded-lg md:rounded-xl h-20"
        ></div>
        <div className="absolute right-2 bottom-0">{timeLabel()}</div>
      </div>

      <button
        className={`${
          isPlaying
            ? "bg-orange-400 hover:bg-orange-500 focus-visible:bg-orange-500"
            : "bg-green-400 hover:bg-green-500 focus-visible:bg-green-500"
        } text-white ml-2 md:ml-4 md:h-[78px] md:min-w-[100px] text-lg`}
        id="play-btn"
        onClick={handlePlayClick}
      >
        {isPlaying ? "Pause" : "Play"}
      </button>
    </div>
  );
}
357
www/app/[domain]/transcripts/recorder.tsx
Normal file
@@ -0,0 +1,357 @@
import React, { useRef, useEffect, useState } from "react";

import WaveSurfer from "wavesurfer.js";
import RecordPlugin from "../../lib/custom-plugins/record";
import CustomRegionsPlugin from "../../lib/custom-plugins/regions";

import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faMicrophone } from "@fortawesome/free-solid-svg-icons";

import { formatTime } from "../../lib/time";
import AudioInputsDropdown from "./audioInputsDropdown";
import { Option } from "react-dropdown";
import { waveSurferStyles } from "../../styles/recorder";
import { useError } from "../../(errors)/errorContext";
import FileUploadButton from "./fileUploadButton";

type RecorderProps = {
  setStream: React.Dispatch<React.SetStateAction<MediaStream | null>>;
  onStop: () => void;
  onRecord?: () => void;
  getAudioStream: (deviceId) => Promise<MediaStream | null>;
  audioDevices: Option[];
  transcriptId: string;
};

export default function Recorder(props: RecorderProps) {
  const waveformRef = useRef<HTMLDivElement>(null);
  const [wavesurfer, setWavesurfer] = useState<WaveSurfer | null>(null);
  const [record, setRecord] = useState<RecordPlugin | null>(null);
  const [isRecording, setIsRecording] = useState<boolean>(false);
  const [hasRecorded, setHasRecorded] = useState<boolean>(false);
  const [isPlaying, setIsPlaying] = useState<boolean>(false);
  const [currentTime, setCurrentTime] = useState<number>(0);
  const [timeInterval, setTimeInterval] = useState<number | null>(null);
  const [duration, setDuration] = useState<number>(0);
  const [waveRegions, setWaveRegions] = useState<CustomRegionsPlugin | null>(
    null,
  );
  const [deviceId, setDeviceId] = useState<string | null>(null);
  const [recordStarted, setRecordStarted] = useState(false);
  const [showDevices, setShowDevices] = useState(false);
  const { setError } = useError();

  // Function used to set up keyboard shortcuts for the streamdeck
  const setupProjectorKeys = (): (() => void) => {
    if (!record) return () => {};

    const handleKeyPress = (event: KeyboardEvent) => {
      switch (event.key) {
        case "~":
          location.href = "";
          break;
        case ",":
          location.href = "/transcripts/new";
          break;
        case "!":
          if (record.isRecording()) return;
          handleRecClick();
          break;
        case "@":
          if (!record.isRecording()) return;
          handleRecClick();
          break;
        case "(":
          location.href = "/login";
          break;
        case ")":
          location.href = "/logout";
          break;
        default:
          break;
      }
    };

    document.addEventListener("keydown", handleKeyPress);

    // Return the cleanup function
    return () => {
      document.removeEventListener("keydown", handleKeyPress);
    };
  };

  // Setup shortcuts
  useEffect(() => {
    if (!record) return;

    return setupProjectorKeys();
  }, [record, deviceId]);

  // Waveform setup
  useEffect(() => {
    if (waveformRef.current) {
      const _wavesurfer = WaveSurfer.create({
        container: waveformRef.current,
        hideScrollbar: true,
        autoCenter: true,
        barWidth: 2,
        height: "auto",

        ...waveSurferStyles.player,
      });

      const _wshack: any = _wavesurfer;
      _wshack.renderer.renderSingleCanvas = () => {};

      // styling
      const wsWrapper = _wavesurfer.getWrapper();
      wsWrapper.style.cursor = waveSurferStyles.playerStyle.cursor;
      wsWrapper.style.backgroundColor =
        waveSurferStyles.playerStyle.backgroundColor;
      wsWrapper.style.borderRadius = waveSurferStyles.playerStyle.borderRadius;

      _wavesurfer.on("play", () => {
        setIsPlaying(true);
      });
      _wavesurfer.on("pause", () => {
        setIsPlaying(false);
      });
      _wavesurfer.on("timeupdate", setCurrentTime);

      setRecord(_wavesurfer.registerPlugin(RecordPlugin.create()));
      setWaveRegions(_wavesurfer.registerPlugin(CustomRegionsPlugin.create()));

      setWavesurfer(_wavesurfer);

      return () => {
        _wavesurfer.destroy();
        setIsRecording(false);
        setIsPlaying(false);
        setCurrentTime(0);
      };
    }
  }, []);

  useEffect(() => {
    if (isRecording) {
      const interval = window.setInterval(() => {
        setCurrentTime((prev) => prev + 1);
      }, 1000);
      setTimeInterval(interval);
      return () => clearInterval(interval);
    } else {
      clearInterval(timeInterval as number);
      setCurrentTime((prev) => {
        setDuration(prev);
        return 0;
      });
    }
  }, [isRecording]);

  const handleRecClick = async () => {
    if (!record) return console.log("no record");

    if (record.isRecording()) {
      if (props.onStop) props.onStop();
      record.stopRecording();
      if (screenMediaStream) {
        screenMediaStream.getTracks().forEach((t) => t.stop());
      }
      setIsRecording(false);
      setHasRecorded(true);
      setScreenMediaStream(null);
      setDestinationStream(null);
    } else {
      if (props.onRecord) props.onRecord();
      const stream = await getCurrentStream();

      if (props.setStream) props.setStream(stream);
      if (stream) {
        await record.startRecording(stream);
        setIsRecording(true);
      }
    }
  };

  const [screenMediaStream, setScreenMediaStream] =
    useState<MediaStream | null>(null);

  const handleRecordTabClick = async () => {
    if (!record) return console.log("no record");
    const stream: MediaStream = await navigator.mediaDevices.getDisplayMedia({
      video: true,
      audio: {
        echoCancellation: true,
        noiseSuppression: true,
        sampleRate: 44100,
      },
    });

    if (stream.getAudioTracks().length == 0) {
      setError(new Error("No audio track found in screen recording."));
      return;
    }
    setScreenMediaStream(stream);
  };

  const [destinationStream, setDestinationStream] =
    useState<MediaStream | null>(null);

  const startTabRecording = async () => {
    if (!screenMediaStream) return;
    if (!record) return;
    if (destinationStream !== null) return console.log("already recording");

    // connect mic audio (microphone)
    const micStream = await getCurrentStream();
    if (!micStream) {
      console.log("no microphone audio");
      return;
    }

    // Create MediaStreamSource nodes for the microphone and tab
    const audioContext = new AudioContext();
    const micSource = audioContext.createMediaStreamSource(micStream);
    const tabSource = audioContext.createMediaStreamSource(screenMediaStream);

    // Merge channels
    // XXX If the length is not the same, we do not receive audio in WebRTC.
    // So for now, merge the channels to have only one stereo source
    const channelMerger = audioContext.createChannelMerger(1);
    micSource.connect(channelMerger, 0, 0);
    tabSource.connect(channelMerger, 0, 0);

    // Create a MediaStreamDestination node
    const destination = audioContext.createMediaStreamDestination();
    channelMerger.connect(destination);

    // Use the destination's stream for the WebRTC connection
    setDestinationStream(destination.stream);
  };

  useEffect(() => {
    if (!record) return;
    if (!destinationStream) return;
    if (props.setStream) props.setStream(destinationStream);
    if (destinationStream) {
      record.startRecording(destinationStream);
      setIsRecording(true);
    }
  }, [record, destinationStream]);

  useEffect(() => {
    startTabRecording();
  }, [record, screenMediaStream]);

  const handlePlayClick = () => {
    wavesurfer?.playPause();
  };

  const timeLabel = () => {
    if (isRecording) return formatTime(currentTime);
    if (duration) return `${formatTime(currentTime)}/${formatTime(duration)}`;
    return "";
  };

  const getCurrentStream = async () => {
    setRecordStarted(true);
    return deviceId && props.getAudioStream
      ? await props.getAudioStream(deviceId)
      : null;
  };

  useEffect(() => {
    if (props.audioDevices && props.audioDevices.length > 0) {
      setDeviceId(props.audioDevices[0].value);
    }
  }, [props.audioDevices]);

  return (
    <div className="flex items-center w-full relative">
      <div className="flex-grow items-end relative">
        <div
          ref={waveformRef}
          className="flex-grow rounded-lg md:rounded-xl h-20"
        ></div>
        <div className="absolute right-2 bottom-0">
          {isRecording && (
            <div className="inline-block bg-red-500 rounded-full w-2 h-2 my-auto mr-1 animate-ping"></div>
          )}
          {timeLabel()}
        </div>
      </div>

      {hasRecorded && (
        <>
          <button
            className={`${
              isPlaying
                ? "bg-orange-400 hover:bg-orange-500 focus-visible:bg-orange-500"
                : "bg-green-400 hover:bg-green-500 focus-visible:bg-green-500"
            } text-white ml-2 md:ml-4 md:h-[78px] md:min-w-[100px] text-lg`}
            id="play-btn"
            onClick={handlePlayClick}
          >
            {isPlaying ? "Pause" : "Play"}
          </button>
        </>
      )}
      {!hasRecorded && (
        <>
          <button
            className={`${
              isRecording
                ? "bg-red-400 hover:bg-red-500 focus-visible:bg-red-500"
                : "bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500"
            } text-white ml-2 md:ml-4 md:h-[78px] md:min-w-[100px] text-lg`}
            onClick={handleRecClick}
            disabled={isPlaying}
          >
            {isRecording ? "Stop" : "Record"}
          </button>

          <FileUploadButton
            transcriptId={props.transcriptId}
          ></FileUploadButton>

          {!isRecording && (
            <button
              className={`${
                isRecording
                  ? "bg-red-400 hover:bg-red-500 focus-visible:bg-red-500"
                  : "bg-blue-400 hover:bg-blue-500 focus-visible:bg-blue-500"
              } text-white ml-2 md:ml-4 md:h-[78px] md:min-w-[100px] text-lg`}
              onClick={handleRecordTabClick}
            >
              Record
              <br />a tab
            </button>
          )}
          {props.audioDevices && props.audioDevices?.length > 0 && deviceId && (
            <>
              <button
                className="text-center text-blue-400 hover:text-blue-700 ml-2 md:ml-4 p-2 rounded-lg focus-visible:outline outline-blue-400"
                onClick={() => setShowDevices((prev) => !prev)}
              >
                <FontAwesomeIcon icon={faMicrophone} className="h-5 w-auto" />
              </button>
              <div
                className={`absolute z-20 bottom-[-1rem] right-0 bg-white rounded ${
                  showDevices ? "visible" : "invisible"
                }`}
              >
                <AudioInputsDropdown
                  setDeviceId={setDeviceId}
                  audioDevices={props.audioDevices}
                  disabled={recordStarted}
                  hide={() => setShowDevices(false)}
                  deviceId={deviceId}
                />
              </div>
            </>
          )}
        </>
      )}
    </div>
  );
}
23
www/app/[domain]/transcripts/scrollToBottom.tsx
Normal file
@@ -0,0 +1,23 @@
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faArrowDown } from "@fortawesome/free-solid-svg-icons";

type ScrollToBottomProps = {
  visible: boolean;
  handleScrollBottom: () => void;
};

export default function ScrollToBottom(props: ScrollToBottomProps) {
  return (
    <div
      className={`absolute bottom-0 right-[0.15rem] md:right-[0.65rem] ${
        props.visible ? "flex" : "hidden"
      } text-2xl cursor-pointer opacity-70 hover:opacity-100 transition-opacity duration-200 text-blue-400`}
      onClick={() => {
        props.handleScrollBottom();
        return false;
      }}
    >
      <FontAwesomeIcon icon={faArrowDown} className="animate-bounce" />
    </div>
  );
}
153
www/app/[domain]/transcripts/shareLink.tsx
Normal file
@@ -0,0 +1,153 @@
import React, { useState, useRef, useEffect } from "react";
import { featureEnabled } from "../domainContext";
import { useFiefUserinfo } from "@fief/fief/nextjs/react";
import SelectSearch from "react-select-search";
import "react-select-search/style.css";
import "../../styles/button.css";
import "../../styles/form.scss";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faSpinner } from "@fortawesome/free-solid-svg-icons";
import { UpdateTranscript } from "../../api";
import { ShareMode, toShareMode } from "../../lib/shareMode";
import useApi from "../../lib/useApi";

type ShareLinkProps = {
  transcriptId: string;
  userId: string | null;
  shareMode: ShareMode;
};

const ShareLink = (props: ShareLinkProps) => {
  const [isCopied, setIsCopied] = useState(false);
  const inputRef = useRef<HTMLInputElement>(null);
  const [currentUrl, setCurrentUrl] = useState<string>("");
  const requireLogin = featureEnabled("requireLogin");
  const [isOwner, setIsOwner] = useState(false);
  const [shareMode, setShareMode] = useState<ShareMode>(props.shareMode);
  const [shareLoading, setShareLoading] = useState(false);
  const userinfo = useFiefUserinfo();
  const api = useApi();

  useEffect(() => {
    setCurrentUrl(window.location.href);
  }, []);

  useEffect(() => {
    setIsOwner(!!(requireLogin && userinfo?.sub === props.userId));
  }, [userinfo, props.userId]);

  const handleCopyClick = () => {
    if (inputRef.current) {
      let text_to_copy = inputRef.current.value;

      text_to_copy &&
        navigator.clipboard.writeText(text_to_copy).then(() => {
          setIsCopied(true);
          // Reset the copied state after 2 seconds
          setTimeout(() => setIsCopied(false), 2000);
        });
    }
  };

  const updateShareMode = async (selectedShareMode: string) => {
    if (!api)
      throw new Error("ShareLink's API should always be ready at this point");

    setShareLoading(true);
    const requestBody: UpdateTranscript = {
      share_mode: toShareMode(selectedShareMode),
    };

    const updatedTranscript = await api.v1TranscriptUpdate(
      props.transcriptId,
      requestBody,
    );
    setShareMode(toShareMode(updatedTranscript.share_mode));
    setShareLoading(false);
  };
  const privacyEnabled = featureEnabled("privacy");

  return (
    <div
      className="p-2 md:p-4 rounded"
      style={{ background: "rgba(96, 165, 250, 0.2)" }}
    >
      {requireLogin && (
        <div className="text-sm mb-2">
          {shareMode === "private" && (
            <p>This transcript is private and can only be accessed by you.</p>
          )}
          {shareMode === "semi-private" && (
            <p>
              This transcript is secure. Only authenticated users can access it.
            </p>
          )}
          {shareMode === "public" && (
            <p>This transcript is public. Everyone can access it.</p>
          )}

          {isOwner && api && (
            <div className="relative">
              <SelectSearch
                className="select-search--top select-search"
                options={[
                  { name: "Private", value: "private" },
                  { name: "Secure", value: "semi-private" },
                  { name: "Public", value: "public" },
                ]}
                value={shareMode?.toString()}
                onChange={updateShareMode}
                closeOnSelect={true}
              />
              {shareLoading && (
                <div className="h-4 w-4 absolute top-1/3 right-3 z-10">
                  <FontAwesomeIcon
                    icon={faSpinner}
                    className="animate-spin-slow text-gray-600 flex-grow rounded-lg md:rounded-xl h-4 w-4"
                  />
                </div>
              )}
            </div>
          )}
        </div>
      )}
      {!requireLogin && (
        <>
          {privacyEnabled ? (
            <p className="text-sm mb-2">
              Share this link to grant others access to this page. The link
              includes the full audio recording and is valid for the next 7
              days.
            </p>
          ) : (
            <p className="text-sm mb-2">
              Share this link to allow others to view this page and listen to
              the full audio recording.
            </p>
          )}
        </>
      )}
      <div className="flex items-center">
        <input
          type="text"
          readOnly
          value={currentUrl}
          ref={inputRef}
          onChange={() => {}}
          className="border rounded-lg md:rounded-xl p-2 flex-grow flex-shrink overflow-auto mr-2 text-sm bg-slate-100 outline-slate-400"
        />
        <button
          onClick={handleCopyClick}
          className={
            (isCopied ? "bg-blue-500" : "bg-blue-400") +
            " hover:bg-blue-500 focus-visible:bg-blue-500 text-white rounded p-2"
          }
          style={{ minHeight: "38px" }}
        >
          {isCopied ? "Copied!" : "Copy"}
        </button>
      </div>
    </div>
  );
};

export default ShareLink;
153
www/app/[domain]/transcripts/topicList.tsx
Normal file
@@ -0,0 +1,153 @@
import React, { useState, useEffect } from "react";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import {
  faChevronRight,
  faChevronDown,
} from "@fortawesome/free-solid-svg-icons";
import { formatTime } from "../../lib/time";
import ScrollToBottom from "./scrollToBottom";
import { Topic } from "./webSocketTypes";
import { generateHighContrastColor } from "../../lib/utils";

type TopicListProps = {
  topics: Topic[];
  useActiveTopic: [
    Topic | null,
    React.Dispatch<React.SetStateAction<Topic | null>>,
  ];
  autoscroll: boolean;
};

export function TopicList({
  topics,
  useActiveTopic,
  autoscroll,
}: TopicListProps) {
  const [activeTopic, setActiveTopic] = useActiveTopic;
  const [autoscrollEnabled, setAutoscrollEnabled] = useState<boolean>(true);

  useEffect(() => {
    if (autoscroll && autoscrollEnabled) scrollToBottom();
  }, [topics.length]);

  const scrollToBottom = () => {
    const topicsDiv = document.getElementById("topics-div");

    if (topicsDiv) topicsDiv.scrollTop = topicsDiv.scrollHeight;
  };

  // scroll top is not rounded, heights are, so exact match won't work.
  // https://developer.mozilla.org/en-US/docs/Web/API/Element/scrollHeight#determine_if_an_element_has_been_totally_scrolled
  const toggleScroll = (element) => {
    const bottom =
      Math.abs(
        element.scrollHeight - element.clientHeight - element.scrollTop,
      ) < 2 || element.scrollHeight == element.clientHeight;
    if (!bottom && autoscrollEnabled) {
      setAutoscrollEnabled(false);
    } else if (bottom && !autoscrollEnabled) {
      setAutoscrollEnabled(true);
    }
  };
  const handleScroll = (e) => {
    toggleScroll(e.target);
  };

  useEffect(() => {
    if (autoscroll) {
      const topicsDiv = document.getElementById("topics-div");

      topicsDiv && toggleScroll(topicsDiv);
    }
  }, [activeTopic, autoscroll]);

  return (
    <section className="relative w-full h-full bg-blue-400/20 rounded-lg md:rounded-xl p-1 sm:p-2 md:px-4 flex flex-col justify-center align-center">
      {topics.length > 0 ? (
        <>
          <h2 className="ml-2 md:text-lg font-bold mb-2">Topics</h2>

          {autoscroll && (
            <ScrollToBottom
              visible={!autoscrollEnabled}
              handleScrollBottom={scrollToBottom}
            />
          )}

          <div
            id="topics-div"
            className="overflow-y-auto h-full"
            onScroll={handleScroll}
          >
            {topics.map((topic, index) => (
              <button
                key={index}
                className="rounded-none border-solid border-0 border-bluegrey border-b last:border-none last:rounded-b-lg p-2 hover:bg-blue-400/20 focus-visible:bg-blue-400/20 text-left block w-full"
                onClick={() =>
                  setActiveTopic(activeTopic?.id == topic.id ? null : topic)
                }
              >
                <div className="w-full flex justify-between items-center rounded-lg md:rounded-xl xs:text-base sm:text-lg md:text-xl font-bold leading-tight">
                  <p>
                    <span className="font-light font-mono text-slate-500 text-base md:text-lg">
                      [{formatTime(topic.timestamp)}]
                    </span>
                    <span>{topic.title}</span>
                  </p>
                  <FontAwesomeIcon
                    className="transform transition-transform duration-200 ml-2"
                    icon={
                      activeTopic?.id == topic.id
                        ? faChevronDown
                        : faChevronRight
                    }
                  />
                </div>
                {activeTopic?.id == topic.id && (
                  <div className="p-2">
                    {topic.segments ? (
                      <>
                        {topic.segments.map((segment, index: number) => (
                          <p
                            key={index}
                            className="text-left text-slate-500 text-sm md:text-base"
                          >
                            <span className="font-mono text-slate-500">
                              [{formatTime(segment.start)}]
                            </span>
                            <span
                              className="font-bold text-slate-500"
                              style={{
                                color: generateHighContrastColor(
                                  `Speaker ${segment.speaker}`,
                                  [96, 165, 250],
                                ),
                              }}
                            >
                              {" "}
                              (Speaker {segment.speaker}):
                            </span>{" "}
                            <span>{segment.text}</span>
                          </p>
                        ))}
                      </>
                    ) : (
                      <>{topic.transcript}</>
                    )}
                  </div>
                )}
              </button>
            ))}
          </div>
        </>
      ) : (
        <div className="text-center text-gray-500">
          Discussion topics will appear here after you start recording.
          <br />
          It may take up to 5 minutes of conversation for the first topic to
          appear.
        </div>
      )}
    </section>
  );
}
86
www/app/[domain]/transcripts/transcriptTitle.tsx
Normal file
@@ -0,0 +1,86 @@
import { useState } from "react";
import { UpdateTranscript } from "../../api";
import useApi from "../../lib/useApi";

type TranscriptTitle = {
  title: string;
  transcriptId: string;
};

const TranscriptTitle = (props: TranscriptTitle) => {
  const [displayedTitle, setDisplayedTitle] = useState(props.title);
  const [preEditTitle, setPreEditTitle] = useState(props.title);
  const [isEditing, setIsEditing] = useState(false);
  const api = useApi();

  const updateTitle = async (newTitle: string, transcriptId: string) => {
    if (!api) return;
    try {
      const requestBody: UpdateTranscript = {
        title: newTitle,
      };
      // Use the `api` instance from the component scope; calling the useApi
      // hook inside this async function would break the Rules of Hooks and
      // shadow the value checked above.
      const updatedTranscript = await api.v1TranscriptUpdate(
        transcriptId,
        requestBody,
      );
      console.log("Updated transcript:", updatedTranscript);
    } catch (err) {
      console.error("Failed to update transcript:", err);
    }
  };

  const handleTitleClick = () => {
    const isMobile = /iPhone|iPad|iPod|Android/i.test(navigator.userAgent);

    if (isMobile) {
      // Use prompt
      const newTitle = prompt("Please enter the new title:", displayedTitle);
      if (newTitle !== null) {
        setDisplayedTitle(newTitle);
        updateTitle(newTitle, props.transcriptId);
      }
    } else {
      setPreEditTitle(displayedTitle);
      setIsEditing(true);
    }
  };

  const handleChange = (e) => {
    setDisplayedTitle(e.target.value);
  };

  const handleKeyDown = (e) => {
    if (e.key === "Enter") {
      updateTitle(displayedTitle, props.transcriptId);
      setIsEditing(false);
    } else if (e.key === "Escape") {
      setDisplayedTitle(preEditTitle);
      setIsEditing(false);
    }
  };

  return (
    <>
      {isEditing ? (
        <input
          type="text"
          value={displayedTitle}
          onChange={handleChange}
          onKeyDown={handleKeyDown}
          autoFocus
          className="text-2xl lg:text-4xl font-extrabold text-center mb-4 w-full border-none bg-transparent overflow-hidden h-[fit-content]"
        />
      ) : (
        <h2
          className="text-2xl lg:text-4xl font-extrabold text-center mb-4 cursor-pointer"
          onClick={handleTitleClick}
        >
          {displayedTitle}
        </h2>
      )}
    </>
  );
};

export default TranscriptTitle;
130
www/app/[domain]/transcripts/useAudioDevice.ts
Normal file
@@ -0,0 +1,130 @@
import { useEffect, useState } from "react";

import { Option } from "react-dropdown";

const MIC_QUERY = { name: "microphone" as PermissionName };

const useAudioDevice = () => {
  const [permissionOk, setPermissionOk] = useState<boolean>(false);
  const [permissionDenied, setPermissionDenied] = useState<boolean>(false);
  const [audioDevices, setAudioDevices] = useState<Option[]>([]);
  const [loading, setLoading] = useState(true);

  useEffect(() => {
    checkPermission();
  }, []);

  useEffect(() => {
    if (permissionOk) {
      updateDevices();
    }
  }, [permissionOk]);

  const checkPermission = (): void => {
    if (navigator.userAgent.includes("Firefox")) {
      navigator.mediaDevices
        .getUserMedia({ audio: true, video: false })
        .then((stream) => {
          setPermissionOk(true);
          setPermissionDenied(false);
        })
        .catch((e) => {
          setPermissionOk(false);
          setPermissionDenied(false);
        })
        .finally(() => setLoading(false));
      return;
    }

    navigator.permissions
      .query(MIC_QUERY)
      .then((permissionStatus) => {
        setPermissionOk(permissionStatus.state === "granted");
        setPermissionDenied(permissionStatus.state === "denied");
        permissionStatus.onchange = () => {
          setPermissionOk(permissionStatus.state === "granted");
          setPermissionDenied(permissionStatus.state === "denied");
        };
      })
      .catch(() => {
        setPermissionOk(false);
        setPermissionDenied(false);
      })
      .finally(() => {
        setLoading(false);
      });
  };

  const requestPermission = () => {
    navigator.mediaDevices
      .getUserMedia({
        audio: true,
      })
      .then((stream) => {
        if (!navigator.userAgent.includes("Firefox"))
          stream.getTracks().forEach((track) => track.stop());
        setPermissionOk(true);
      })
      .catch(() => {
        setPermissionDenied(true);
        setPermissionOk(false);
      })
      .finally(() => {
        setLoading(false);
      });
  };

  const getAudioStream = async (
    deviceId: string,
  ): Promise<MediaStream | null> => {
    try {
      const urlParams = new URLSearchParams(window.location.search);

      const noiseSuppression = urlParams.get("noiseSuppression") === "true";
      const echoCancellation = urlParams.get("echoCancellation") === "true";

      console.debug(
        "noiseSuppression",
        noiseSuppression,
        "echoCancellation",
        echoCancellation,
      );

      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          deviceId,
          noiseSuppression,
          echoCancellation,
        },
      });
      return stream;
    } catch (e) {
      setPermissionOk(false);
      setAudioDevices([]);
      return null;
    }
  };

  const updateDevices = async (): Promise<void> => {
    const devices = await navigator.mediaDevices.enumerateDevices();
    const _audioDevices = devices
      .filter(
        (d) => d.kind === "audioinput" && d.deviceId != "" && d.label != "",
      )
      .map((d) => ({ value: d.deviceId, label: d.label }));

    setPermissionOk(_audioDevices.length > 0);
    setAudioDevices(_audioDevices);
  };

  return {
    loading,
    permissionOk,
    permissionDenied,
    audioDevices,
    getAudioStream,
    requestPermission,
  };
};

export default useAudioDevice;
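A minimal sketch of how a recording component might consume this hook. The component name and UI below are hypothetical and not part of this commit; only the returned fields (loading, permissionOk, permissionDenied, audioDevices, requestPermission, getAudioStream) come from the file above.

// Hypothetical consumer, not part of this commit: shows the intended call
// order (check permission, let the user pick a device, then open a stream).
import React, { useState } from "react";
import useAudioDevice from "./useAudioDevice";

const RecordButtonSketch = () => {
  const {
    loading,
    permissionOk,
    permissionDenied,
    audioDevices,
    requestPermission,
    getAudioStream,
  } = useAudioDevice();
  const [deviceId, setDeviceId] = useState<string | null>(null);

  if (loading) return <p>Checking microphone permission...</p>;
  if (permissionDenied) return <p>Microphone access was denied.</p>;
  if (!permissionOk)
    return <button onClick={requestPermission}>Allow microphone</button>;

  return (
    <>
      <select onChange={(e) => setDeviceId(e.target.value)}>
        {audioDevices.map((d) => (
          <option key={d.value} value={d.value}>
            {d.label}
          </option>
        ))}
      </select>
      <button
        onClick={async () => {
          // getAudioStream resolves to null if the device could not be opened.
          const stream = deviceId ? await getAudioStream(deviceId) : null;
          console.debug("got stream", stream);
        }}
      >
        Start
      </button>
    </>
  );
};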
64
www/app/[domain]/transcripts/useMp3.ts
Normal file
@@ -0,0 +1,64 @@
import { useContext, useEffect, useState } from "react";
import { DomainContext } from "../domainContext";
import getApi from "../../lib/useApi";
import { useFiefAccessTokenInfo } from "@fief/fief/build/esm/nextjs/react";

export type Mp3Response = {
  media: HTMLMediaElement | null;
  loading: boolean;
  getNow: () => void;
};

const useMp3 = (id: string, waiting?: boolean): Mp3Response => {
  const [media, setMedia] = useState<HTMLMediaElement | null>(null);
  const [later, setLater] = useState(waiting);
  const [loading, setLoading] = useState<boolean>(false);
  const api = getApi();
  const { api_url } = useContext(DomainContext);
  const accessTokenInfo = useFiefAccessTokenInfo();
  const [serviceWorker, setServiceWorker] =
    useState<ServiceWorkerRegistration | null>(null);

  useEffect(() => {
    if ("serviceWorker" in navigator) {
      navigator.serviceWorker.register("/service-worker.js").then((worker) => {
        setServiceWorker(worker);
      });
    }
    return () => {
      serviceWorker?.unregister();
    };
  }, []);

  useEffect(() => {
    if (!navigator.serviceWorker) return;
    if (!navigator.serviceWorker.controller) return;
    if (!serviceWorker) return;
    // Send the token to the service worker
    navigator.serviceWorker.controller.postMessage({
      type: "SET_AUTH_TOKEN",
      token: accessTokenInfo?.access_token,
    });
  }, [navigator.serviceWorker, !serviceWorker, accessTokenInfo]);

  useEffect(() => {
    if (!id || !api || later) return;

    // create an audio element and set the source
    setLoading(true);
    const audioElement = document.createElement("audio");
    audioElement.src = `${api_url}/v1/transcripts/${id}/audio/mp3`;
    audioElement.crossOrigin = "anonymous";
    audioElement.preload = "auto";
    setMedia(audioElement);
    setLoading(false);
  }, [id, api, later]);

  const getNow = () => {
    setLater(false);
  };

  return { media, loading, getNow };
};

export default useMp3;
45
www/app/[domain]/transcripts/useTopics.ts
Normal file
@@ -0,0 +1,45 @@
import { useEffect, useState } from "react";
import { useError } from "../../(errors)/errorContext";
import { Topic } from "./webSocketTypes";
import useApi from "../../lib/useApi";
import { shouldShowError } from "../../lib/errorUtils";

type TranscriptTopics = {
  topics: Topic[] | null;
  loading: boolean;
  error: Error | null;
};

const useTopics = (id: string): TranscriptTopics => {
  const [topics, setTopics] = useState<Topic[] | null>(null);
  const [loading, setLoading] = useState<boolean>(false);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();
  const api = useApi();

  useEffect(() => {
    if (!id || !api) return;

    setLoading(true);
    api
      .v1TranscriptGetTopics(id)
      .then((result) => {
        setTopics(result);
        setLoading(false);
        console.debug("Transcript topics loaded:", result);
      })
      .catch((err) => {
        setErrorState(err);
        const shouldShowHuman = shouldShowError(err);
        if (shouldShowHuman) {
          setError(err, "There was an error loading the topics");
        } else {
          setError(err);
        }
      });
  }, [id, api]);

  return { topics, loading, error };
};

export default useTopics;
63
www/app/[domain]/transcripts/useTranscript.ts
Normal file
@@ -0,0 +1,63 @@
import { useEffect, useState } from "react";
import { GetTranscript } from "../../api";
import { useError } from "../../(errors)/errorContext";
import { shouldShowError } from "../../lib/errorUtils";
import useApi from "../../lib/useApi";

type ErrorTranscript = {
  error: Error;
  loading: false;
  response: any;
};

type LoadingTranscript = {
  response: any;
  loading: true;
  error: false;
};

type SuccessTranscript = {
  response: GetTranscript;
  loading: false;
  error: null;
};

const useTranscript = (
  id: string | null,
): ErrorTranscript | LoadingTranscript | SuccessTranscript => {
  const [response, setResponse] = useState<GetTranscript | null>(null);
  const [loading, setLoading] = useState<boolean>(true);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();
  const api = useApi();

  useEffect(() => {
    if (!id || !api) return;

    setLoading(true);

    api
      .v1TranscriptGet(id)
      .then((result) => {
        setResponse(result);
        setLoading(false);
        console.debug("Transcript Loaded:", result);
      })
      .catch((error) => {
        const shouldShowHuman = shouldShowError(error);
        if (shouldShowHuman) {
          setError(error, "There was an error loading the transcript");
        } else {
          setError(error);
        }
        setErrorState(error);
      });
  }, [id, !api]);

  return { response, loading, error } as
    | ErrorTranscript
    | LoadingTranscript
    | SuccessTranscript;
};

export default useTranscript;
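Since the hook above returns a three-way union, callers check loading and error before reading response. A minimal, hypothetical consumer (the component name is made up and not part of this commit) might look like:

// Hypothetical consumer, not part of this commit.
import React from "react";
import useTranscript from "./useTranscript";

const TranscriptStatusSketch = ({ id }: { id: string }) => {
  const transcript = useTranscript(id);

  if (transcript.loading) return <p>Loading transcript...</p>;
  if (transcript.error) return <p>Could not load this transcript.</p>;
  // Both guards above have passed, so this is the success case and
  // `response` holds the GetTranscript payload.
  return <p>{transcript.response.title}</p>;
};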
40
www/app/[domain]/transcripts/useTranscriptList.ts
Normal file
@@ -0,0 +1,40 @@
import { useEffect, useState } from "react";
import { useError } from "../../(errors)/errorContext";
import useApi from "../../lib/useApi";
import { Page_GetTranscript_ } from "../../api";

type TranscriptList = {
  response: Page_GetTranscript_ | null;
  loading: boolean;
  error: Error | null;
};

// always protected
const useTranscriptList = (page: number): TranscriptList => {
  const [response, setResponse] = useState<Page_GetTranscript_ | null>(null);
  const [loading, setLoading] = useState<boolean>(true);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();
  const api = useApi();

  useEffect(() => {
    if (!api) return;
    setLoading(true);
    api
      .v1TranscriptsList(page)
      .then((response) => {
        setResponse(response);
        setLoading(false);
      })
      .catch((err) => {
        setResponse(null);
        setLoading(false);
        setError(err);
        setErrorState(err);
      });
  }, [!api, page]);

  return { response, loading, error };
};

export default useTranscriptList;
44
www/app/[domain]/transcripts/useWaveform.ts
Normal file
@@ -0,0 +1,44 @@
import { useEffect, useState } from "react";
import { AudioWaveform } from "../../api";
import { useError } from "../../(errors)/errorContext";
import useApi from "../../lib/useApi";
import { shouldShowError } from "../../lib/errorUtils";

type AudioWaveFormResponse = {
  waveform: AudioWaveform | null;
  loading: boolean;
  error: Error | null;
};

const useWaveform = (id: string): AudioWaveFormResponse => {
  const [waveform, setWaveform] = useState<AudioWaveform | null>(null);
  const [loading, setLoading] = useState<boolean>(true);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();
  const api = useApi();

  useEffect(() => {
    if (!id || !api) return;
    setLoading(true);
    api
      .v1TranscriptGetAudioWaveform(id)
      .then((result) => {
        setWaveform(result);
        setLoading(false);
        console.debug("Transcript waveform loaded:", result);
      })
      .catch((err) => {
        setErrorState(err);
        const shouldShowHuman = shouldShowError(err);
        if (shouldShowHuman) {
          setError(err, "There was an error loading the waveform");
        } else {
          setError(err);
        }
      });
  }, [id, api]);

  return { waveform, loading, error };
};

export default useWaveform;
71
www/app/[domain]/transcripts/useWebRTC.ts
Normal file
@@ -0,0 +1,71 @@
import { useEffect, useState } from "react";
import Peer from "simple-peer";
import { useError } from "../../(errors)/errorContext";
import useApi from "../../lib/useApi";
import { RtcOffer } from "../../api";

const useWebRTC = (
  stream: MediaStream | null,
  transcriptId: string | null,
): Peer | null => {
  const [peer, setPeer] = useState<Peer | null>(null);
  const { setError } = useError();
  const api = useApi();

  useEffect(() => {
    if (!stream || !transcriptId) {
      return;
    }

    console.debug("Using WebRTC", stream, transcriptId);

    let p: Peer;

    try {
      p = new Peer({ initiator: true, stream: stream });
    } catch (error) {
      setError(error, "Error creating WebRTC");
      return;
    }

    p.on("error", (err) => {
      setError(new Error(`WebRTC error: ${err}`));
    });

    p.on("signal", (data: any) => {
      if (!api) return;
      if ("sdp" in data) {
        const rtcOffer: RtcOffer = {
          sdp: data.sdp,
          type: data.type,
        };

        api
          .v1TranscriptRecordWebrtc(transcriptId, rtcOffer)
          .then((answer) => {
            try {
              p.signal(answer);
            } catch (error) {
              setError(error);
            }
          })
          .catch((error) => {
            setError(error, "Error loading WebRTCOffer");
          });
      }
    });

    p.on("connect", () => {
      console.log("WebRTC connected");
      setPeer(p);
    });

    return () => {
      p.destroy();
    };
  }, [stream, transcriptId]);

  return peer;
};

export default useWebRTC;
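A short sketch of how this hook is meant to be wired: a media stream (for example one obtained from useAudioDevice's getAudioStream) plus a transcript id yield a peer once the signaling round-trip completes. The wrapper name and the track-stopping cleanup below are assumptions for illustration, not part of this commit.

// Hypothetical consumer, not part of this commit.
import { useEffect, useState } from "react";
import useWebRTC from "./useWebRTC";

const useRecordingSession = (transcriptId: string | null) => {
  const [stream, setStream] = useState<MediaStream | null>(null);
  // `peer` stays null until the "connect" event fires inside useWebRTC.
  const peer = useWebRTC(stream, transcriptId);

  useEffect(() => {
    return () => {
      // Assumption: the caller is responsible for releasing the microphone.
      stream?.getTracks().forEach((track) => track.stop());
    };
  }, [stream]);

  return { peer, setStream };
};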
452
www/app/[domain]/transcripts/useWebSockets.ts
Normal file
@@ -0,0 +1,452 @@
import { useContext, useEffect, useState } from "react";
import { Topic, FinalSummary, Status } from "./webSocketTypes";
import { useError } from "../../(errors)/errorContext";
import { DomainContext } from "../domainContext";
import { AudioWaveform, GetTranscriptSegmentTopic } from "../../api";
import useApi from "../../lib/useApi";

export type UseWebSockets = {
  transcriptText: string;
  translateText: string;
  title: string;
  topics: Topic[];
  finalSummary: FinalSummary;
  status: Status;
  waveform: AudioWaveform | null;
  duration: number | null;
};

export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
  const [transcriptText, setTranscriptText] = useState<string>("");
  const [translateText, setTranslateText] = useState<string>("");
  const [title, setTitle] = useState<string>("");
  const [textQueue, setTextQueue] = useState<string[]>([]);
  const [translationQueue, setTranslationQueue] = useState<string[]>([]);
  const [isProcessing, setIsProcessing] = useState(false);
  const [topics, setTopics] = useState<Topic[]>([]);
  const [waveform, setWaveForm] = useState<AudioWaveform | null>(null);
  const [duration, setDuration] = useState<number | null>(null);
  const [finalSummary, setFinalSummary] = useState<FinalSummary>({
    summary: "",
  });
  const [status, setStatus] = useState<Status>({ value: "initial" });
  const { setError } = useError();

  const { websocket_url } = useContext(DomainContext);
  const api = useApi();

  useEffect(() => {
    if (isProcessing || textQueue.length === 0) {
      return;
    }

    setIsProcessing(true);
    const text = textQueue[0];
    setTranscriptText(text);
    setTranslateText(translationQueue[0]);

    const WPM_READING = 200 + textQueue.length * 10; // words per minute to read
    const wordCount = text.split(/\s+/).length;
    const delay = (wordCount / WPM_READING) * 60 * 1000;
    console.log(`displaying "${text}" for ${delay}ms`);
    setTimeout(() => {
      setIsProcessing(false);
      setTextQueue((prevQueue) => prevQueue.slice(1));
      setTranslationQueue((prevQueue) => prevQueue.slice(1));
    }, delay);
  }, [textQueue, isProcessing]);

  useEffect(() => {
    document.onkeyup = (e) => {
      if (e.key === "a" && process.env.NEXT_PUBLIC_ENV === "development") {
        const segments: GetTranscriptSegmentTopic[] = [
          {
            speaker: 1,
            start: 0,
            text: "This is the transcription of an example title",
          },
          {
            speaker: 2,
            start: 10,
            text: "This is the second speaker",
          },
          {
            speaker: 3,
            start: 90,
            text: "This is the third speaker",
          },
          {
            speaker: 4,
            start: 90,
            text: "This is the fourth speaker",
          },
          {
            speaker: 5,
            start: 123,
            text: "This is the fifth speaker",
          },
          {
            speaker: 6,
            start: 300,
            text: "This is the sixth speaker",
          },
        ];

        setTranscriptText("Lorem Ipsum");
        setTopics([
          {
            id: "1",
            timestamp: 10,
            duration: 10,
            summary: "This is test topic 1",
            title: "Topic 1: Introduction to Quantum Mechanics",
            transcript:
              "A brief overview of quantum mechanics and its principles.",
          },
          {
            id: "2",
            timestamp: 20,
            duration: 10,
            summary: "This is test topic 2",
            title: "Topic 2: Machine Learning Algorithms",
            transcript:
              "Understanding the different types of machine learning algorithms.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "3",
            timestamp: 30,
            duration: 10,
            summary: "This is test topic 3",
            title: "Topic 3: Mental Health Awareness",
            transcript: "Ways to improve mental health and reduce stigma.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "4",
            timestamp: 40,
            duration: 10,
            summary: "This is test topic 4",
            title: "Topic 4: Basics of Productivity",
            transcript: "Tips and tricks to increase daily productivity.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "5",
            timestamp: 50,
            duration: 10,
            summary: "This is test topic 5",
            title: "Topic 5: Future of Aviation",
            transcript:
              "Exploring the advancements and possibilities in aviation.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
        ]);

        setFinalSummary({ summary: "This is the final summary" });
      }
      if (e.key === "z" && process.env.NEXT_PUBLIC_ENV === "development") {
        setTranscriptText(
          "This text is in English, and it is a pretty long sentence to test the limits",
        );
        setTopics([
          {
            id: "1",
            timestamp: 10,
            duration: 10,
            summary: "This is test topic 1",
            title:
              "Topic 1: Introduction to Quantum Mechanics, a brief overview of quantum mechanics and its principles.",
            transcript:
              "A brief overview of quantum mechanics and its principles.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "2",
            timestamp: 20,
            duration: 10,
            summary: "This is test topic 2",
            title:
              "Topic 2: Machine Learning Algorithms, understanding the different types of machine learning algorithms.",
            transcript:
              "Understanding the different types of machine learning algorithms.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "3",
            timestamp: 30,
            duration: 10,
            summary: "This is test topic 3",
            title:
              "Topic 3: Mental Health Awareness, ways to improve mental health and reduce stigma.",
            transcript: "Ways to improve mental health and reduce stigma.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "4",
            timestamp: 40,
            duration: 10,
            summary: "This is test topic 4",
            title:
              "Topic 4: Basics of Productivity, tips and tricks to increase daily productivity.",
            transcript: "Tips and tricks to increase daily productivity.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
          {
            id: "5",
            timestamp: 50,
            duration: 10,
            summary: "This is test topic 5",
            title:
              "Topic 5: Future of Aviation, exploring the advancements and possibilities in aviation.",
            transcript:
              "Exploring the advancements and possibilities in aviation.",
            segments: [
              {
                speaker: 1,
                start: 0,
                text: "This is the transcription of an example title",
              },
              {
                speaker: 2,
                start: 10,
                text: "This is the second speaker",
              },
            ],
          },
        ]);

        setFinalSummary({ summary: "This is the final summary" });
      }
    };

    if (!transcriptId || !api) return;

    api?.v1TranscriptGetWebsocketEvents(transcriptId).then((result) => {});

    const url = `${websocket_url}/v1/transcripts/${transcriptId}/events`;
    let ws = new WebSocket(url);

    ws.onopen = () => {
      console.debug("WebSocket connection opened");
    };

    ws.onmessage = (event) => {
      const message = JSON.parse(event.data);

      try {
        switch (message.event) {
          case "TRANSCRIPT":
            const newText = (message.data.text ?? "").trim();
            const newTranslation = (message.data.translation ?? "").trim();

            if (!newText) break;

            console.debug("TRANSCRIPT event:", newText);
            setTextQueue((prevQueue) => [...prevQueue, newText]);
            setTranslationQueue((prevQueue) => [...prevQueue, newTranslation]);
            break;

          case "TOPIC":
            setTopics((prevTopics) => {
              const topic = message.data as Topic;
              const index = prevTopics.findIndex(
                (prevTopic) => prevTopic.id === topic.id,
              );
              if (index >= 0) {
                prevTopics[index] = topic;
                return prevTopics;
              }
              return [...prevTopics, topic];
            });
            console.debug("TOPIC event:", message.data);
            break;

          case "FINAL_SHORT_SUMMARY":
            console.debug("FINAL_SHORT_SUMMARY event:", message.data);
            break;

          case "FINAL_LONG_SUMMARY":
            if (message.data) {
              setFinalSummary(message.data);
            }
            break;

          case "FINAL_TITLE":
            console.debug("FINAL_TITLE event:", message.data);
            if (message.data) {
              setTitle(message.data.title);
            }
            break;

          case "WAVEFORM":
            console.debug(
              "WAVEFORM event length:",
              message.data.waveform.length,
            );
            if (message.data) {
              setWaveForm(message.data.waveform);
            }
            break;
          case "DURATION":
            console.debug("DURATION event:", message.data);
            if (message.data) {
              setDuration(message.data.duration);
            }
            break;

          case "STATUS":
            console.log("STATUS event:", message.data);
            if (message.data.value === "error") {
              setError(
                Error("Websocket error status"),
                "There was an error processing this meeting.",
              );
            }
            setStatus(message.data);
            if (message.data.value === "ended") {
              ws.close();
            }
            break;

          default:
            setError(
              new Error(`Received unknown WebSocket event: ${message.event}`),
            );
        }
      } catch (error) {
        setError(error);
      }
    };

    ws.onerror = (error) => {
      console.error("WebSocket error:", error);
      setError(new Error("A WebSocket error occurred."));
    };

    ws.onclose = (event) => {
      console.debug("WebSocket connection closed");
      switch (event.code) {
        case 1000: // Normal Closure:
          break;
        case 1005: // Closure by client FF
          break;
        default:
          setError(
            new Error(`WebSocket closed unexpectedly with code: ${event.code}`),
            "Disconnected",
          );
          console.log(
            "Socket is closed. Reconnect will be attempted in 1 second.",
            event.reason,
          );
          setTimeout(function () {
            ws = new WebSocket(url);
          }, 1000);
      }
    };

    return () => {
      ws.close();
    };
  }, [transcriptId, !api]);

  return {
    transcriptText,
    translateText,
    topics,
    finalSummary,
    title,
    status,
    waveform,
    duration,
  };
};
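The hook above demultiplexes server events (TRANSCRIPT, TOPIC, FINAL_SHORT_SUMMARY, FINAL_LONG_SUMMARY, FINAL_TITLE, WAVEFORM, DURATION, STATUS) into React state. A minimal, hypothetical consumer might look like the sketch below; only the event names and returned fields come from the file above, the component itself is illustrative and not part of this commit.

// Hypothetical consumer, not part of this commit.
import React from "react";
import { useWebSockets } from "./useWebSockets";

const LiveTranscriptSketch = ({ transcriptId }: { transcriptId: string }) => {
  const { transcriptText, translateText, topics, finalSummary, status } =
    useWebSockets(transcriptId);

  return (
    <div>
      <p>status: {status.value}</p>
      <p>{transcriptText}</p>
      {translateText && <p>{translateText}</p>}
      <p>{topics.length} topics so far</p>
      {finalSummary.summary && <p>{finalSummary.summary}</p>}
    </div>
  );
};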
11
www/app/[domain]/transcripts/waveformLoading.tsx
Normal file
@@ -0,0 +1,11 @@
import { faSpinner } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";

export default () => (
  <div className="flex flex-grow items-center justify-center h-20">
    <FontAwesomeIcon
      icon={faSpinner}
      className="animate-spin-slow text-gray-600 flex-grow rounded-lg md:rounded-xl h-10 w-10"
    />
  </div>
);
20
www/app/[domain]/transcripts/webSocketTypes.ts
Normal file
@@ -0,0 +1,20 @@
import { GetTranscriptTopic } from "../../api";

export type Topic = GetTranscriptTopic;

export type Transcript = {
  text: string;
};

export type FinalSummary = {
  summary: string;
};

export type Status = {
  value: string;
};

export type TranslatedTopic = {
  text: string;
  translation: string;
};