Refactor based on Next.js 13 routing / directory layout

Koper
2023-08-14 19:50:25 +07:00
parent 63636b52e1
commit eb4fd6dcd0
15 changed files with 22 additions and 25 deletions


@@ -1,128 +0,0 @@
// Override the startRecording method so we can pass in the desired stream.
// See: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
import RecordPlugin from "wavesurfer.js/dist/plugins/record";
const MIME_TYPES = [
"audio/webm",
"audio/wav",
"audio/mpeg",
"audio/mp4",
"audio/mp3",
];
const findSupportedMimeType = () =>
MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType));
class CustomRecordPlugin extends RecordPlugin {
static create(options) {
return new CustomRecordPlugin(options || {});
}
render(stream) {
if (!this.wavesurfer) return () => undefined;
const container = this.wavesurfer.getWrapper();
const canvas = document.createElement("canvas");
canvas.width = container.clientWidth;
canvas.height = container.clientHeight;
canvas.style.zIndex = "10";
container.appendChild(canvas);
const canvasCtx = canvas.getContext("2d");
const audioContext = new AudioContext();
const source = audioContext.createMediaStreamSource(stream);
const analyser = audioContext.createAnalyser();
analyser.fftSize = 2 ** 5; // 32-sample FFT; coarse resolution is enough for the scrolling waveform
source.connect(analyser);
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
let animationId, previousTimeStamp;
const BUFFER_SIZE = 2 ** 8; // 256 samples drawn across the canvas width
const dataBuffer = new Array(BUFFER_SIZE).fill(canvas.height);
const drawWaveform = (timeStamp) => {
if (!canvasCtx) return;
analyser.getByteTimeDomainData(dataArray);
canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
canvasCtx.fillStyle = "#cc3347";
// Sample roughly every 10ms: push this frame's peak (the min byte value)
// and drop the oldest entry so the buffer keeps a fixed size
if (previousTimeStamp === undefined || timeStamp - previousTimeStamp > 10) {
previousTimeStamp = timeStamp;
dataBuffer.push(Math.min(...dataArray));
dataBuffer.splice(0, 1);
}
// Drawing
const sliceWidth = canvas.width / dataBuffer.length;
let x = 0;
for (let i = 0; i < dataBuffer.length; i++) {
const valueNormalized = dataBuffer[i] / canvas.height;
const y = (valueNormalized * canvas.height) / 2;
const sliceHeight = canvas.height + 1 - y * 2;
canvasCtx.fillRect(x, y, (sliceWidth * 2) / 3, sliceHeight);
x += sliceWidth;
}
animationId = requestAnimationFrame(drawWaveform);
};
// Kick off the loop via rAF so the callback always receives a timestamp
animationId = requestAnimationFrame(drawWaveform);
return () => {
if (animationId) {
cancelAnimationFrame(animationId);
}
if (source) {
source.disconnect();
source.mediaStream.getTracks().forEach((track) => track.stop());
}
if (audioContext) {
audioContext.close();
}
canvas?.remove();
};
}
startRecording(stream) {
// Unlike the upstream plugin, the stream is passed in by the caller
// (device choice and constraints are handled in Recorder)
this.preventInteraction();
this.cleanUp();
const onStop = this.render(stream);
const mediaRecorder = new MediaRecorder(stream, {
mimeType: this.options.mimeType || findSupportedMimeType(),
audioBitsPerSecond: this.options.audioBitsPerSecond,
});
const recordedChunks = [];
mediaRecorder.addEventListener("dataavailable", (event) => {
if (event.data.size > 0) {
recordedChunks.push(event.data);
}
});
mediaRecorder.addEventListener("stop", () => {
onStop();
this.loadBlob(recordedChunks, mediaRecorder.mimeType);
this.emit("stopRecording");
});
mediaRecorder.start();
this.emit("startRecording");
this.mediaRecorder = mediaRecorder;
}
}
export default CustomRecordPlugin;
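
For reference, a minimal usage sketch of this plugin (the WaveSurfer instance and stream handling mirror the Recorder component later in this commit; treat it as a sketch, not part of the diff):

// Sketch only: assumes an existing WaveSurfer instance, as in Recorder below
const record = wavesurfer.registerPlugin(CustomRecordPlugin.create());
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
record.startRecording(stream); // the override accepts the caller's stream
record.on("stopRecording", () => {
  console.log("recording available at:", record.getRecordedUrl());
});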


@@ -1,124 +0,0 @@
import React, { useState, useEffect } from "react";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import {
faChevronRight,
faChevronDown,
faLinkSlash,
} from "@fortawesome/free-solid-svg-icons";
export function Dashboard({
transcriptionText,
finalSummary,
topics,
disconnected,
}) {
const [openIndex, setOpenIndex] = useState(null);
const [autoscrollEnabled, setAutoscrollEnabled] = useState(true);
// Follow newly arriving topics unless the user has scrolled away
useEffect(() => {
if (autoscrollEnabled) scrollToBottom();
}, [topics.length]);
const scrollToBottom = () => {
const topicsDiv = document.getElementById("topics-div");
topicsDiv.scrollTop = topicsDiv.scrollHeight;
};
const handleScroll = (e) => {
// Use a small tolerance: scrollTop can be fractional on zoomed displays,
// so an exact-equality check might never re-enable autoscroll
const bottom =
Math.abs(e.target.scrollHeight - e.target.scrollTop - e.target.clientHeight) < 1;
if (!bottom && autoscrollEnabled) {
setAutoscrollEnabled(false);
} else if (bottom && !autoscrollEnabled) {
setAutoscrollEnabled(true);
}
};
const formatTime = (seconds) => {
let hours = Math.floor(seconds / 3600);
let minutes = Math.floor((seconds % 3600) / 60);
let secs = Math.floor(seconds % 60);
let timeString = `${hours > 0 ? hours + ":" : ""}${minutes
.toString()
.padStart(2, "0")}:${secs.toString().padStart(2, "0")}`;
return timeString;
};
return (
<>
<div className="relative h-[60svh] w-3/4 flex flex-col">
<div className="text-center pb-1 pt-4">
<h1 className="text-2xl font-bold text-blue-500">Meeting Notes</h1>
</div>
<div className="flex justify-between border-b-2">
<div className="w-1/4 font-bold">Timestamp</div>
<div className="w-3/4 font-bold">Topic</div>
</div>
<div
className={`absolute right-5 w-10 h-10 ${
autoscrollEnabled ? "hidden" : "flex"
} ${
finalSummary ? "top-[49%]" : "bottom-1"
} justify-center items-center text-2xl cursor-pointer opacity-70 hover:opacity-100 transition-opacity duration-200 animate-bounce rounded-xl border-slate-400 bg-[#3c82f638] text-[#3c82f6ed]`}
onClick={scrollToBottom}
>
&#11015;
</div>
<div
id="topics-div"
className="py-2 overflow-y-auto"
onScroll={handleScroll}
>
{topics.map((item, index) => (
<div key={index} className="border-b-2 py-2 hover:bg-[#8ec5fc30]">
<div
className="flex justify-between items-center cursor-pointer px-4"
onClick={() => setOpenIndex(openIndex === index ? null : index)}
>
<div className="w-1/4">{formatTime(item.timestamp)}</div>
<div className="w-3/4 flex justify-between items-center">
{item.title}
<FontAwesomeIcon
className="transform transition-transform duration-200"
icon={openIndex === index ? faChevronDown : faChevronRight}
/>
</div>
</div>
{openIndex === index && (
<div className="p-2 mt-2 -mb-2 bg-slate-50 rounded">
{item.transcript}
</div>
)}
</div>
))}
{topics.length === 0 && (
<div className="text-center text-gray-500">No topics yet</div>
)}
</div>
{finalSummary && (
<div className="min-h-[200px] overflow-y-auto mt-2 p-2 bg-white temp-transcription rounded">
<h2>Final Summary</h2>
<p>{finalSummary.summary}</p>
</div>
)}
</div>
{disconnected && (
<div className="absolute top-0 left-0 w-full h-full bg-black opacity-50 flex justify-center items-center">
<div className="text-white text-2xl">
<FontAwesomeIcon icon={faLinkSlash} className="mr-2" />
Disconnected
</div>
</div>
)}
<footer className="h-[7svh] w-full bg-gray-800 text-white text-center py-4 text-2xl">
&nbsp;{transcriptionText}&nbsp;
</footer>
</>
);
}


@@ -1,197 +0,0 @@
import React, { useRef, useEffect, useState } from "react";
import WaveSurfer from "wavesurfer.js";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faDownload } from "@fortawesome/free-solid-svg-icons";
import Dropdown from "react-dropdown";
import "react-dropdown/style.css";
import CustomRecordPlugin from "./CustomRecordPlugin";
import { formatTime } from "../utils";
const AudioInputsDropdown = (props) => {
const [ddOptions, setDdOptions] = useState([]);
useEffect(() => {
const init = async () => {
// Request mic permission up front so enumerateDevices() returns
// labeled devices with non-empty ids
const permissionStream = await navigator.mediaDevices.getUserMedia({
audio: true,
});
permissionStream.getTracks().forEach((t) => t.stop());
const devices = await navigator.mediaDevices.enumerateDevices();
const audioDevices = devices
.filter((d) => d.kind === "audioinput" && d.deviceId !== "")
.map((d) => ({ value: d.deviceId, label: d.label }));
if (audioDevices.length < 1) return console.log("no audio input devices");
setDdOptions(audioDevices);
props.setDeviceId(audioDevices[0].value);
};
init();
}, []);
const handleDropdownChange = (e) => {
props.setDeviceId(e.value);
};
return (
<Dropdown
options={ddOptions}
onChange={handleDropdownChange}
value={ddOptions[0]}
disabled={props.disabled}
/>
);
};
export default function Recorder(props) {
const waveformRef = useRef();
const [wavesurfer, setWavesurfer] = useState(null);
const [record, setRecord] = useState(null);
const [isRecording, setIsRecording] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
const [deviceId, setDeviceId] = useState(null);
const [currentTime, setCurrentTime] = useState(0);
const [timeInterval, setTimeInterval] = useState(null);
const [duration, setDuration] = useState(0);
useEffect(() => {
document.getElementById("play-btn").disabled = true;
if (waveformRef.current) {
const _wavesurfer = WaveSurfer.create({
container: waveformRef.current,
waveColor: "#777",
progressColor: "#222",
cursorColor: "OrangeRed",
hideScrollbar: true,
autoCenter: true,
barWidth: 2,
});
const wsWrapper = _wavesurfer.getWrapper();
wsWrapper.style.cursor = "pointer";
wsWrapper.style.backgroundColor = "#e0c3fc42";
wsWrapper.style.borderRadius = "15px";
_wavesurfer.on("play", () => {
setIsPlaying(true);
});
_wavesurfer.on("pause", () => {
setIsPlaying(false);
});
_wavesurfer.on("timeupdate", setCurrentTime);
setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()));
setWavesurfer(_wavesurfer);
return () => {
_wavesurfer.destroy();
setIsRecording(false);
setIsPlaying(false);
};
}
}, []);
useEffect(() => {
if (record) {
// on() returns an unsubscribe function, which doubles as the effect cleanup
return record.on("stopRecording", () => {
const link = document.getElementById("download-recording");
link.href = record.getRecordedUrl();
link.download = "reflector-recording.webm";
link.style.visibility = "visible";
});
}
}, [record]);
useEffect(() => {
if (isRecording) {
const interval = setInterval(() => {
setCurrentTime((prev) => prev + 1);
}, 1000);
setTimeInterval(interval);
return () => clearInterval(interval);
} else {
clearInterval(timeInterval);
setCurrentTime((prev) => {
setDuration(prev);
return 0;
});
}
}, [isRecording]);
const handleRecClick = async () => {
if (!record) return console.log("no record");
if (record.isRecording()) {
props.onStop();
record.stopRecording();
setIsRecording(false);
document.getElementById("play-btn").disabled = false;
} else {
const stream = await navigator.mediaDevices.getUserMedia({
audio: {
deviceId,
noiseSuppression: false,
echoCancellation: false,
},
});
await record.startRecording(stream);
props.setStream(stream);
setIsRecording(true);
}
};
const handlePlayClick = () => {
wavesurfer?.playPause();
};
const timeLabel = () => {
if (isRecording) return formatTime(currentTime);
else if (duration)
return `${formatTime(currentTime)}/${formatTime(duration)}`;
else return "";
};
return (
<div className="relative flex flex-col items-center justify-center max-w-[75vw] w-full">
<div className="flex my-2 mx-auto">
<AudioInputsDropdown setDeviceId={setDeviceId} disabled={isRecording} />
&nbsp;
<button
className="w-20"
onClick={handleRecClick}
data-color={isRecording ? "red" : "blue"}
disabled={!deviceId}
>
{isRecording ? "Stop" : "Record"}
</button>
&nbsp;
<button
className="w-20"
id="play-btn"
onClick={handlePlayClick}
data-color={isPlaying ? "orange" : "green"}
>
{isPlaying ? "Pause" : "Play"}
</button>
<a
id="download-recording"
title="Download recording"
className="invisible w-9 m-auto text-center cursor-pointer text-blue-300 hover:text-blue-700"
>
<FontAwesomeIcon icon={faDownload} />
</a>
</div>
<div ref={waveformRef} className="w-full shadow-xl rounded-2xl"></div>
<div className="absolute bottom-0 right-2 text-xs text-black">
{isRecording && (
<div className="inline-block bg-red-500 rounded-full w-2 h-2 my-auto mr-1 animate-ping"></div>
)}
{timeLabel()}
</div>
</div>
);
}
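
A usage sketch for the component (prop names are taken from the code above; the parent wiring is an assumption):

// Sketch: the parent stores the MediaStream so other hooks (e.g. useWebRTC
// below) can consume it, and is notified when recording stops
<Recorder
  setStream={(stream) => setStream(stream)}
  onStop={() => console.debug("recording stopped")}
/>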


@@ -1,50 +0,0 @@
import { useEffect, useState } from "react";
import { DefaultApi } from "../api/apis/DefaultApi";
import { Configuration } from "../api/runtime";
const useTranscript = () => {
const [response, setResponse] = useState(null);
const [loading, setLoading] = useState(false);
const [error, setError] = useState(null);
const apiConfiguration = new Configuration({
basePath: process.env.NEXT_PUBLIC_API_URL,
});
const api = new DefaultApi(apiConfiguration);
const createTranscript = () => {
setLoading(true);
const requestParameters = {
createTranscript: {
name: "Weekly All-Hands", // Hardcoded for now
},
};
console.debug(
"POST - /v1/transcripts/ - Requesting new transcription creation",
requestParameters,
);
api
.v1TranscriptsCreate(requestParameters)
.then((result) => {
setResponse(result);
setLoading(false);
console.debug("New transcript created:", result);
})
.catch((err) => {
const errorString = err.response || err.message || "Unknown error";
setError(errorString);
setLoading(false);
console.error("Error creating transcript:", errorString);
});
};
useEffect(() => {
createTranscript();
}, []);
return { response, loading, error, createTranscript };
};
export default useTranscript;
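
A consumer sketch (hypothetical component; the exact shape of `response` depends on the generated API client):

// Sketch only: field names on `response` are assumptions
const { response, loading, error } = useTranscript();
if (loading) return <p>Creating transcript…</p>;
if (error) return <p>Error: {String(error)}</p>;
// `response` carries the transcript record consumed by useWebRTC/useWebSockets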


@@ -1,57 +0,0 @@
import { useEffect, useState } from "react";
import Peer from "simple-peer";
import { DefaultApi } from "../api/apis/DefaultApi";
import { Configuration } from "../api/runtime";
const useWebRTC = (stream, transcriptId) => {
const [data, setData] = useState({
peer: null,
});
useEffect(() => {
if (!stream || !transcriptId) {
return;
}
const apiConfiguration = new Configuration({
basePath: process.env.NEXT_PUBLIC_API_URL,
});
const api = new DefaultApi(apiConfiguration);
const peer = new Peer({ initiator: true, stream: stream });
peer.on("signal", (data) => {
// Forward only SDP payloads; the backend replies with its answer over HTTP
if ("sdp" in data) {
const requestParameters = {
transcriptId: transcriptId,
rtcOffer: {
sdp: data.sdp,
type: data.type,
},
};
api
.v1TranscriptRecordWebrtc(requestParameters)
.then((answer) => {
peer.signal(answer);
})
.catch((err) => {
console.error("WebRTC signaling error:", err);
});
}
});
peer.on("connect", () => {
console.log("WebRTC connected");
setData((prevData) => ({ ...prevData, peer: peer }));
});
return () => {
peer.destroy();
};
}, [stream, transcriptId]);
return data;
};
export default useWebRTC;
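
A composition sketch (the `id` field on the transcript response is an assumption; the stream comes from Recorder's setStream prop):

// Sketch: stream from <Recorder setStream={setStream} />, id from useTranscript
const [stream, setStream] = useState(null);
const { peer } = useWebRTC(stream, response?.id);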


@@ -1,65 +0,0 @@
import { useEffect, useState } from "react";
export const useWebSockets = (transcriptId) => {
const [transcriptText, setTranscriptText] = useState("");
const [topics, setTopics] = useState([]);
const [finalSummary, setFinalSummary] = useState("");
const [status, setStatus] = useState("disconnected");
useEffect(() => {
if (!transcriptId) return;
const url = `${process.env.NEXT_PUBLIC_WEBSOCKET_URL}/v1/transcripts/${transcriptId}/events`;
const ws = new WebSocket(url);
ws.onopen = () => {
console.debug("WebSocket connection opened");
};
ws.onmessage = (event) => {
const message = JSON.parse(event.data);
switch (message.event) {
case "TRANSCRIPT":
if (message.data.text) {
setTranscriptText(message.data.text.trim());
console.debug("TRANSCRIPT event:", message.data);
}
break;
case "TOPIC":
setTopics((prevTopics) => [...prevTopics, message.data]);
console.debug("TOPIC event:", message.data);
break;
case "FINAL_SUMMARY":
if (message.data) {
setFinalSummary(message.data);
console.debug("FINAL_SUMMARY event:", message.data);
}
break;
case "STATUS":
setStatus(message.data.status);
break;
default:
console.error("Unknown event:", message.event);
}
};
ws.onerror = (error) => {
console.error("WebSocket error:", error);
};
ws.onclose = () => {
console.debug("WebSocket connection closed");
};
return () => {
ws.close();
};
}, [transcriptId]);
return { transcriptText, topics, finalSummary, status };
};
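
Putting it together, a hedged page-level sketch of how these pieces plausibly compose (the page component and the `id` field are assumptions; hook and prop names come from the files above):

function RecordPage() {
  const { response } = useTranscript();
  const [stream, setStream] = useState(null);
  useWebRTC(stream, response?.id); // `id` field assumed
  const { transcriptText, topics, finalSummary, status } = useWebSockets(
    response?.id,
  );
  return (
    <>
      <Recorder setStream={setStream} onStop={() => {}} />
      <Dashboard
        transcriptionText={transcriptText}
        topics={topics}
        finalSummary={finalSummary}
        disconnected={status === "disconnected"}
      />
    </>
  );
}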