Merged reflector-ui (now "www") into single repo

Koper
2023-07-26 15:18:05 +07:00
parent 9a57a89b5b
commit 401aa1e6f9
24 changed files


@@ -0,0 +1,128 @@
// Override the startRecording method so we can pass the desired stream
// Check out: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
import RecordPlugin from "wavesurfer.js/dist/plugins/record";
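// Candidate recording formats in preference order; the first type MediaRecorder reports as supported is used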
const MIME_TYPES = [
"audio/webm",
"audio/wav",
"audio/mpeg",
"audio/mp4",
"audio/mp3",
];
const findSupportedMimeType = () =>
MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType));
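// RecordPlugin subclass: draws a live scrolling waveform from the mic stream and records a caller-supplied MediaStream instead of opening its own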
class CustomRecordPlugin extends RecordPlugin {
static create(options) {
return new CustomRecordPlugin(options || {});
}
render(stream) {
if (!this.wavesurfer) return () => undefined
const container = this.wavesurfer.getWrapper()
const canvas = document.createElement('canvas')
canvas.width = container.clientWidth
canvas.height = container.clientHeight
canvas.style.zIndex = '10'
container.appendChild(canvas)
const canvasCtx = canvas.getContext('2d')
const audioContext = new AudioContext()
const source = audioContext.createMediaStreamSource(stream)
const analyser = audioContext.createAnalyser()
analyser.fftSize = 2 ** 5
source.connect(analyser)
const bufferLength = analyser.frequencyBinCount
const dataArray = new Uint8Array(bufferLength)
let animationId, previousTimeStamp;
const BUFFER_SIZE = 2 ** 8
const dataBuffer = new Array(BUFFER_SIZE).fill(canvas.height)
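// Rolling buffer of the last BUFFER_SIZE samples: a new minimum is pushed roughly every 10 ms and the oldest dropped, so the drawn waveform scrolls left as recording progresses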
const drawWaveform = (timeStamp) => {
if (!canvasCtx) return
analyser.getByteTimeDomainData(dataArray)
canvasCtx.clearRect(0, 0, canvas.width, canvas.height)
canvasCtx.fillStyle = 'black'
if (previousTimeStamp === undefined) {
previousTimeStamp = timeStamp
dataBuffer.push(Math.min(...dataArray))
dataBuffer.splice(0, 1)
}
const elapsed = timeStamp - previousTimeStamp;
if (elapsed > 10) {
previousTimeStamp = timeStamp
dataBuffer.push(Math.min(...dataArray))
dataBuffer.splice(0, 1)
}
// Drawing
const sliceWidth = canvas.width / dataBuffer.length
let x = 0
for (let i = 0; i < dataBuffer.length; i++) {
const valueNormalized = dataBuffer[i] / canvas.height
const y = valueNormalized * canvas.height / 2
const sliceHeight = canvas.height + 1 - y * 2
canvasCtx.fillRect(x, y, sliceWidth * 2 / 3, sliceHeight)
x += sliceWidth
}
animationId = requestAnimationFrame(drawWaveform)
}
// Start the render loop via rAF so drawWaveform always receives a timestamp
animationId = requestAnimationFrame(drawWaveform)
return () => {
if (animationId) {
cancelAnimationFrame(animationId)
}
if (source) {
source.disconnect()
source.mediaStream.getTracks().forEach((track) => track.stop())
}
if (audioContext) {
audioContext.close()
}
canvas?.remove()
}
}
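// Same flow as the upstream RecordPlugin.startRecording, but records the stream passed in by the caller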
startRecording(stream) {
this.preventInteraction();
this.cleanUp();
const onStop = this.render(stream);
const mediaRecorder = new MediaRecorder(stream, {
mimeType: this.options.mimeType || findSupportedMimeType(),
audioBitsPerSecond: this.options.audioBitsPerSecond,
});
const recordedChunks = [];
mediaRecorder.addEventListener("dataavailable", (event) => {
if (event.data.size > 0) {
recordedChunks.push(event.data);
}
});
mediaRecorder.addEventListener("stop", () => {
onStop();
this.loadBlob(recordedChunks, mediaRecorder.mimeType);
this.emit("stopRecording");
});
mediaRecorder.start();
this.emit("startRecording");
this.mediaRecorder = mediaRecorder;
}
}
export default CustomRecordPlugin;


@@ -0,0 +1,65 @@
import React, { useState, useEffect } from "react";
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faChevronRight, faChevronDown } from '@fortawesome/free-solid-svg-icons'
export function Dashboard({
isRecording,
onRecord,
transcriptionText,
finalSummary,
topics,
stream,
}) {
const [openIndex, setOpenIndex] = useState(null);
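// openIndex is the topic row currently expanded to show its transcript; null means all rows are collapsed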
return (
<>
<div className="flex-grow w-3/4 py-4">
<div className="text-center py-6">
<h1 className="text-2xl font-bold text-blue-500">Meeting Notes</h1>
</div>
<div className="flex justify-between border-b-2">
<div className="w-1/4">Timestamp</div>
<div className="w-1/4">Topic</div>
<div className="w-1/4"></div>
</div>
{topics.map((item, index) => (
<div key={index} className="border-b-2 py-2">
<div
className="flex justify-between items-center cursor-pointer"
onClick={() => setOpenIndex(openIndex === index ? null : index)}
>
<div className="w-1/4">{item.timestamp}</div>
<div className="w-1/4 flex justify-between items-center">
{item.title}
<FontAwesomeIcon
className={`transform transition-transform duration-200`}
icon={openIndex === index ? faChevronDown : faChevronRight}
/>
</div>
<div className="w-1/4 flex flex-row space-x-0.5"></div>
</div>
{openIndex === index && (
<div className="mt-2 p-2 bg-white rounded">{item.transcript}</div>
)}
</div>
))}
{finalSummary && (
<div className="mt-2 p-2 bg-white temp-transcription rounded">
<h2>Final Summary</h2>
<p>Duration: {finalSummary.duration}</p>
<p>{finalSummary.summary}</p>
</div>
)}
</div>
<footer className="w-full bg-gray-800 text-white text-center py-4 text-2xl">
{transcriptionText}
</footer>
</>
);
}


@@ -0,0 +1,142 @@
import React, { useRef, useEffect, useState } from "react";
import WaveSurfer from "wavesurfer.js";
import Dropdown from "react-dropdown";
import "react-dropdown/style.css";
import CustomRecordPlugin from "./CustomRecordPlugin";
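// enumerateDevices() only exposes device labels once microphone permission has been granted, so prompt for it first if needed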
const queryAndPromptAudio = async () => {
const permissionStatus = await navigator.permissions.query({ name: 'microphone' })
if (permissionStatus.state === 'prompt') {
// Trigger the permission prompt, then release the temporary stream so the mic is not left open
const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
stream.getTracks().forEach((track) => track.stop())
}
}
const AudioInputsDropdown = (props) => {
const [ddOptions, setDdOptions] = useState([]);
useEffect(() => {
const init = async () => {
await queryAndPromptAudio()
const devices = await navigator.mediaDevices.enumerateDevices()
const audioDevices = devices
.filter((d) => d.kind === "audioinput" && d.deviceId != "")
.map((d) => ({ value: d.deviceId, label: d.label }))
if (audioDevices.length < 1) return console.log("no audio input devices")
setDdOptions(audioDevices)
props.setDeviceId(audioDevices[0].value)
}
init()
}, [])
const handleDropdownChange = (e) => {
props.setDeviceId(e.value);
};
return (
<Dropdown
options={ddOptions}
onChange={handleDropdownChange}
value={ddOptions[0]}
disabled={props.disabled}
/>
)
}
export default function Recorder(props) {
const waveformRef = useRef();
const [wavesurfer, setWavesurfer] = useState(null);
const [record, setRecord] = useState(null);
const [isRecording, setIsRecording] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
const [deviceId, setDeviceId] = useState(null);
useEffect(() => {
document.getElementById("play-btn").disabled = true;
if (waveformRef.current) {
const _wavesurfer = WaveSurfer.create({
container: waveformRef.current,
waveColor: "#cc3347",
progressColor: "#0178FF",
cursorColor: "OrangeRed",
hideScrollbar: true,
autoCenter: true,
barWidth: 2,
});
const wsWrapper = _wavesurfer.getWrapper();
wsWrapper.style.cursor = "pointer";
wsWrapper.style.backgroundColor = "lightgray";
wsWrapper.style.borderRadius = "15px";
_wavesurfer.on("play", () => {
setIsPlaying(true);
});
_wavesurfer.on("pause", () => {
setIsPlaying(false);
});
setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()));
setWavesurfer(_wavesurfer);
return () => {
_wavesurfer.destroy();
setIsRecording(false);
setIsPlaying(false);
};
}
}, []);
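// Toggle recording: stopping tells the server to finish via the data channel; starting opens the selected input device and hands the stream to both the waveform recorder and the WebRTC hook (via setStream)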
const handleRecClick = async () => {
if (!record) return console.log("no record");
if (record?.isRecording()) {
// Tell the server to stop transcribing (peer may still be null if the connection never opened)
props.serverData.peer?.send(JSON.stringify({ cmd: "STOP" }));
record.stopRecording();
setIsRecording(false);
document.getElementById("play-btn").disabled = false;
} else {
const stream = await navigator.mediaDevices.getUserMedia({
audio: { deviceId },
});
await record.startRecording(stream);
props.setStream(stream);
setIsRecording(true);
}
};
const handlePlayClick = () => {
wavesurfer?.playPause();
};
return (
<div className="flex flex-col items-center justify-center max-w-[75vw] w-full">
<div className="flex my-2 mx-auto">
<AudioInputsDropdown setDeviceId={setDeviceId} disabled={isRecording} />
&nbsp;
<button
onClick={handleRecClick}
data-color={isRecording ? "red" : "blue"}
disabled={!deviceId}
>
{isRecording ? "Stop" : "Record"}
</button>
&nbsp;
<button
id="play-btn"
onClick={handlePlayClick}
data-color={isPlaying ? "orange" : "green"}
>
{isPlaying ? "Pause" : "Play"}
</button>
</div>
<div ref={waveformRef} className="w-full"></div>
{/* TODO: Download audio <a> tag */}
</div>
);
}


@@ -0,0 +1,84 @@
import { useEffect, useState } from "react";
import Peer from "simple-peer";
const WebRTC_SERVER_URL = "http://127.0.0.1:1250/offer";
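// Opens a WebRTC connection to the backend: the mic stream is sent up, and the server pushes live transcription, topic updates and the final summary back over the data channel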
const useWebRTC = (stream, setIsRecording) => {
const [data, setData] = useState({
peer: null,
});
useEffect(() => {
if (!stream) {
return;
}
let peer = new Peer({ initiator: true, stream: stream });
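// Signaling: when simple-peer emits an SDP offer, POST it to the backend and feed the returned answer back into the peer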
peer.on("signal", (data) => {
if ("sdp" in data) {
fetch(WebRTC_SERVER_URL, {
body: JSON.stringify({
sdp: data.sdp,
type: data.type,
}),
headers: {
"Content-Type": "application/json",
},
method: "POST",
})
.then((response) => response.json())
.then((answer) => peer.signal(answer))
.catch((e) => {
alert(e);
});
}
});
peer.on("connect", () => {
console.log("WebRTC connected");
setData((prevData) => ({ ...prevData, peer: peer }));
});
peer.on("data", (data) => {
const serverData = JSON.parse(data.toString());
console.log(serverData);
switch (serverData.cmd) {
case "SHOW_TRANSCRIPTION":
setData((prevData) => ({
...prevData,
text: serverData.text,
}));
break;
case "UPDATE_TOPICS":
setData((prevData) => ({
...prevData,
topics: serverData.topics,
}));
break;
case "DISPLAY_FINAL_SUMMARY":
setData((prevData) => ({
...prevData,
finalSummary: {
duration: serverData.duration,
summary: serverData.summary,
},
text: ''
}));
setIsRecording(false);
break;
default:
console.error(`Unknown command ${serverData.cmd}`);
}
});
return () => {
peer.destroy();
};
}, [stream, setIsRecording]);
return data;
};
export default useWebRTC;

BIN
www/app/favicon.ico Normal file (binary file not shown, 25 KiB)

20
www/app/globals.scss Normal file

@@ -0,0 +1,20 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
:root {
--foreground-rgb: 0, 0, 0;
--background-start-rgb: 214, 219, 220;
--background-end-rgb: 255, 255, 255;
}
body {
background: white;
font-family: "Roboto", sans-serif;
}
.temp-transcription {
background: beige;
border-radius: 5px;
}

25
www/app/layout.js Normal file

@@ -0,0 +1,25 @@
import "./globals.scss";
import { Roboto } from "next/font/google";
const roboto = Roboto({ subsets: ["latin"], weight: "400" });
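// next/font/google self-hosts Roboto at build time and exposes a className that applies it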
export const metadata = {
title: "Reflector Monadical",
description: "Capture The Signal, Not The Noise",
};
export default function RootLayout({ children }) {
return (
<html lang="en">
<body className={roboto.className + " flex flex-col min-h-screen"}>
{children}
</body>
</html>
);
}

33
www/app/page.js Normal file

@@ -0,0 +1,33 @@
"use client";
import React, { useState } from "react";
import Recorder from "./components/record.js";
import { Dashboard } from "./components/dashboard.js";
import useWebRTC from "./components/webrtc.js";
import "../public/button.css";
// Stable no-op: useWebRTC lists its second argument as an effect dependency,
// so an inline arrow here would tear down and rebuild the peer on every render.
const noop = () => {};
const App = () => {
const [stream, setStream] = useState(null);
// This is where you'd send the stream and receive the data from the server:
// transcription, summary, etc.
const serverData = useWebRTC(stream, noop);
return (
<div className="flex flex-col items-center h-[100svh]">
<div className="text-center py-6 mt-10">
<h1 className="text-5xl font-bold text-blue-500">Reflector</h1>
<p className="text-gray-500">Capture The Signal, Not The Noise</p>
</div>
<Recorder setStream={setStream} serverData={serverData} />
<Dashboard
transcriptionText={serverData.text ?? "..."}
finalSummary={serverData.finalSummary}
topics={serverData.topics ?? []}
stream={stream}
/>
</div>
);
};
export default App;

19
www/app/utils.js Normal file

@@ -0,0 +1,19 @@
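// Random integer in [min, max], inclusive of both bounds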
export function getRandomNumber(min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
// xorshift-style seeded PRNG step; unsigned shifts keep the result in [0, 1)
export function SeededRand(seed) {
seed ^= seed << 13;
seed ^= seed >>> 17;
seed ^= seed << 5;
return (seed >>> 0) / 2 ** 32;
}
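// Mulberry32: small seeded PRNG; the returned function yields deterministic floats in [0, 1) for a given seed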
export function Mulberry32(seed) {
return function () {
var t = (seed += 0x6d2b79f5);
t = Math.imul(t ^ (t >>> 15), t | 1);
t ^= t + Math.imul(t ^ (t >>> 7), t | 61);
return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
};
}