Merge remote-tracking branch 'origin' into jose/vertical-waveform

This commit is contained in:
Jose B
2023-07-21 07:04:36 -05:00
8 changed files with 174 additions and 161 deletions

View File

@@ -1,14 +1,21 @@
// Override the startRecording method so we can pass the desired stream
// Check out: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
import RecordPlugin from "wavesurfer.js/dist/plugins/record"
import RecordPlugin from "wavesurfer.js/dist/plugins/record";
const MIME_TYPES = ['audio/webm', 'audio/wav', 'audio/mpeg', 'audio/mp4', 'audio/mp3']
const findSupportedMimeType = () => MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType))
const MIME_TYPES = [
"audio/webm",
"audio/wav",
"audio/mpeg",
"audio/mp4",
"audio/mp3",
];
const findSupportedMimeType = () =>
MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType));
class CustomRecordPlugin extends RecordPlugin {
static create(options) {
return new CustomRecordPlugin(options || {})
return new CustomRecordPlugin(options || {});
}
render(stream) {
if (!this.wavesurfer) return () => undefined
@@ -88,34 +95,34 @@ class CustomRecordPlugin extends RecordPlugin {
}
}
startRecording(stream) {
this.preventInteraction()
this.cleanUp()
this.preventInteraction();
this.cleanUp();
const onStop = this.render(stream)
const onStop = this.render(stream);
const mediaRecorder = new MediaRecorder(stream, {
mimeType: this.options.mimeType || findSupportedMimeType(),
audioBitsPerSecond: this.options.audioBitsPerSecond,
})
const recordedChunks = []
});
const recordedChunks = [];
mediaRecorder.addEventListener('dataavailable', (event) => {
mediaRecorder.addEventListener("dataavailable", (event) => {
if (event.data.size > 0) {
recordedChunks.push(event.data)
recordedChunks.push(event.data);
}
})
});
mediaRecorder.addEventListener('stop', () => {
onStop()
this.loadBlob(recordedChunks, mediaRecorder.mimeType)
this.emit('stopRecording')
})
mediaRecorder.addEventListener("stop", () => {
onStop();
this.loadBlob(recordedChunks, mediaRecorder.mimeType);
this.emit("stopRecording");
});
mediaRecorder.start()
mediaRecorder.start();
this.emit('startRecording')
this.emit("startRecording");
this.mediaRecorder = mediaRecorder
this.mediaRecorder = mediaRecorder;
}
}
export default CustomRecordPlugin;
export default CustomRecordPlugin;

View File

@@ -14,7 +14,10 @@ export function Dashboard({
return (
<>
<div className="p-4">
<div className="w-3/4 py-4">
<div className="text-center py-6">
<h1 className="text-2xl font-bold text-blue-500">Meeting Notes</h1>
</div>
<div className="flex justify-between border-b-2">
<div className="w-1/4">Timestamp</div>
<div className="w-1/4">Topic</div>
@@ -38,21 +41,19 @@ export function Dashboard({
{">"}
</span>
</div>
<div className="w-1/4 flex flex-row space-x-0.5">
</div>
<div className="w-1/4 flex flex-row space-x-0.5"></div>
</div>
{openIndex === index && (
<div className="mt-2 p-2">{item.transcript}</div>
<div className="mt-2 p-2 bg-white">{item.transcript}</div>
)}
</div>
))}
<div className="border-b-2 py-2 w-[90vw] max-w-[1280px]">
<div className="border-b-2 py-2">
<div className="flex justify-between">
<div className="w-1/4">Live</div>
<div className="w-1/4">Transcript</div>
<div className="w-1/4 flex flex-row space-x-0.5">
</div>
<div className="w-1/4 flex flex-row space-x-0.5"></div>
</div>
<div className="mt-2 p-2 bg-white temp-transcription">
{transcriptionText}
@@ -66,7 +67,6 @@ export function Dashboard({
<p>{finalSummary.summary}</p>
</div>
)}
</div>
</>
);

View File

@@ -2,105 +2,117 @@ import React, { useRef, useEffect, useState } from "react";
import WaveSurfer from "wavesurfer.js";
import Dropdown from 'react-dropdown'
import 'react-dropdown/style.css'
import CustomRecordPlugin from './CustomRecordPlugin'
import Dropdown from "react-dropdown";
import "react-dropdown/style.css";
import CustomRecordPlugin from "./CustomRecordPlugin";
export default function Recorder(props) {
const waveformRef = useRef()
const [wavesurfer, setWavesurfer] = useState(null)
const [record, setRecord] = useState(null)
const [isRecording, setIsRecording] = useState(false)
const [isPlaying, setIsPlaying] = useState(false)
const [deviceId, setDeviceId] = useState(null)
const [ddOptions, setDdOptions] = useState([])
const waveformRef = useRef();
const [wavesurfer, setWavesurfer] = useState(null);
const [record, setRecord] = useState(null);
const [isRecording, setIsRecording] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
const [deviceId, setDeviceId] = useState(null);
const [ddOptions, setDdOptions] = useState([]);
useEffect(() => {
document.getElementById('play-btn').disabled = true
document.getElementById("play-btn").disabled = true;
navigator.mediaDevices.enumerateDevices().then(devices => {
navigator.mediaDevices.enumerateDevices().then((devices) => {
const audioDevices = devices
.filter(d => d.kind === 'audioinput')
.map(d => ({value: d.deviceId, label: d.label}))
if (audioDevices.length < 1) return console.log("no audio input devices")
.filter((d) => d.kind === "audioinput")
.map((d) => ({ value: d.deviceId, label: d.label }));
setDdOptions(audioDevices)
setDeviceId(audioDevices[0].value)
})
if (audioDevices.length < 1) return console.log("no audio input devices");
if(waveformRef.current) {
setDdOptions(audioDevices);
setDeviceId(audioDevices[0].value);
});
if (waveformRef.current) {
const _wavesurfer = WaveSurfer.create({
container: waveformRef.current,
waveColor: "#333",
progressColor: "#0178FF",
waveColor: "#cc3347",
progressColor: "#0178FF",
cursorColor: "OrangeRed",
hideScrollbar: true,
autoCenter: true,
barWidth: 2,
})
const wsWrapper = _wavesurfer.getWrapper()
wsWrapper.style.cursor = 'pointer'
wsWrapper.style.backgroundColor = 'lightgray'
wsWrapper.style.borderRadius = '15px'
});
const wsWrapper = _wavesurfer.getWrapper();
wsWrapper.style.cursor = "pointer";
wsWrapper.style.backgroundColor = "lightgray";
wsWrapper.style.borderRadius = "15px";
_wavesurfer.on('play', () => {
setIsPlaying(true)
})
_wavesurfer.on('pause', () => {
setIsPlaying(false)
})
_wavesurfer.on("play", () => {
setIsPlaying(true);
});
_wavesurfer.on("pause", () => {
setIsPlaying(false);
});
setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()))
setWavesurfer(_wavesurfer)
setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()));
setWavesurfer(_wavesurfer);
return () => {
_wavesurfer.destroy()
setIsRecording(false)
setIsPlaying(false)
}
_wavesurfer.destroy();
setIsRecording(false);
setIsPlaying(false);
};
}
}, [])
}, []);
const handleRecClick = async () => {
if (!record) return console.log("no record")
if (!record) return console.log("no record");
if(record?.isRecording()) {
record.stopRecording()
setIsRecording(false)
document.getElementById('play-btn').disabled = false
if (record?.isRecording()) {
record.stopRecording();
setIsRecording(false);
document.getElementById("play-btn").disabled = false;
} else {
const stream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId } })
await record.startRecording(stream)
props.setStream(stream)
setIsRecording(true)
const stream = await navigator.mediaDevices.getUserMedia({
audio: { deviceId },
});
await record.startRecording(stream);
props.setStream(stream);
setIsRecording(true);
}
}
};
const handlePlayClick = () => {
wavesurfer?.playPause()
}
wavesurfer?.playPause();
};
const handleDropdownChange = (e) => {
setDeviceId(e.value)
}
setDeviceId(e.value);
};
return (
<div className="flex flex-col items-center justify-center max-w-[90vw] w-full">
<div className="flex flex-col items-center justify-center max-w-[75vw] w-full">
<div className="flex my-2 mx-auto">
<Dropdown options={ddOptions} onChange={handleDropdownChange} value={ddOptions[0]} />
<Dropdown
options={ddOptions}
onChange={handleDropdownChange}
value={ddOptions[0]}
/>
&nbsp;
<button onClick={handleRecClick} data-color={isRecording ? "red" : "blue"}>
<button
onClick={handleRecClick}
data-color={isRecording ? "red" : "blue"}
>
{isRecording ? "Stop" : "Record"}
</button>
&nbsp;
<button id="play-btn" onClick={handlePlayClick} data-color={isPlaying ? "orange" : "green"}>
<button
id="play-btn"
onClick={handlePlayClick}
data-color={isPlaying ? "orange" : "green"}
>
{isPlaying ? "Pause" : "Play"}
</button>
</div>
<div ref={waveformRef} className="w-full"></div>
{/* TODO: Download audio <a> tag */}
</div>
)
);
}

View File

@@ -37,7 +37,7 @@ const useWebRTC = (stream, setIsRecording) => {
peer.on("connect", () => {
console.log("WebRTC connected");
setData(prevData => ({ ...prevData, peer: peer }));
setData((prevData) => ({ ...prevData, peer: peer }));
});
peer.on("data", (data) => {

View File

@@ -16,7 +16,15 @@ export default function RootLayout({ children }) {
<Head>
<title>Test</title>
</Head>
<body className={roboto.className}>{children}</body>
<body className={roboto.className + " flex flex-col min-h-screen"}>
<main className="flex-grow">
{children}
</main>
<footer className="w-full bg-gray-800 text-white text-center py-4 fixed inset-x-0 bottom-0">
© 2023 Reflector, a product of Monadical
</footer>
</body>
</html>
);
}

View File

@@ -11,7 +11,6 @@ const App = () => {
// This is where you'd send the stream and receive the data from the server.
// transcription, summary, etc
const serverData = useWebRTC(stream, () => {});
const text = serverData?.text ?? "";
return (
<div className="flex flex-col items-center h-[100svh]">
@@ -22,15 +21,11 @@ const App = () => {
<Recorder setStream={setStream} onStop={() => serverData.peer.send(JSON.stringify({ cmd: 'STOP' }))}/>
<Dashboard
transcriptionText={serverData.text ?? "(No transcription text)"}
transcriptionText={serverData.text ?? "..."}
finalSummary={serverData.finalSummary}
topics={serverData.topics ?? []}
stream={stream}
/>
<footer className="w-full bg-gray-800 text-center py-4 mt-auto text-white">
Reflector © 2023 Monadical
</footer>
</div>
);
};