Last touches for new audiowave integration

Koper
2023-07-21 18:08:42 +07:00
parent 863a04094b
commit f4235147aa
5 changed files with 108 additions and 92 deletions

View File

@@ -1,44 +1,51 @@
 // Override the startRecording method so we can pass the desired stream
 // Checkout: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
-import RecordPlugin from "wavesurfer.js/dist/plugins/record"
+import RecordPlugin from "wavesurfer.js/dist/plugins/record";

-const MIME_TYPES = ['audio/webm', 'audio/wav', 'audio/mpeg', 'audio/mp4', 'audio/mp3']
-const findSupportedMimeType = () => MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType))
+const MIME_TYPES = [
+  "audio/webm",
+  "audio/wav",
+  "audio/mpeg",
+  "audio/mp4",
+  "audio/mp3",
+];
+const findSupportedMimeType = () =>
+  MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType));

 class CustomRecordPlugin extends RecordPlugin {
   static create(options) {
-    return new CustomRecordPlugin(options || {})
+    return new CustomRecordPlugin(options || {});
   }

   startRecording(stream) {
-    this.preventInteraction()
-    this.cleanUp()
+    this.preventInteraction();
+    this.cleanUp();

-    const onStop = this.render(stream)
+    const onStop = this.render(stream);
     const mediaRecorder = new MediaRecorder(stream, {
       mimeType: this.options.mimeType || findSupportedMimeType(),
       audioBitsPerSecond: this.options.audioBitsPerSecond,
-    })
-    const recordedChunks = []
+    });
+    const recordedChunks = [];

-    mediaRecorder.addEventListener('dataavailable', (event) => {
+    mediaRecorder.addEventListener("dataavailable", (event) => {
       if (event.data.size > 0) {
-        recordedChunks.push(event.data)
+        recordedChunks.push(event.data);
       }
-    })
+    });

-    mediaRecorder.addEventListener('stop', () => {
-      onStop()
-      this.loadBlob(recordedChunks, mediaRecorder.mimeType)
-      this.emit('stopRecording')
-    })
+    mediaRecorder.addEventListener("stop", () => {
+      onStop();
+      this.loadBlob(recordedChunks, mediaRecorder.mimeType);
+      this.emit("stopRecording");
+    });

-    mediaRecorder.start()
-    this.emit('startRecording')
+    mediaRecorder.start();
+    this.emit("startRecording");

-    this.mediaRecorder = mediaRecorder
+    this.mediaRecorder = mediaRecorder;
   }
 }

-export default CustomRecordPlugin;
+export default CustomRecordPlugin;
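For context (not part of this diff), a minimal sketch of how the overridden plugin is meant to be consumed, mirroring the Recorder component later in this commit: the caller acquires its own MediaStream and hands it to startRecording, instead of having the plugin request the microphone itself, so the same stream can also be forwarded elsewhere (e.g. to the WebRTC layer). The "#waveform" selector, the deviceId, and the helper names below are illustrative assumptions.

// Usage sketch only; "#waveform" and deviceId are placeholder assumptions.
import WaveSurfer from "wavesurfer.js";
import CustomRecordPlugin from "./CustomRecordPlugin";

const ws = WaveSurfer.create({ container: "#waveform" });
const record = ws.registerPlugin(CustomRecordPlugin.create());

async function startFromDevice(deviceId) {
  // The caller owns the stream, so other consumers can reuse it too.
  const stream = await navigator.mediaDevices.getUserMedia({
    audio: { deviceId },
  });
  await record.startRecording(stream);
  return stream;
}

// Stopping makes the plugin load the recorded chunks into the waveform
// and emit "stopRecording".
function stop() {
  record.stopRecording();
}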

View File

@@ -34,14 +34,14 @@ export function Dashboard({
             <div className="w-1/4">
               {item.title}{" "}
               <span
-                className={`inline-block transform transition-transform duration-200 ${openIndex === index ? "rotate-90" : ""
-                  }`}
+                className={`inline-block transform transition-transform duration-200 ${
+                  openIndex === index ? "rotate-90" : ""
+                }`}
               >
                 {">"}
               </span>
             </div>
-            <div className="w-1/4 flex flex-row space-x-0.5">
-            </div>
+            <div className="w-1/4 flex flex-row space-x-0.5"></div>
           </div>
           {openIndex === index && (
             <div className="mt-2 p-2 bg-white">{item.transcript}</div>
@@ -53,8 +53,7 @@ export function Dashboard({
       <div className="flex justify-between">
         <div className="w-1/4">Live</div>
         <div className="w-1/4">Transcript</div>
-        <div className="w-1/4 flex flex-row space-x-0.5">
-        </div>
+        <div className="w-1/4 flex flex-row space-x-0.5"></div>
       </div>
       <div className="mt-2 p-2 bg-white temp-transcription">
         {transcriptionText}
@@ -63,4 +62,4 @@ export function Dashboard({
       </div>
     </>
   );
-}
+}

View File

@@ -2,106 +2,117 @@ import React, { useRef, useEffect, useState } from "react";
 import WaveSurfer from "wavesurfer.js";
-import Dropdown from 'react-dropdown'
-import 'react-dropdown/style.css'
-import CustomRecordPlugin from './CustomRecordPlugin'
+import Dropdown from "react-dropdown";
+import "react-dropdown/style.css";
+import CustomRecordPlugin from "./CustomRecordPlugin";

 export default function Recorder(props) {
-  const waveformRef = useRef()
-  const [wavesurfer, setWavesurfer] = useState(null)
-  const [record, setRecord] = useState(null)
-  const [isRecording, setIsRecording] = useState(false)
-  const [isPlaying, setIsPlaying] = useState(false)
-  const [deviceId, setDeviceId] = useState(null)
-  const [ddOptions, setDdOptions] = useState([])
+  const waveformRef = useRef();
+  const [wavesurfer, setWavesurfer] = useState(null);
+  const [record, setRecord] = useState(null);
+  const [isRecording, setIsRecording] = useState(false);
+  const [isPlaying, setIsPlaying] = useState(false);
+  const [deviceId, setDeviceId] = useState(null);
+  const [ddOptions, setDdOptions] = useState([]);

   useEffect(() => {
-    document.getElementById('play-btn').disabled = true
+    document.getElementById("play-btn").disabled = true;

-    navigator.mediaDevices.enumerateDevices().then(devices => {
+    navigator.mediaDevices.enumerateDevices().then((devices) => {
       const audioDevices = devices
-        .filter(d => d.kind === 'audioinput')
-        .map(d => ({value: d.deviceId, label: d.label}))
-      if (audioDevices.length < 1) return console.log("no audio input devices")
-      setDdOptions(audioDevices)
-      setDeviceId(audioDevices[0].value)
-    })
+        .filter((d) => d.kind === "audioinput")
+        .map((d) => ({ value: d.deviceId, label: d.label }));
+      if (audioDevices.length < 1) return console.log("no audio input devices");
+      setDdOptions(audioDevices);
+      setDeviceId(audioDevices[0].value);
+    });

-    if(waveformRef.current) {
+    if (waveformRef.current) {
       const _wavesurfer = WaveSurfer.create({
         container: waveformRef.current,
-        waveColor: "#333",
-        progressColor: "#0178FF",
+        waveColor: "#cc3347",
+        progressColor: "#0178FF",
         cursorColor: "OrangeRed",
         hideScrollbar: true,
         autoCenter: true,
         barWidth: 2,
-      })
-      const wsWrapper = _wavesurfer.getWrapper()
-      wsWrapper.style.cursor = 'pointer'
-      wsWrapper.style.backgroundColor = 'lightgray'
-      wsWrapper.style.borderRadius = '15px'
+      });
+      const wsWrapper = _wavesurfer.getWrapper();
+      wsWrapper.style.cursor = "pointer";
+      wsWrapper.style.backgroundColor = "lightgray";
+      wsWrapper.style.borderRadius = "15px";

-      _wavesurfer.on('play', () => {
-        setIsPlaying(true)
-      })
-      _wavesurfer.on('pause', () => {
-        setIsPlaying(false)
-      })
+      _wavesurfer.on("play", () => {
+        setIsPlaying(true);
+      });
+      _wavesurfer.on("pause", () => {
+        setIsPlaying(false);
+      });

-      setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()))
-      setWavesurfer(_wavesurfer)
+      setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()));
+      setWavesurfer(_wavesurfer);

       return () => {
-        _wavesurfer.destroy()
-        setIsRecording(false)
-        setIsPlaying(false)
-      }
+        _wavesurfer.destroy();
+        setIsRecording(false);
+        setIsPlaying(false);
+      };
     }
-  }, [])
+  }, []);

   const handleRecClick = async () => {
-    if (!record) return console.log("no record")
+    if (!record) return console.log("no record");

-    if(record?.isRecording()) {
-      record.stopRecording()
-      setIsRecording(false)
-      document.getElementById('play-btn').disabled = false
+    if (record?.isRecording()) {
+      record.stopRecording();
+      setIsRecording(false);
+      document.getElementById("play-btn").disabled = false;
     } else {
-      const stream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId } })
-      await record.startRecording(stream)
-      props.setStream(stream)
-      setIsRecording(true)
+      const stream = await navigator.mediaDevices.getUserMedia({
+        audio: { deviceId },
+      });
+      await record.startRecording(stream);
+      props.setStream(stream);
+      setIsRecording(true);
     }
-  }
+  };

   const handlePlayClick = () => {
-    wavesurfer?.playPause()
-  }
+    wavesurfer?.playPause();
+  };

   const handleDropdownChange = (e) => {
-    setDeviceId(e.value)
-  }
+    setDeviceId(e.value);
+  };

   return (
     <div className="flex flex-col items-center justify-center max-w-[75vw] w-full">
       <div className="flex my-2 mx-auto">
-        <Dropdown options={ddOptions} onChange={handleDropdownChange} value={ddOptions[0]} />
+        <Dropdown
+          options={ddOptions}
+          onChange={handleDropdownChange}
+          value={ddOptions[0]}
+        />
         &nbsp;
-        <button onClick={handleRecClick} data-color={isRecording ? "red" : "blue"}>
+        <button
+          onClick={handleRecClick}
+          data-color={isRecording ? "red" : "blue"}
+        >
           {isRecording ? "Stop" : "Record"}
         </button>
         &nbsp;
-        <button id="play-btn" onClick={handlePlayClick} data-color={isPlaying ? "orange" : "green"}>
+        <button
+          id="play-btn"
+          onClick={handlePlayClick}
+          data-color={isPlaying ? "orange" : "green"}
+        >
           {isPlaying ? "Pause" : "Play"}
         </button>
       </div>
       <div ref={waveformRef} className="w-full"></div>
       {/* TODO: Download audio <a> tag */}
     </div>
-  )
+  );
 }
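The stream that Recorder creates in handleRecClick is shared with the parent through props.setStream; the next two files show the WebRTC side. Below is a hypothetical parent sketch (App's render is not part of this diff, so the import paths and prop wiring here are assumptions) of how the pieces are expected to connect:

// Hypothetical wiring sketch; the real App render is not shown in this diff.
import React, { useState } from "react";
import Recorder from "./Recorder"; // path assumed
import useWebRTC from "./useWebRTC"; // path and export style assumed

const App = () => {
  const [stream, setStream] = useState(null);
  const [isRecording, setIsRecording] = useState(false);
  // Same call as in the App diff below: the hook receives the live stream
  // and a setter it can use to flip the recording state.
  const serverData = useWebRTC(stream, setIsRecording);

  // Recorder hands its getUserMedia stream up via setStream.
  return <Recorder setStream={setStream} />;
};

export default App;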

View File

@@ -37,7 +37,7 @@ const useWebRTC = (stream, setIsRecording) => {
     peer.on("connect", () => {
       console.log("WebRTC connected");
-      setData(prevData => ({ ...prevData, peer: peer }));
+      setData((prevData) => ({ ...prevData, peer: peer }));
     });

     peer.on("data", (data) => {

View File

@@ -18,11 +18,10 @@ const App = () => {
         .then(setStream)
         .catch((err) => console.error(err));
     } else if (!recording && serverData.peer) {
-      serverData.peer.send(JSON.stringify({ cmd: 'STOP' }));
+      serverData.peer.send(JSON.stringify({ cmd: "STOP" }));
     }
   };

   const serverData = useWebRTC(stream, setIsRecording);

   return (
@@ -45,4 +44,4 @@ const App = () => {
   );
 };

-export default App;
+export default App;