mirror of
https://github.com/Monadical-SAS/reflector.git
synced 2025-12-21 12:49:06 +00:00
Use wavesurfer.js, extend the Record plugin class, and improve the frontend
This commit is contained in:
44
app/components/CustomRecordPlugin.js
Normal file
44
app/components/CustomRecordPlugin.js
Normal file
@@ -0,0 +1,44 @@
|
||||
// Override the startRecording method so we can pass the desired stream
|
||||
// Checkout: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
|
||||
|
||||
import RecordPlugin from "wavesurfer.js/dist/plugins/record"
|
||||
|
||||
// Candidate audio container formats, in order of preference.
const MIME_TYPES = ['audio/webm', 'audio/wav', 'audio/mpeg', 'audio/mp4', 'audio/mp3']

// Return the first MIME type the current browser's MediaRecorder can record,
// or undefined when none of the candidates are supported.
const findSupportedMimeType = () => {
  for (const candidate of MIME_TYPES) {
    if (MediaRecorder.isTypeSupported(candidate)) return candidate
  }
  return undefined
}
|
||||
|
||||
// Subclass of wavesurfer's RecordPlugin whose startRecording accepts a
// caller-provided MediaStream instead of the plugin acquiring one itself.
// Upstream source: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
class CustomRecordPlugin extends RecordPlugin {
  // Factory mirroring RecordPlugin.create(); guarantees an options object.
  static create(options) {
    return new CustomRecordPlugin(options ? options : {})
  }

  // Record the given MediaStream: render the live waveform, buffer the
  // encoded chunks, and on stop load the capture back into wavesurfer
  // while re-emitting the plugin's lifecycle events.
  startRecording(stream) {
    this.preventInteraction()
    this.cleanUp()

    const onStop = this.render(stream)
    const recorder = new MediaRecorder(stream, {
      // Fall back to the first browser-supported format when no explicit
      // mimeType option was provided.
      mimeType: this.options.mimeType || findSupportedMimeType(),
      audioBitsPerSecond: this.options.audioBitsPerSecond,
    })

    const chunks = []
    recorder.addEventListener('dataavailable', ({ data }) => {
      if (data.size > 0) {
        chunks.push(data)
      }
    })

    recorder.addEventListener('stop', () => {
      onStop()
      this.loadBlob(chunks, recorder.mimeType)
      this.emit('stopRecording')
    })

    recorder.start()
    this.emit('startRecording')
    this.mediaRecorder = recorder
  }
}
|
||||
|
||||
export default CustomRecordPlugin;
|
||||
@@ -1,63 +0,0 @@
|
||||
import React, { useRef, useEffect } from "react";
|
||||
|
||||
function AudioVisualizer(props) {
|
||||
const canvasRef = useRef(null);
|
||||
|
||||
useEffect(() => {
|
||||
let animationFrameId;
|
||||
|
||||
const canvas = canvasRef.current;
|
||||
const context = canvas.getContext("2d");
|
||||
const analyser = new AnalyserNode(new AudioContext());
|
||||
|
||||
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
|
||||
const audioContext = new (window.AudioContext ||
|
||||
window.webkitAudioContext)();
|
||||
const source = audioContext.createMediaStreamSource(stream);
|
||||
const analyser = audioContext.createAnalyser();
|
||||
analyser.fftSize = 2048;
|
||||
source.connect(analyser);
|
||||
|
||||
const bufferLength = analyser.frequencyBinCount;
|
||||
const dataArray = new Uint8Array(bufferLength);
|
||||
const barWidth = (canvas.width / bufferLength) * 2.5;
|
||||
let barHeight;
|
||||
let x = 0;
|
||||
|
||||
function renderFrame() {
|
||||
x = 0;
|
||||
analyser.getByteFrequencyData(dataArray);
|
||||
context.fillStyle = "#000";
|
||||
context.fillRect(0, 0, canvas.width, canvas.height);
|
||||
|
||||
for (let i = 0; i < bufferLength; i++) {
|
||||
barHeight = dataArray[i];
|
||||
|
||||
const red = 255;
|
||||
const green = 250 * (i / bufferLength);
|
||||
const blue = barHeight + 25 * (i / bufferLength);
|
||||
|
||||
context.fillStyle = `rgb(${red},${green},${blue})`;
|
||||
context.fillRect(
|
||||
x,
|
||||
canvas.height - barHeight / 2,
|
||||
barWidth,
|
||||
barHeight / 2,
|
||||
);
|
||||
|
||||
x += barWidth + 1;
|
||||
}
|
||||
animationFrameId = requestAnimationFrame(renderFrame);
|
||||
}
|
||||
renderFrame();
|
||||
});
|
||||
|
||||
return () => cancelAnimationFrame(animationFrameId);
|
||||
}, []);
|
||||
|
||||
return (
|
||||
<canvas className="w-full h-16" ref={canvasRef} />
|
||||
);
|
||||
}
|
||||
|
||||
export default AudioVisualizer;
|
||||
@@ -1,36 +1,106 @@
|
||||
import AudioVisualizer from "./audioVisualizer.js";
|
||||
import React, { useRef, useEffect, useState } from "react";
|
||||
|
||||
import WaveSurfer from "wavesurfer.js";
|
||||
|
||||
import Dropdown from 'react-dropdown'
|
||||
import 'react-dropdown/style.css'
|
||||
|
||||
import CustomRecordPlugin from './CustomRecordPlugin'
|
||||
|
||||
|
||||
export default function Recorder(props) {
|
||||
let mediaRecorder = null; // mediaRecorder instance
|
||||
const waveformRef = useRef()
|
||||
const [wavesurfer, setWavesurfer] = useState(null)
|
||||
const [record, setRecord] = useState(null)
|
||||
const [isRecording, setIsRecording] = useState(false)
|
||||
const [isPlaying, setIsPlaying] = useState(false)
|
||||
const [deviceId, setDeviceId] = useState(null)
|
||||
const [ddOptions, setDdOptions] = useState([])
|
||||
|
||||
const startRecording = () => {
|
||||
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
|
||||
mediaRecorder = new MediaRecorder(stream);
|
||||
mediaRecorder.start();
|
||||
props.onRecord(true);
|
||||
});
|
||||
};
|
||||
useEffect(() => {
|
||||
document.getElementById('play-btn').disabled = true
|
||||
|
||||
const stopRecording = () => {
|
||||
if (mediaRecorder) {
|
||||
mediaRecorder.stop();
|
||||
props.onRecord(false);
|
||||
navigator.mediaDevices.enumerateDevices().then(devices => {
|
||||
const audioDevices = devices
|
||||
.filter(d => d.kind === 'audioinput')
|
||||
.map(d => ({value: d.deviceId, label: d.label}))
|
||||
|
||||
if (audioDevices.length < 1) return console.log("no audio input devices")
|
||||
|
||||
setDdOptions(audioDevices)
|
||||
setDeviceId(audioDevices[0].value)
|
||||
})
|
||||
|
||||
if(waveformRef.current) {
|
||||
const _wavesurfer = WaveSurfer.create({
|
||||
container: waveformRef.current,
|
||||
waveColor: "#333",
|
||||
progressColor: "#0178FF",
|
||||
cursorColor: "OrangeRed",
|
||||
hideScrollbar: true,
|
||||
autoCenter: true,
|
||||
barWidth: 2,
|
||||
})
|
||||
const wsWrapper = _wavesurfer.getWrapper()
|
||||
wsWrapper.style.cursor = 'pointer'
|
||||
wsWrapper.style.backgroundColor = 'lightgray'
|
||||
wsWrapper.style.borderRadius = '15px'
|
||||
|
||||
_wavesurfer.on('play', () => {
|
||||
setIsPlaying(true)
|
||||
})
|
||||
_wavesurfer.on('pause', () => {
|
||||
setIsPlaying(false)
|
||||
})
|
||||
|
||||
setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()))
|
||||
setWavesurfer(_wavesurfer)
|
||||
return () => {
|
||||
_wavesurfer.destroy()
|
||||
setIsRecording(false)
|
||||
setIsPlaying(false)
|
||||
}
|
||||
}
|
||||
};
|
||||
}, [])
|
||||
|
||||
const handleRecClick = async () => {
|
||||
if (!record) return console.log("no record")
|
||||
|
||||
if(record?.isRecording()) {
|
||||
record.stopRecording()
|
||||
setIsRecording(false)
|
||||
document.getElementById('play-btn').disabled = false
|
||||
} else {
|
||||
const stream = await navigator.mediaDevices.getUserMedia({ audio: { deviceId } })
|
||||
await record.startRecording(stream)
|
||||
props.setStream(stream)
|
||||
setIsRecording(true)
|
||||
}
|
||||
}
|
||||
|
||||
const handlePlayClick = () => {
|
||||
wavesurfer?.playPause()
|
||||
}
|
||||
|
||||
const handleDropdownChange = (e) => {
|
||||
setDeviceId(e.value)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col items-center justify-center">
|
||||
{props.isRecording && <AudioVisualizer />}
|
||||
|
||||
{props.isRecording ? (
|
||||
<button onClick={stopRecording} data-color="red">
|
||||
Stop
|
||||
</button>
|
||||
) : (
|
||||
<button onClick={startRecording} data-color="blue">
|
||||
Record
|
||||
</button>
|
||||
)}
|
||||
<div className="flex flex-col items-center justify-center max-w-[90vw] w-full">
|
||||
<div className="flex my-2 mx-auto">
|
||||
<Dropdown options={ddOptions} onChange={handleDropdownChange} value={ddOptions[0]} />
|
||||
|
||||
<button onClick={handleRecClick} data-color={isRecording ? "red" : "blue"}>
|
||||
{isRecording ? "Stop" : "Record"}
|
||||
</button>
|
||||
|
||||
<button id="play-btn" onClick={handlePlayClick} data-color={isPlaying ? "orange" : "green"}>
|
||||
{isPlaying ? "Pause" : "Play"}
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
<div ref={waveformRef} className="w-full"></div>
|
||||
{/* TODO: Download audio <a> tag */}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user