switch to TS

Jose B
2023-08-22 16:05:34 -05:00
parent 86917e9b7c
commit 7b7d87c06d
4 changed files with 135 additions and 56 deletions

View File

@@ -1,7 +1,21 @@
// Override the startRecording method so we can pass the desired stream
// Checkout: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
// Source code: https://github.com/katspaugh/wavesurfer.js/blob/fa2bcfe/src/plugins/record.ts
/**
* Record audio from the microphone, render a waveform and download the audio.
*/
import RecordPlugin from "wavesurfer.js/dist/plugins/record";
import BasePlugin, {
type BasePluginEvents,
} from "wavesurfer.js/dist/base-plugin";
export type RecordPluginOptions = {
mimeType?: MediaRecorderOptions["mimeType"];
audioBitsPerSecond?: MediaRecorderOptions["audioBitsPerSecond"];
};
export type RecordPluginEvents = BasePluginEvents & {
startRecording: [];
stopRecording: [];
};
const MIME_TYPES = [
"audio/webm",
@@ -13,11 +27,44 @@ const MIME_TYPES = [
const findSupportedMimeType = () =>
MIME_TYPES.find((mimeType) => MediaRecorder.isTypeSupported(mimeType));
class CustomRecordPlugin extends RecordPlugin {
static create(options) {
return new CustomRecordPlugin(options || {});
class RecordPlugin extends BasePlugin<RecordPluginEvents, RecordPluginOptions> {
private mediaRecorder: MediaRecorder | null = null;
private recordedUrl = "";
private savedCursorWidth = 1;
private savedInteractive = true;
public static create(options?: RecordPluginOptions) {
return new RecordPlugin(options || {});
}
render(stream) {
private preventInteraction() {
if (this.wavesurfer) {
// ?? (not ||) so an explicit cursorWidth of 0 or interact: false is saved and restored correctly
this.savedCursorWidth = this.wavesurfer.options.cursorWidth ?? 1;
this.savedInteractive = this.wavesurfer.options.interact ?? true;
this.wavesurfer.options.cursorWidth = 0;
this.wavesurfer.options.interact = false;
}
}
private restoreInteraction() {
if (this.wavesurfer) {
this.wavesurfer.options.cursorWidth = this.savedCursorWidth;
this.wavesurfer.options.interact = this.savedInteractive;
}
}
onInit() {
this.preventInteraction();
}
private loadBlob(data: Blob[], type: string) {
const blob = new Blob(data, { type });
this.recordedUrl = URL.createObjectURL(blob);
this.restoreInteraction();
this.wavesurfer?.load(this.recordedUrl);
}
render(stream: MediaStream): () => void {
if (!this.wavesurfer) return () => undefined;
const container = this.wavesurfer.getWrapper();
@@ -36,11 +83,12 @@ class CustomRecordPlugin extends RecordPlugin {
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
let animationId, previousTimeStamp;
let animationId: number, previousTimeStamp: number;
const DATA_SIZE = 128.0;
const BUFFER_SIZE = 2 ** 8;
const dataBuffer = new Array(BUFFER_SIZE).fill(canvas.height);
const dataBuffer = new Array(BUFFER_SIZE).fill(DATA_SIZE);
const drawWaveform = (timeStamp) => {
const drawWaveform = (timeStamp: number) => {
if (!canvasCtx) return;
analyser.getByteTimeDomainData(dataArray);
@@ -64,9 +112,9 @@ class CustomRecordPlugin extends RecordPlugin {
let x = 0;
for (let i = 0; i < dataBuffer.length; i++) {
const valueNormalized = dataBuffer[i] / canvas.height;
const y = (valueNormalized * canvas.height) / 2;
const sliceHeight = canvas.height + 1 - y * 2;
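// dataBuffer holds byte-scale samples where DATA_SIZE (~128) means silence;
// y and sliceHeight below center a bar on the canvas midline whose height
// grows with the sample's deviation from DATA_SIZE.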
const y = (canvas.height * dataBuffer[i]) / (2 * DATA_SIZE);
const sliceHeight =
((1 - canvas.height) * dataBuffer[i]) / DATA_SIZE + canvas.height;
canvasCtx.fillRect(x, y, (sliceWidth * 2) / 3, sliceHeight);
x += sliceWidth;
@@ -75,7 +123,7 @@ class CustomRecordPlugin extends RecordPlugin {
animationId = requestAnimationFrame(drawWaveform);
};
drawWaveform();
drawWaveform(0);
return () => {
if (animationId) {
@@ -94,7 +142,17 @@ class CustomRecordPlugin extends RecordPlugin {
canvas?.remove();
};
}
startRecording(stream) {
private cleanUp() {
this.stopRecording();
this.wavesurfer?.empty();
if (this.recordedUrl) {
URL.revokeObjectURL(this.recordedUrl);
this.recordedUrl = "";
}
}
public async startRecording(stream: MediaStream) {
this.preventInteraction();
this.cleanUp();
@@ -103,7 +161,7 @@ class CustomRecordPlugin extends RecordPlugin {
mimeType: this.options.mimeType || findSupportedMimeType(),
audioBitsPerSecond: this.options.audioBitsPerSecond,
});
const recordedChunks = [];
const recordedChunks: Blob[] = [];
mediaRecorder.addEventListener("dataavailable", (event) => {
if (event.data.size > 0) {
@@ -123,6 +181,25 @@ class CustomRecordPlugin extends RecordPlugin {
this.mediaRecorder = mediaRecorder;
}
public isRecording(): boolean {
return this.mediaRecorder?.state === "recording";
}
public stopRecording() {
if (this.isRecording()) {
this.mediaRecorder?.stop();
}
}
public getRecordedUrl(): string {
return this.recordedUrl;
}
public destroy() {
super.destroy();
this.cleanUp();
}
}
export default CustomRecordPlugin;
export default RecordPlugin;
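
For orientation, a minimal sketch of how this plugin can be driven outside the React component further down. The container selector, the getUserMedia call, and the five-second timeout are placeholders for the example; create, startRecording, stopRecording, the stopRecording event and getRecordedUrl are the APIs defined above.

import WaveSurfer from "wavesurfer.js";
import RecordPlugin from "./lib/custom-plugins/record"; // import path is an assumption

const wavesurfer = WaveSurfer.create({ container: "#waveform" }); // "#waveform" is a placeholder
const record = wavesurfer.registerPlugin(RecordPlugin.create());

// When recording stops, the plugin loads the blob URL back into the waveform;
// the same URL is exposed via getRecordedUrl() for downloads.
record.on("stopRecording", () => {
  console.log("recorded blob URL:", record.getRecordedUrl());
});

// Record from any MediaStream, e.g. the default microphone.
navigator.mediaDevices.getUserMedia({ audio: true }).then(async (stream) => {
  await record.startRecording(stream);
  setTimeout(() => record.stopRecording(), 5000); // stop after five seconds
});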

View File

@@ -36,12 +36,7 @@ const App = () => {
} = useAudioDevice();
return (
<div className="flex flex-col items-center h-[100svh] bg-gradient-to-r from-[#8ec5fc30] to-[#e0c3fc42]">
<div className="h-[13svh] flex flex-col justify-center items-center">
<h1 className="text-5xl font-bold text-blue-500">Reflector</h1>
<p className="text-gray-500">Capture The Signal, Not The Noise</p>
</div>
<div className="w-full flex flex-col items-center h-[100svh]">
{permissionOk ? (
<>
<Recorder

View File

@@ -1,18 +1,22 @@
import React, { useRef, useEffect, useState } from "react";
import WaveSurfer from "wavesurfer.js";
import RecordPlugin from "../lib/custom-plugins/record";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faDownload } from "@fortawesome/free-solid-svg-icons";
import Dropdown from "react-dropdown";
import Dropdown, { Option } from "react-dropdown";
import "react-dropdown/style.css";
import CustomRecordPlugin from "../lib/CustomRecordPlugin";
import { formatTime } from "../lib/time";
const AudioInputsDropdown = (props) => {
const [ddOptions, setDdOptions] = useState([]);
const AudioInputsDropdown = (props: {
audioDevices: Option[];
setDeviceId: React.Dispatch<React.SetStateAction<string | null>>;
disabled: boolean;
}) => {
const [ddOptions, setDdOptions] = useState<Option[]>([]);
useEffect(() => {
setDdOptions(props.audioDevices);
@@ -21,8 +25,8 @@ const AudioInputsDropdown = (props) => {
);
}, [props.audioDevices]);
const handleDropdownChange = (e) => {
props.setDeviceId(e.value);
const handleDropdownChange = (option: Option) => {
props.setDeviceId(option.value);
};
return (
@@ -36,18 +40,19 @@ const AudioInputsDropdown = (props) => {
};
export default function Recorder(props) {
const waveformRef = useRef();
const [wavesurfer, setWavesurfer] = useState(null);
const [record, setRecord] = useState(null);
const [isRecording, setIsRecording] = useState(false);
const [isPlaying, setIsPlaying] = useState(false);
const [deviceId, setDeviceId] = useState(null);
const [currentTime, setCurrentTime] = useState(0);
const [timeInterval, setTimeInterval] = useState(null);
const [duration, setDuration] = useState(0);
const waveformRef = useRef<HTMLDivElement>(null);
const [wavesurfer, setWavesurfer] = useState<WaveSurfer | null>(null);
const [record, setRecord] = useState<RecordPlugin | null>(null);
const [isRecording, setIsRecording] = useState<boolean>(false);
const [isPlaying, setIsPlaying] = useState<boolean>(false);
const [deviceId, setDeviceId] = useState<string | null>(null);
const [currentTime, setCurrentTime] = useState<number>(0);
const [timeInterval, setTimeInterval] = useState<number | null>(null);
const [duration, setDuration] = useState<number>(0);
useEffect(() => {
document.getElementById("play-btn").disabled = true;
const playBtn = document.getElementById("play-btn");
if (playBtn) playBtn.setAttribute("disabled", "true");
if (waveformRef.current) {
const _wavesurfer = WaveSurfer.create({
@@ -72,7 +77,7 @@ export default function Recorder(props) {
});
_wavesurfer.on("timeupdate", setCurrentTime);
setRecord(_wavesurfer.registerPlugin(CustomRecordPlugin.create()));
setRecord(_wavesurfer.registerPlugin(RecordPlugin.create()));
setWavesurfer(_wavesurfer);
return () => {
_wavesurfer.destroy();
@@ -86,8 +91,10 @@ export default function Recorder(props) {
if (record) {
return record.on("stopRecording", () => {
const link = document.getElementById("download-recording");
link.href = record.getRecordedUrl();
link.download = "reflector-recording.webm";
if (!link) return;
link.setAttribute("href", record.getRecordedUrl());
link.setAttribute("download", "reflector-recording.webm");
link.style.visibility = "visible";
});
}
@@ -95,13 +102,13 @@ export default function Recorder(props) {
useEffect(() => {
if (isRecording) {
const interval = setInterval(() => {
const interval = window.setInterval(() => {
setCurrentTime((prev) => prev + 1);
}, 1000);
setTimeInterval(interval);
return () => clearInterval(interval);
} else {
clearInterval(timeInterval);
clearInterval(timeInterval as number);
setCurrentTime((prev) => {
setDuration(prev);
return 0;
@@ -116,7 +123,8 @@ export default function Recorder(props) {
props.onStop();
record.stopRecording();
setIsRecording(false);
document.getElementById("play-btn").disabled = false;
const playBtn = document.getElementById("play-btn");
if (playBtn) playBtn.removeAttribute("disabled");
} else {
const stream = await props.getAudioStream(deviceId);
props.setStream(stream);
@@ -133,9 +141,8 @@ export default function Recorder(props) {
const timeLabel = () => {
if (isRecording) return formatTime(currentTime);
else if (duration)
return `${formatTime(currentTime)}/${formatTime(duration)}`;
else "";
if (duration) return `${formatTime(currentTime)}/${formatTime(duration)}`;
return "";
};
return (

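The component imports formatTime from "../lib/time", which this commit does not touch. A hypothetical stand-in is sketched below purely so the timeLabel logic reads self-contained; the real helper may differ.

// Hypothetical stand-in for ../lib/time: seconds -> "m:ss"
export const formatTime = (seconds: number): string => {
  const mins = Math.floor(seconds / 60);
  const secs = Math.floor(seconds % 60);
  return `${mins}:${secs.toString().padStart(2, "0")}`;
};
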
View File

@@ -1,8 +1,10 @@
import { useEffect, useState } from "react";
import { useState } from "react";
import { Option } from "react-dropdown";
const useAudioDevice = () => {
const [permissionOk, setPermissionOk] = useState(false);
const [audioDevices, setAudioDevices] = useState([]);
const [audioDevices, setAudioDevices] = useState<Option[]>([]);
const [loading, setLoading] = useState(true);
const requestPermission = () => {
@@ -22,7 +24,9 @@ const useAudioDevice = () => {
});
};
const getAudioStream = async (deviceId) => {
const getAudioStream = async (
deviceId: string,
): Promise<MediaStream | null> => {
try {
const stream = await navigator.mediaDevices.getUserMedia({
audio: {
@@ -39,7 +43,7 @@ const useAudioDevice = () => {
}
};
const updateDevices = async () => {
const updateDevices = async (): Promise<void> => {
const devices = await navigator.mediaDevices.enumerateDevices();
const _audioDevices = devices
.filter(
@@ -51,10 +55,6 @@ const useAudioDevice = () => {
setAudioDevices(_audioDevices);
};
useEffect(() => {
requestPermission();
}, []);
return {
permissionOk,
audioDevices,