Additional features

Koper
2023-07-18 15:45:01 +07:00
parent dc0750f178
commit 21aec04008
3 changed files with 35 additions and 11 deletions

View File

@@ -1,6 +1,6 @@
 import React, { useRef, useEffect } from 'react';
-function AudioVisualizer() {
+function AudioVisualizer(props) {
     const canvasRef = useRef(null);
     useEffect(() => {
@@ -29,6 +29,8 @@ function AudioVisualizer() {
         analyser.getByteFrequencyData(dataArray);
+        context.fillStyle = '#000';
+        context.fillRect(0, 0, canvas.width, canvas.height);
         for (let i = 0; i < bufferLength; i++) {
             barHeight = dataArray[i];
@@ -49,7 +51,10 @@ function AudioVisualizer() {
         return () => cancelAnimationFrame(animationFrameId);
     }, []);
-    return <canvas className='w-full h-16' ref={canvasRef} />;
+    return <>
+        <p>Is recording: {props.isRecording ? "true" : "false"}</p>
+        <canvas className='w-full h-16' ref={canvasRef} />
+    </>;
 }
 export default AudioVisualizer;
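
The two lines added in the second hunk repaint the canvas black at the start of each animation frame so bars from the previous frame do not accumulate. A minimal sketch of the draw loop they presumably sit in (only the fillStyle/fillRect clearing pair is from this commit; drawFrame, the bar sizing and the bar colour are illustrative assumptions):

// Sketch only: everything except the two clearing lines is assumed.
function drawFrame(context, canvas, analyser, dataArray, bufferLength) {
    // Pull the latest frequency magnitudes (0-255 per bin) from the analyser.
    analyser.getByteFrequencyData(dataArray);

    // Added in this commit: wipe the previous frame before drawing new bars.
    context.fillStyle = '#000';
    context.fillRect(0, 0, canvas.width, canvas.height);

    const barWidth = canvas.width / bufferLength;                // assumed sizing
    for (let i = 0; i < bufferLength; i++) {
        const barHeight = (dataArray[i] / 255) * canvas.height;  // assumed scaling
        context.fillStyle = '#4ade80';                           // assumed bar colour
        context.fillRect(i * barWidth, canvas.height - barHeight, barWidth, barHeight);
    }
}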

View File

@@ -2,7 +2,7 @@ import { Mulberry32 } from '../utils.js'
 import React, { useState, useEffect } from 'react';
 import AudioVisualizer from './audioVisualizer.js';
-export function Dashboard()
+export function Dashboard(props)
 {
     const [openIndex, setOpenIndex] = useState(null);
     const [liveTranscript, setLiveTranscript] = useState("");
@@ -107,7 +107,15 @@ export function Dashboard()
                         {liveTranscript}
                     </div>
                 </div>
-                <AudioVisualizer />
+                <AudioVisualizer isRecording={props.isRecording} />
+                <button
+                    onClick={() => props.onRecord(!props.isRecording)}
+                    className={`px-4 py-2 mb-4 text-2xl font-bold rounded ${props.isRecording ? 'bg-red-500' : 'bg-blue-500'}`}
+                >
+                    {props.isRecording ? 'STOP' : 'RESUME'}
+                </button>
             </div>
         </>
     );
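
Dashboard no longer owns the recording state: it renders props.isRecording and asks the parent to flip it via props.onRecord. If a runtime check of the new contract is wanted, it could be documented with prop-types; this is a suggestion, not part of the commit:

// Suggested only; prop-types is not added by this commit.
import PropTypes from 'prop-types';

Dashboard.propTypes = {
    isRecording: PropTypes.bool, // current state, owned by the parent
    onRecord: PropTypes.func,    // called with the desired new state, e.g. onRecord(!isRecording)
};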

View File

@@ -11,24 +11,35 @@ const App = () => {
     const handleRecord = (recording) => {
         console.log("handleRecord", recording);
         setIsRecording(recording);
+        setSplashScreen(false);
+        if (recording)
+        {
+            navigator.mediaDevices.getUserMedia({ audio: true })
+                .then(setStream)
+                .catch(err => console.error(err));
+        } else if (!recording) {
+            if (stream) {
+                const tracks = stream.getTracks();
+                tracks.forEach(track => track.stop());
+                setStream(null);
+            }
+            setIsRecording(false);
+        }
     };
     const [stream, setStream] = useState(null);
     const serverData = useWebRTC(stream);
     console.log(serverData);
-    useEffect(() => {
-        navigator.mediaDevices.getUserMedia({ audio: true })
-            .then(setStream)
-            .catch(err => console.error(err));
-    }, []);
     return (
         <div className="flex flex-col items-center justify-center min-h-screen bg-gray-100">
             {splashScreen && <Record isRecording={isRecording} onRecord={(recording) => handleRecord(recording)} /> }
-            {!splashScreen && <Dashboard />}
+            {!splashScreen && <Dashboard isRecording={isRecording} onRecord={(recording) => handleRecord(recording)} />}
         </div>
     );
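
With this change the stream is acquired inside handleRecord when recording starts and its tracks are stopped when recording ends, so the microphone is actually released on STOP. The core acquire/release pattern, reduced to a standalone sketch (the function names below are illustrative, not part of the codebase):

// Sketch of the pattern handleRecord implements.
async function startMicrophone() {
    // Resolves with a MediaStream once the user grants microphone access.
    return navigator.mediaDevices.getUserMedia({ audio: true });
}

function stopMicrophone(stream) {
    // Stopping every track releases the device and turns off the browser's
    // recording indicator; the now-dead stream can then be dropped from state.
    stream.getTracks().forEach(track => track.stop());
}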