diff --git a/README.md b/README.md
index ed16ca96..58b561c4 100644
--- a/README.md
+++ b/README.md
@@ -38,7 +38,7 @@ To connect the application with your server, you need to implement the following
 1. **Signal Data Sending**: In the `useWebRTC` hook, when a `'signal'` event is emitted, the hook logs the signal data to the console. You should replace this logging with sending the data to the server:
 
 ```jsx
-peer.on('signal', data => {
+peer.on("signal", (data) => {
   // This is where you send the signal data to the server.
 });
 ```
@@ -46,7 +46,7 @@ peer.on('signal', data => {
 2. **Data Receiving**: The `useWebRTC` hook listens for the `'data'` event and, when it is emitted, sets the received data to the `data` state:
 
 ```jsx
-peer.on('data', data => {
+peer.on("data", (data) => {
   // Received data from the server.
   const serverData = JSON.parse(data.toString());
   setData(serverData);
@@ -60,7 +60,7 @@ The received data is expected to be a JSON object containing the live transcript
   "transcription": "live transcription...",
   "topics": [
     { "title": "topic 1", "description": "description 1" },
-    { "title": "topic 2", "description": "description 2" },
+    { "title": "topic 2", "description": "description 2" }
     // ...
   ]
 }
diff --git a/TIMELINE.md b/TIMELINE.md
index 95fd6d66..82007396 100644
--- a/TIMELINE.md
+++ b/TIMELINE.md
@@ -2,11 +2,11 @@
 Here's a structured timeline for our project completion:
 
-| Day       | Objective                                                |
-| --------- | -------------------------------------------------------- |
-| Tuesday   | Front-end and Back-end integration                       |
-| Wednesday | Project will be polished and tested by Adam              |
-| Thursday  | Project completion. Additional tests will be performed   |
-| Friday    | Big demo presentation                                    |
+| Day       | Objective                                               |
+| --------- | ------------------------------------------------------- |
+| Tuesday   | Front-end and Back-end integration                      |
+| Wednesday | Project will be polished and tested by Adam             |
+| Thursday  | Project completion. Additional tests will be performed  |
+| Friday    | Big demo presentation                                   |
 
 Let's stay focused and get our tasks done on time for a successful demo on Friday. Let's have a successful week!
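To make the README contract above concrete, here is a minimal sketch of what the two `useWebRTC` callbacks could look like once wired to a real server. It assumes a plain WebSocket as the signaling transport; the URL, the `{ type, payload }` envelope, and the `socket` variable are illustrative assumptions, not part of this PR:

```jsx
// Assumed signaling transport; any channel that reaches your server works.
const socket = new WebSocket("wss://your-server.example/signaling");

peer.on("signal", (data) => {
  // Forward the WebRTC handshake payload (offer/answer/ICE candidates)
  // to the server instead of logging it to the console.
  socket.send(JSON.stringify({ type: "signal", payload: data }));
});

peer.on("data", (data) => {
  // The peer delivers raw bytes, so decode before parsing, then store the
  // { transcription, topics } object described above in component state.
  const serverData = JSON.parse(data.toString());
  setData(serverData);
});
```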
diff --git a/app/components/audioVisualizer.js b/app/components/audioVisualizer.js
index eaf19e00..12954d63 100644
--- a/app/components/audioVisualizer.js
+++ b/app/components/audioVisualizer.js
@@ -1,60 +1,66 @@
-import React, { useRef, useEffect } from 'react';
+import React, { useRef, useEffect } from "react";
 
 function AudioVisualizer(props) {
-    const canvasRef = useRef(null);
+  const canvasRef = useRef(null);
 
-    useEffect(() => {
-        let animationFrameId;
+  useEffect(() => {
+    let animationFrameId;
 
-        const canvas = canvasRef.current;
-        const context = canvas.getContext('2d');
-        const analyser = new AnalyserNode(new AudioContext());
+    const canvas = canvasRef.current;
+    const context = canvas.getContext("2d");
+    const analyser = new AnalyserNode(new AudioContext());
 
-        navigator.mediaDevices.getUserMedia({ audio: true })
-            .then(stream => {
-                const audioContext = new (window.AudioContext || window.webkitAudioContext)();
-                const source = audioContext.createMediaStreamSource(stream);
-                const analyser = audioContext.createAnalyser();
-                analyser.fftSize = 2048;
-                source.connect(analyser);
+    navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
+      const audioContext = new (window.AudioContext ||
+        window.webkitAudioContext)();
+      const source = audioContext.createMediaStreamSource(stream);
+      const analyser = audioContext.createAnalyser();
+      analyser.fftSize = 2048;
+      source.connect(analyser);
 
-                const bufferLength = analyser.frequencyBinCount;
-                const dataArray = new Uint8Array(bufferLength);
-                const barWidth = (canvas.width / bufferLength) * 2.5;
-                let barHeight;
-                let x = 0;
+      const bufferLength = analyser.frequencyBinCount;
+      const dataArray = new Uint8Array(bufferLength);
+      const barWidth = (canvas.width / bufferLength) * 2.5;
+      let barHeight;
+      let x = 0;
 
-                function renderFrame() {
-                    x = 0;
-                    analyser.getByteFrequencyData(dataArray);
-                    context.fillStyle = '#000';
-                    context.fillRect(0, 0, canvas.width, canvas.height);
-
-                    for (let i = 0; i < bufferLength; i++) {
-                        barHeight = dataArray[i];
-
-                        const red = 255;
-                        const green = 250 * (i / bufferLength);
-                        const blue = barHeight + (25 * (i / bufferLength));
-
-                        context.fillStyle = `rgb(${red},${green},${blue})`;
-                        context.fillRect(x, canvas.height - barHeight / 2, barWidth, barHeight / 2);
-
-                        x += barWidth + 1;
-                    }
-                    animationFrameId = requestAnimationFrame(renderFrame);
-                }
-                renderFrame();
-            });
+      function renderFrame() {
+        x = 0;
+        analyser.getByteFrequencyData(dataArray);
+        context.fillStyle = "#000";
+        context.fillRect(0, 0, canvas.width, canvas.height);
+        for (let i = 0; i < bufferLength; i++) {
+          barHeight = dataArray[i];
+
+          const red = 255;
+          const green = 250 * (i / bufferLength);
+          const blue = barHeight + 25 * (i / bufferLength);
+
+          context.fillStyle = `rgb(${red},${green},${blue})`;
+          context.fillRect(
+            x,
+            canvas.height - barHeight / 2,
+            barWidth,
+            barHeight / 2,
+          );
+
+          x += barWidth + 1;
+        }
+        animationFrameId = requestAnimationFrame(renderFrame);
+      }
+      renderFrame();
+    });
 
-        return () => cancelAnimationFrame(animationFrameId);
-    }, []);
-
-    return <>
-        <div>Is recording: {props.isRecording ? "true" : "false"}</div>
-        <canvas ref={canvasRef} />
-    </>;
+    return () => cancelAnimationFrame(animationFrameId);
+  }, []);
+
+  return (
+    <>
+      <div>Is recording: {props.isRecording ? "true" : "false"}</div>
+      <canvas ref={canvasRef} />
+    </>
+  );
 }
 
 export default AudioVisualizer;
diff --git a/app/components/dashboard.js b/app/components/dashboard.js
index cbe74378..ed92fb9a 100644
--- a/app/components/dashboard.js
+++ b/app/components/dashboard.js
@@ -1,122 +1,166 @@
-import { Mulberry32 } from '../utils.js'
-import React, { useState, useEffect } from 'react';
-import AudioVisualizer from './audioVisualizer.js';
+import { Mulberry32 } from "../utils.js";
+import React, { useState, useEffect } from "react";
+import AudioVisualizer from "./audioVisualizer.js";
 
-export function Dashboard(props)
-{
-    const [openIndex, setOpenIndex] = useState(null);
-    const [liveTranscript, setLiveTranscript] = useState("");
+export function Dashboard(props) {
+  const [openIndex, setOpenIndex] = useState(null);
+  const [liveTranscript, setLiveTranscript] = useState("");
 
-    const [fakeTranscriptIndex, setFakeTranscriptIndex] = useState(0);
+  const [fakeTranscriptIndex, setFakeTranscriptIndex] = useState(0);
 
-    const fakeTranscripts = [
-        "This is the first transcript. We are discussing the current situation of our company. We are currently leading the market with a significant margin, and our future outlook is also very promising...",
-        "Here is the second transcript. We are now moving to our next topic, which is the progress in our ongoing projects. Most of them are on schedule and the quality of work is up to our standard...",
-        "This is the third transcript. It's about the financial status of our company. We are doing quite well financially. The revenue for this quarter is higher than expected...",
-        // add more fake transcripts as needed
-    ];
+  const fakeTranscripts = [
+    "This is the first transcript. We are discussing the current situation of our company. We are currently leading the market with a significant margin, and our future outlook is also very promising...",
+    "Here is the second transcript. We are now moving to our next topic, which is the progress in our ongoing projects. Most of them are on schedule and the quality of work is up to our standard...",
+    "This is the third transcript. It's about the financial status of our company. We are doing quite well financially. The revenue for this quarter is higher than expected...",
+    // add more fake transcripts as needed
+  ];
 
-    useEffect(() => {
-        // Randomly select a fake transcript
-        const selectedTranscript = fakeTranscripts[Math.floor(Math.random() * fakeTranscripts.length)];
-        // Split the selected transcript into characters
-        const characters = Array.from(selectedTranscript);
+  useEffect(() => {
+    // Randomly select a fake transcript
+    const selectedTranscript =
+      fakeTranscripts[Math.floor(Math.random() * fakeTranscripts.length)];
+    // Split the selected transcript into characters
+    const characters = Array.from(selectedTranscript);
 
-        let counter = 0;
-        let liveTranscriptCopy = '';
-        let intervalId = setInterval(() => {
-            if (counter < characters.length) {
-                liveTranscriptCopy += characters[counter];
-                setLiveTranscript(liveTranscriptCopy);
-                counter++;
-            } else {
-                clearInterval(intervalId);
-            }
-        }, 50); // delay of 50ms between characters
+    let counter = 0;
+    let liveTranscriptCopy = "";
+    let intervalId = setInterval(() => {
+      if (counter < characters.length) {
+        liveTranscriptCopy += characters[counter];
+        setLiveTranscript(liveTranscriptCopy);
+        counter++;
+      } else {
+        clearInterval(intervalId);
+      }
+    }, 50); // delay of 50ms between characters
 
-        // Cleanup function to clear the interval when the component unmounts
-        return () => clearInterval(intervalId);
-    }, []);
+    // Cleanup function to clear the interval when the component unmounts
+    return () => clearInterval(intervalId);
+  }, []);
 
-    const generateDecibelData = (x) => {
-        let data = [];
-        let random = Mulberry32(123456789 + x);
-        for (let i = 0; i < 50; i++) {
-            data.push(Math.floor(random() * 30) + 10); // generate random values between 10 and 40
-        }
-        return data;
-    };
-    const generateDecibelGraph = (decibelData) => {
-        return decibelData.map((decibel, i) => (

[The remainder of this hunk is garbled in the source. The only content recoverable from the surviving fragments is that both the old and the new version of the component's render include the heading "Capture The Signal, Not The Noise".]
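One note for reviewers: `Mulberry32` is imported from `../utils.js`, which is not part of this diff. Assuming it follows the common mulberry32 algorithm, a reference sketch looks like this (the actual implementation in `utils.js` may differ):

```js
// Reference sketch of the standard mulberry32 seeded PRNG (assumed shape of
// the helper in ../utils.js). Returns a generator of floats in [0, 1).
export function Mulberry32(seed) {
  let a = seed >>> 0; // coerce the seed to an unsigned 32-bit integer
  return function () {
    a = (a + 0x6d2b79f5) >>> 0; // advance the 32-bit state
    let t = a;
    t = Math.imul(t ^ (t >>> 15), t | 1);
    t ^= t + Math.imul(t ^ (t >>> 7), t | 61);
    return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
  };
}
```

Because `generateDecibelData` seeds each graph with `Mulberry32(123456789 + x)`, the "random" decibel bars are deterministic per index and stay stable across re-renders rather than flickering on every update.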