diff --git a/www/app/styles/globals.scss b/www/app/styles/globals.scss
index 90f54fe5..3845af58 100644
--- a/www/app/styles/globals.scss
+++ b/www/app/styles/globals.scss
@@ -35,6 +35,6 @@ body {
 
 @media (max-width: 768px) {
   .audio-source-dropdown .Dropdown-control {
-    max-width: 90px;
+    max-width: 200px;
   }
 }
diff --git a/www/app/transcripts/dashboard.tsx b/www/app/transcripts/dashboard.tsx
index b74082d0..4f2fdf64 100644
--- a/www/app/transcripts/dashboard.tsx
+++ b/www/app/transcripts/dashboard.tsx
@@ -103,7 +103,11 @@ export function Dashboard({
         ))}
         {topics.length === 0 && (
-          <div ...>No topics yet</div>
+          <div ...>
+            Discussion topics will appear here after you start recording. It
+            may take up to 5 minutes of conversation for the first topic to
+            appear.
+          </div>
         )}
diff --git a/www/app/transcripts/new/page.tsx b/www/app/transcripts/new/page.tsx
index 4eb5f47c..85e7deb8 100644
--- a/www/app/transcripts/new/page.tsx
+++ b/www/app/transcripts/new/page.tsx
@@ -26,7 +26,7 @@ const App = () => {
   }, []);
 
   const api = getApi();
-  const transcript = useTranscript();
+  const transcript = useTranscript(api);
   const webRTC = useWebRTC(stream, transcript.response?.id, api);
   const webSockets = useWebSockets(transcript.response?.id);
   const {
diff --git a/www/app/transcripts/recorder.tsx b/www/app/transcripts/recorder.tsx
index eae79b18..ccdbbda4 100644
--- a/www/app/transcripts/recorder.tsx
+++ b/www/app/transcripts/recorder.tsx
@@ -58,6 +58,7 @@ export default function Recorder(props: RecorderProps) {
   const [wavesurfer, setWavesurfer] = useState(null);
   const [record, setRecord] = useState(null);
   const [isRecording, setIsRecording] = useState(false);
+  const [hasRecorded, setHasRecorded] = useState(false);
   const [isPlaying, setIsPlaying] = useState(false);
   const [deviceId, setDeviceId] = useState(null);
   const [currentTime, setCurrentTime] = useState(0);
@@ -210,8 +211,7 @@ export default function Recorder(props: RecorderProps) {
       props.onStop();
       record.stopRecording();
       setIsRecording(false);
-      const playBtn = document.getElementById("play-btn");
-      if (playBtn) playBtn.removeAttribute("disabled");
+      setHasRecorded(true);
     } else {
       const stream = await props.getAudioStream(deviceId);
       props.setStream(stream);
@@ -236,36 +236,45 @@
   return (
     <div ...>
-      <button ...>...</button>
-      &nbsp;
-      <button ...>...</button>
-      &nbsp;
-      <button ...>...</button>
-      ...
+      {!hasRecorded && (
+        <>
+          <button ...>...</button>
+          &nbsp;
+          <button ...>...</button>
+          &nbsp;
+          ...
+        </>
+      )}
+
+      {hasRecorded && (
+        <>
+          <button ...>...</button>
+          ...
+        </>
+      )}
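The recorder.tsx hunks above replace an imperative DOM tweak (looking up "play-btn" by id and removing its disabled attribute) with a hasRecorded flag, so React state decides which group of controls is rendered. Below is a minimal sketch of that pattern; the button labels, handler names, and markup are placeholders, and only the isRecording/hasRecorded flow is taken from the diff. The real component wires these handlers to the wavesurfer record plugin and the props shown in the earlier hunks.

import { useState } from "react";

// Sketch only: markup and handlers are illustrative, not the project's actual
// Recorder component. Stopping a recording sets hasRecorded, which swaps the
// rendered controls instead of toggling a DOM attribute by hand.
export default function RecorderControlsSketch() {
  const [isRecording, setIsRecording] = useState(false);
  const [hasRecorded, setHasRecorded] = useState(false);

  const toggleRecording = () => {
    if (isRecording) {
      // the real component calls record.stopRecording() and props.onStop() here
      setIsRecording(false);
      setHasRecorded(true);
    } else {
      // the real component acquires the audio stream and starts recording here
      setIsRecording(true);
    }
  };

  return (
    <div>
      {/* Before a recording exists, only the record/stop control renders. */}
      {!hasRecorded && (
        <button onClick={toggleRecording}>
          {isRecording ? "Stop" : "Record"}
        </button>
      )}
      {/* After recording, playback controls render already enabled, so the
          old "remove the disabled attribute" step is no longer needed. */}
      {hasRecorded && <button>Play</button>}
    </div>
  );
}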
diff --git a/www/app/transcripts/useTranscript.ts b/www/app/transcripts/useTranscript.ts
index 14e67c79..07b614f4 100644
--- a/www/app/transcripts/useTranscript.ts
+++ b/www/app/transcripts/useTranscript.ts
@@ -1,6 +1,5 @@
 import { useEffect, useState } from "react";
 import { DefaultApi, V1TranscriptsCreateRequest } from "../api/apis/DefaultApi";
-import { Configuration } from "../api/runtime";
 import { GetTranscript } from "../api";
 import getApi from "../lib/getApi";
 
@@ -11,13 +10,11 @@ type UseTranscript = {
   createTranscript: () => void;
 };
 
-const useTranscript = (): UseTranscript => {
+const useTranscript = (api: DefaultApi): UseTranscript => {
   const [response, setResponse] = useState(null);
   const [loading, setLoading] = useState(false);
   const [error, setError] = useState(null);
 
-  const api = getApi();
-
   const createTranscript = () => {
     setLoading(true);
     const requestParameters: V1TranscriptsCreateRequest = {
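The useTranscript change makes the hook take the generated DefaultApi client from its caller instead of constructing one itself with getApi(), which is why new/page.tsx now passes the same api instance it already hands to useWebRTC. A rough sketch of the resulting shape follows; the return-type fields beyond createTranscript and the state type parameters are assumptions inferred from the hunk, and the body of the create call (which the diff does not touch) is left out.

import { useState } from "react";
import { DefaultApi } from "../api/apis/DefaultApi";
import { GetTranscript } from "../api";

// Assumed return shape: only createTranscript is visible in the hunk; response,
// loading, and error are inferred from the state declared in the hook.
type UseTranscript = {
  response: GetTranscript | null;
  loading: boolean;
  error: Error | null;
  createTranscript: () => void;
};

// The client is injected, so the page creates it once with getApi() and
// shares the same instance between useTranscript, useWebRTC, and friends.
const useTranscript = (api: DefaultApi): UseTranscript => {
  const [response, setResponse] = useState<GetTranscript | null>(null);
  const [loading, setLoading] = useState(false);
  const [error, setError] = useState<Error | null>(null);

  const createTranscript = () => {
    setLoading(true);
    // Build the V1TranscriptsCreateRequest and call the generated client via
    // `api` here, exactly as the existing code does; the diff only changes
    // where `api` comes from, not this body.
  };

  return { response, loading, error, createTranscript };
};

export default useTranscript;

Call site, as shown in the new/page.tsx hunk:

  const api = getApi();
  const transcript = useTranscript(api);
  const webRTC = useWebRTC(stream, transcript.response?.id, api);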