Ability to load past meetings + URL management

This commit is contained in:
Koper
2023-09-14 23:05:13 +07:00
parent 07204ee2db
commit c9d01a9d30
13 changed files with 437 additions and 59 deletions

View File

@@ -97,6 +97,45 @@ export interface V1TranscriptsListRequest {
*
*/
export class DefaultApi extends runtime.BaseAPI {
/**
 * Endpoint that serves Prometheus metrics.
 * Metrics
 *
 * Low-level variant: returns the full ApiResponse wrapper so callers can
 * inspect headers/status; `metrics()` below unwraps the value.
 */
async metricsRaw(
  initOverrides?: RequestInit | runtime.InitOverrideFunction,
): Promise<runtime.ApiResponse<any>> {
  // No query parameters or extra headers are needed for this endpoint.
  const query: any = {};
  const headers: runtime.HTTPHeaders = {};

  const rawResponse = await this.request(
    {
      path: `/metrics`,
      method: "GET",
      headers,
      query,
    },
    initOverrides,
  );

  // Prometheus normally serves plain text, but fall back to JSON parsing
  // when the server advertises a JSON content type.
  const contentType = rawResponse.headers.get("content-type");
  if (this.isJsonMime(contentType)) {
    return new runtime.JSONApiResponse<any>(rawResponse);
  }
  return new runtime.TextApiResponse(rawResponse) as any;
}
/**
 * Endpoint that serves Prometheus metrics.
 * Metrics
 *
 * Convenience wrapper around `metricsRaw` that resolves to the parsed body.
 */
async metrics(
  initOverrides?: RequestInit | runtime.InitOverrideFunction,
): Promise<any> {
  const wrapped = await this.metricsRaw(initOverrides);
  return wrapped.value();
}
/**
* Rtc Offer
*/

View File

@@ -54,7 +54,19 @@ export interface GetTranscript {
* @type {any}
* @memberof GetTranscript
*/
summary: any | null;
title: any | null;
/**
*
* @type {any}
* @memberof GetTranscript
*/
shortSummary: any | null;
/**
*
* @type {any}
* @memberof GetTranscript
*/
longSummary: any | null;
/**
*
* @type {any}
@@ -85,7 +97,9 @@ export function instanceOfGetTranscript(value: object): boolean {
isInstance = isInstance && "status" in value;
isInstance = isInstance && "locked" in value;
isInstance = isInstance && "duration" in value;
isInstance = isInstance && "summary" in value;
isInstance = isInstance && "title" in value;
isInstance = isInstance && "shortSummary" in value;
isInstance = isInstance && "longSummary" in value;
isInstance = isInstance && "createdAt" in value;
isInstance = isInstance && "sourceLanguage" in value;
isInstance = isInstance && "targetLanguage" in value;
@@ -110,7 +124,9 @@ export function GetTranscriptFromJSONTyped(
status: json["status"],
locked: json["locked"],
duration: json["duration"],
summary: json["summary"],
title: json["title"],
shortSummary: json["short_summary"],
longSummary: json["long_summary"],
createdAt: json["created_at"],
sourceLanguage: json["source_language"],
targetLanguage: json["target_language"],
@@ -130,7 +146,9 @@ export function GetTranscriptToJSON(value?: GetTranscript | null): any {
status: value.status,
locked: value.locked,
duration: value.duration,
summary: value.summary,
title: value.title,
short_summary: value.shortSummary,
long_summary: value.longSummary,
created_at: value.createdAt,
source_language: value.sourceLanguage,
target_language: value.targetLanguage,

View File

@@ -36,7 +36,19 @@ export interface UpdateTranscript {
* @type {any}
* @memberof UpdateTranscript
*/
summary?: any | null;
title?: any | null;
/**
*
* @type {any}
* @memberof UpdateTranscript
*/
shortSummary?: any | null;
/**
*
* @type {any}
* @memberof UpdateTranscript
*/
longSummary?: any | null;
}
/**
@@ -62,7 +74,13 @@ export function UpdateTranscriptFromJSONTyped(
return {
name: !exists(json, "name") ? undefined : json["name"],
locked: !exists(json, "locked") ? undefined : json["locked"],
summary: !exists(json, "summary") ? undefined : json["summary"],
title: !exists(json, "title") ? undefined : json["title"],
shortSummary: !exists(json, "short_summary")
? undefined
: json["short_summary"],
longSummary: !exists(json, "long_summary")
? undefined
: json["long_summary"],
};
}
@@ -76,6 +94,8 @@ export function UpdateTranscriptToJSON(value?: UpdateTranscript | null): any {
return {
name: value.name,
locked: value.locked,
summary: value.summary,
title: value.title,
short_summary: value.shortSummary,
long_summary: value.longSummary,
};
}

View File

@@ -0,0 +1,67 @@
"use client";
import Modal from "../modal";
import getApi from "../../lib/getApi";
import useTranscript from "../useTranscript";
import useTopics from "../useTopics";
import useWaveform from "../useWaveform";
import { Dashboard } from "../dashboard";
import Recorder from "../recorder";
import { Topic } from "../webSocketTypes";
import React, { useEffect, useState } from "react";
import "../../styles/button.css";
type TranscriptDetails = {
  params: {
    // Route parameter: id of the past meeting's transcript to display.
    transcriptId: string;
  };
};

/**
 * Page component that loads a past meeting by transcript id and renders the
 * recorded waveform (Recorder in playback mode) plus the Dashboard summary.
 *
 * Shows a "not found" modal when any of the three fetches errors, and a
 * loading modal until transcript, topics and waveform have all resolved.
 */
export default function TranscriptDetails(details: TranscriptDetails) {
  const api = getApi();
  // stream is null: we are viewing a past meeting, never recording here.
  const transcript = useTranscript(null, api, details.params.transcriptId);
  const topics = useTopics(api, details.params.transcriptId);
  const waveform = useWaveform(api, details.params.transcriptId);
  const useActiveTopic = useState<Topic | null>(null);

  if (transcript?.error || topics?.error || waveform?.error) {
    return (
      <Modal
        title="Transcription Not Found"
        text="A transcription with this ID does not exist."
      />
    );
  }

  return (
    <>
      <div className="w-full flex flex-col items-center h-[100svh]">
        {transcript?.loading === true ||
        waveform?.loading === true ||
        topics?.loading === true ? (
          <Modal title="Loading" text="Loading transcript..." />
        ) : (
          <>
            <Recorder
              topics={topics?.topics || []}
              useActiveTopic={useActiveTopic}
              waveform={waveform?.waveform}
              isPastMeeting={true}
              transcriptId={transcript?.response?.id}
            />
            <Dashboard
              transcriptionText={""}
              finalSummary={{ summary: transcript?.response?.shortSummary }}
              topics={topics?.topics || []}
              disconnected={false}
              useActiveTopic={useActiveTopic}
            />
          </>
        )}
      </div>
    </>
  );
}

View File

@@ -0,0 +1,15 @@
type ModalProps = {
  // Heading shown in blue at the top of the card.
  title: string;
  // Body text rendered below the heading.
  text: string;
};

/**
 * Simple centered message card used for loading / error states.
 * Purely presentational; renders whatever title and text it is given.
 */
export default function Modal({ title, text }: ModalProps) {
  return (
    <div className="flex flex-col items-center justify-center w-fit bg-white px-6 py-8 mt-8 rounded-xl">
      <h1 className="text-2xl font-bold text-blue-500">{title}</h1>
      <p className="text-gray-500 text-center mt-5">{text}</p>
    </div>
  );
}

View File

@@ -10,7 +10,7 @@ import "../../styles/button.css";
import { Topic } from "../webSocketTypes";
import getApi from "../../lib/getApi";
const App = () => {
const TranscriptCreate = () => {
const [stream, setStream] = useState<MediaStream | null>(null);
const [disconnected, setDisconnected] = useState<boolean>(false);
const useActiveTopic = useState<Topic | null>(null);
@@ -26,9 +26,9 @@ const App = () => {
}, []);
const api = getApi();
const transcript = useTranscript(api);
const webRTC = useWebRTC(stream, transcript.response?.id, api);
const webSockets = useWebSockets(transcript.response?.id);
const transcript = useTranscript(stream, api);
const webRTC = useWebRTC(stream, transcript?.response?.id, api);
const webSockets = useWebSockets(transcript?.response?.id);
const {
loading,
permissionOk,
@@ -52,6 +52,7 @@ const App = () => {
getAudioStream={getAudioStream}
audioDevices={audioDevices}
useActiveTopic={useActiveTopic}
isPastMeeting={false}
/>
<Dashboard
@@ -97,4 +98,4 @@ const App = () => {
);
};
export default App;
export default TranscriptCreate;

View File

@@ -12,19 +12,22 @@ import "react-dropdown/style.css";
import { formatTime } from "../lib/time";
import { Topic } from "./webSocketTypes";
import { AudioWaveform } from "../api";
const AudioInputsDropdown: React.FC<{
audioDevices: Option[];
audioDevices?: Option[];
setDeviceId: React.Dispatch<React.SetStateAction<string | null>>;
disabled: boolean;
}> = (props) => {
const [ddOptions, setDdOptions] = useState<Option[]>([]);
useEffect(() => {
setDdOptions(props.audioDevices);
props.setDeviceId(
props.audioDevices.length > 0 ? props.audioDevices[0].value : null,
);
if (props.audioDevices) {
setDdOptions(props.audioDevices);
props.setDeviceId(
props.audioDevices.length > 0 ? props.audioDevices[0].value : null,
);
}
}, [props.audioDevices]);
const handleDropdownChange = (option: Option) => {
@@ -42,15 +45,18 @@ const AudioInputsDropdown: React.FC<{
};
type RecorderProps = {
setStream: React.Dispatch<React.SetStateAction<MediaStream | null>>;
onStop: () => void;
setStream?: React.Dispatch<React.SetStateAction<MediaStream | null>>;
onStop?: () => void;
topics: Topic[];
getAudioStream: (deviceId: string | null) => Promise<MediaStream | null>;
audioDevices: Option[];
getAudioStream?: (deviceId: string | null) => Promise<MediaStream | null>;
audioDevices?: Option[];
useActiveTopic: [
Topic | null,
React.Dispatch<React.SetStateAction<Topic | null>>,
];
waveform?: AudioWaveform | null;
isPastMeeting: boolean;
transcriptId?: string | null;
};
export default function Recorder(props: RecorderProps) {
@@ -58,7 +64,7 @@ export default function Recorder(props: RecorderProps) {
const [wavesurfer, setWavesurfer] = useState<WaveSurfer | null>(null);
const [record, setRecord] = useState<RecordPlugin | null>(null);
const [isRecording, setIsRecording] = useState<boolean>(false);
const [hasRecorded, setHasRecorded] = useState<boolean>(false);
const [hasRecorded, setHasRecorded] = useState<boolean>(props.isPastMeeting);
const [isPlaying, setIsPlaying] = useState<boolean>(false);
const [deviceId, setDeviceId] = useState<string | null>(null);
const [currentTime, setCurrentTime] = useState<number>(0);
@@ -73,9 +79,6 @@ export default function Recorder(props: RecorderProps) {
const topicsRef = useRef(props.topics);
useEffect(() => {
const playBtn = document.getElementById("play-btn");
if (playBtn) playBtn.setAttribute("disabled", "true");
if (waveformRef.current) {
const _wavesurfer = WaveSurfer.create({
container: waveformRef.current,
@@ -86,7 +89,11 @@ export default function Recorder(props: RecorderProps) {
autoCenter: true,
barWidth: 2,
height: 90,
url: props.transcriptId
? `${process.env.NEXT_PUBLIC_API_URL}/v1/transcripts/${props.transcriptId}/audio/mp3`
: undefined,
});
const wsWrapper = _wavesurfer.getWrapper();
wsWrapper.style.cursor = "pointer";
wsWrapper.style.backgroundColor = "#e0c3fc42";
@@ -103,6 +110,8 @@ export default function Recorder(props: RecorderProps) {
setRecord(_wavesurfer.registerPlugin(RecordPlugin.create()));
setWaveRegions(_wavesurfer.registerPlugin(CustomRegionsPlugin.create()));
if (props.transcriptId) _wavesurfer.toggleInteraction(true);
setWavesurfer(_wavesurfer);
return () => {
_wavesurfer.destroy();
@@ -208,18 +217,21 @@ export default function Recorder(props: RecorderProps) {
if (!record) return console.log("no record");
if (record.isRecording()) {
props.onStop();
if (props.onStop) props.onStop();
record.stopRecording();
setIsRecording(false);
setHasRecorded(true);
} else {
} else if (props.getAudioStream) {
const stream = await props.getAudioStream(deviceId);
props.setStream(stream);
if (props.setStream) props.setStream(stream);
waveRegions?.clearRegions();
if (stream) {
await record.startRecording(stream);
setIsRecording(true);
}
} else {
throw new Error("No getAudioStream function provided");
}
};
@@ -266,13 +278,26 @@ export default function Recorder(props: RecorderProps) {
>
{isPlaying ? "Pause" : "Play"}
</button>
<a
id="download-recording"
title="Download recording"
className="invisible w-9 m-auto text-center cursor-pointer text-blue-300 hover:text-blue-700"
>
<FontAwesomeIcon icon={faDownload} />
</a>
{props.transcriptId && (
<a
title="Download recording"
className="w-9 m-auto text-center cursor-pointer text-blue-300 hover:text-blue-700"
href={`${process.env.NEXT_PUBLIC_API_URL}/v1/transcripts/${props.transcriptId}/audio/mp3`}
>
<FontAwesomeIcon icon={faDownload} />
</a>
)}
{!props.transcriptId && (
<a
id="download-recording"
title="Download recording"
className="invisible w-9 m-auto text-center cursor-pointer text-blue-300 hover:text-blue-700"
>
<FontAwesomeIcon icon={faDownload} />
</a>
)}
</>
)}
</div>

View File

@@ -0,0 +1,48 @@
import { useEffect, useState } from "react";
import {
DefaultApi,
V1TranscriptGetAudioMp3Request,
} from "../api/apis/DefaultApi";
import {} from "../api";
import { useError } from "../(errors)/errorContext";
type Mp3Response = {
  // URL of the transcript's MP3 audio, or null until loaded.
  url: string | null;
  // True while the request is in flight.
  loading: boolean;
  // Set when the request fails; also reported to the global error context.
  error: Error | null;
};

/**
 * React hook that fetches the MP3 audio URL for a transcript.
 * Refetches whenever `id` changes.
 *
 * @param api - generated API client
 * @param id  - transcript id; a falsy id throws (programming error)
 */
const useMp3 = (api: DefaultApi, id: string): Mp3Response => {
  const [url, setUrl] = useState<string | null>(null);
  const [loading, setLoading] = useState<boolean>(false);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();

  const getMp3 = (id: string) => {
    if (!id) throw new Error("Transcript ID is required to get transcript Mp3");
    setLoading(true);
    const requestParameters: V1TranscriptGetAudioMp3Request = {
      transcriptId: id,
    };
    api
      .v1TranscriptGetAudioMp3(requestParameters)
      .then((result) => {
        setUrl(result);
        setLoading(false);
        console.debug("Transcript Mp3 loaded:", result);
      })
      .catch((err) => {
        // Bug fix: clear the loading flag on failure, otherwise the UI
        // shows a loading state forever after an error.
        setError(err);
        setErrorState(err);
        setLoading(false);
      });
  };

  useEffect(() => {
    getMp3(id);
  }, [id]);

  return { url, loading, error };
};

export default useMp3;

View File

@@ -0,0 +1,50 @@
import { useEffect, useState } from "react";
import {
DefaultApi,
V1TranscriptGetTopicsRequest,
} from "../api/apis/DefaultApi";
import { TranscriptTopic } from "../api";
import { useError } from "../(errors)/errorContext";
import { Topic } from "./webSocketTypes";
type TranscriptTopics = {
  // Topics of the transcript, or null until loaded.
  topics: Topic[] | null;
  // True while the request is in flight.
  loading: boolean;
  // Set when the request fails; also reported to the global error context.
  error: Error | null;
};

/**
 * React hook that fetches the topics of a transcript.
 * Refetches whenever `id` changes.
 *
 * Renamed from `useTranscript` (copy-paste leftover from the sibling hook);
 * the default export is unchanged, so callers importing it as `useTopics`
 * are unaffected.
 *
 * @param api - generated API client
 * @param id  - transcript id; a falsy id throws (programming error)
 */
const useTopics = (api: DefaultApi, id: string): TranscriptTopics => {
  const [topics, setTopics] = useState<Topic[] | null>(null);
  const [loading, setLoading] = useState<boolean>(false);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();

  const getTopics = (id: string) => {
    if (!id)
      throw new Error("Transcript ID is required to get transcript topics");
    setLoading(true);
    const requestParameters: V1TranscriptGetTopicsRequest = {
      transcriptId: id,
    };
    api
      .v1TranscriptGetTopics(requestParameters)
      .then((result) => {
        setTopics(result);
        setLoading(false);
        console.debug("Transcript topics loaded:", result);
      })
      .catch((err) => {
        // Bug fix: clear the loading flag on failure, otherwise the UI
        // shows a loading state forever after an error.
        setError(err);
        setErrorState(err);
        setLoading(false);
      });
  };

  useEffect(() => {
    getTopics(id);
  }, [id]);

  return { topics, loading, error };
};

export default useTopics;

View File

@@ -1,19 +1,56 @@
import { useEffect, useState } from "react";
import { DefaultApi, V1TranscriptsCreateRequest } from "../api/apis/DefaultApi";
import {
DefaultApi,
V1TranscriptGetRequest,
V1TranscriptsCreateRequest,
} from "../api/apis/DefaultApi";
import { GetTranscript } from "../api";
import { useError } from "../(errors)/errorContext";
type UseTranscript = {
type Transcript = {
response: GetTranscript | null;
loading: boolean;
createTranscript: () => void;
error: Error | null;
};
const useTranscript = (api: DefaultApi): UseTranscript => {
const useTranscript = (
stream: MediaStream | null,
api: DefaultApi,
id: string | null = null,
): Transcript => {
const [response, setResponse] = useState<GetTranscript | null>(null);
const [loading, setLoading] = useState<boolean>(false);
const [error, setErrorState] = useState<Error | null>(null);
const { setError } = useError();
const getOrCreateTranscript = (id: string | null) => {
if (id) {
getTranscript(id);
} else if (stream) {
createTranscript();
}
};
const getTranscript = (id: string | null) => {
if (!id) throw new Error("Transcript ID is required to get transcript");
setLoading(true);
const requestParameters: V1TranscriptGetRequest = {
transcriptId: id,
};
api
.v1TranscriptGet(requestParameters)
.then((result) => {
setResponse(result);
setLoading(false);
console.debug("New transcript created:", result);
})
.catch((err) => {
setError(err);
setErrorState(err);
});
};
const createTranscript = () => {
setLoading(true);
const requestParameters: V1TranscriptsCreateRequest = {
@@ -37,14 +74,15 @@ const useTranscript = (api: DefaultApi): UseTranscript => {
})
.catch((err) => {
setError(err);
setErrorState(err);
});
};
useEffect(() => {
createTranscript();
}, []);
getOrCreateTranscript(id);
}, [id, stream]);
return { response, loading, createTranscript };
return { response, loading, error };
};
export default useTranscript;

View File

@@ -0,0 +1,49 @@
import { useEffect, useState } from "react";
import {
DefaultApi,
V1TranscriptGetAudioWaveformRequest,
} from "../api/apis/DefaultApi";
import { AudioWaveform } from "../api";
import { useError } from "../(errors)/errorContext";
type AudioWaveFormResponse = {
  // Waveform data for the transcript's audio, or null until loaded.
  waveform: AudioWaveform | null;
  // True while the request is in flight.
  loading: boolean;
  // Set when the request fails; also reported to the global error context.
  error: Error | null;
};

/**
 * React hook that fetches the audio waveform of a transcript.
 * Refetches whenever `id` changes.
 *
 * @param api - generated API client
 * @param id  - transcript id; a falsy id throws (programming error)
 */
const useWaveform = (api: DefaultApi, id: string): AudioWaveFormResponse => {
  const [waveform, setWaveform] = useState<AudioWaveform | null>(null);
  const [loading, setLoading] = useState<boolean>(false);
  const [error, setErrorState] = useState<Error | null>(null);
  const { setError } = useError();

  const getWaveform = (id: string) => {
    if (!id)
      throw new Error("Transcript ID is required to get transcript waveform");
    setLoading(true);
    const requestParameters: V1TranscriptGetAudioWaveformRequest = {
      transcriptId: id,
    };
    api
      .v1TranscriptGetAudioWaveform(requestParameters)
      .then((result) => {
        setWaveform(result);
        setLoading(false);
        console.debug("Transcript waveform loaded:", result);
      })
      .catch((err) => {
        // Bug fix: clear the loading flag on failure, otherwise the UI
        // shows a loading state forever after an error.
        setError(err);
        setErrorState(err);
        setLoading(false);
      });
  };

  useEffect(() => {
    getWaveform(id);
  }, [id]);

  return { waveform, loading, error };
};

export default useWaveform;

View File

@@ -19,6 +19,8 @@ const useWebRTC = (
return;
}
console.debug("Using WebRTC", stream, transcriptId);
let p: Peer;
try {

View File

@@ -1,6 +1,7 @@
import { useEffect, useState } from "react";
import { Topic, FinalSummary, Status } from "./webSocketTypes";
import { useError } from "../(errors)/errorContext";
import { useRouter } from "next/navigation";
type UseWebSockets = {
transcriptText: string;
@@ -17,6 +18,7 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
});
const [status, setStatus] = useState<Status>({ value: "disconnected" });
const { setError } = useError();
const router = useRouter();
useEffect(() => {
document.onkeyup = (e) => {
@@ -94,22 +96,21 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
break;
case "FINAL_LONG_SUMMARY":
if (message.data) {
message.data = { summary: message.data.long_summary };
setFinalSummary(message.data);
console.debug("FINAL_LONG_SUMMARY event:", message.data);
}
break;
case "FINAL_SUMMARY":
if (message.data) {
setFinalSummary(message.data);
console.debug("FINAL_SUMMARY event:", message.data);
}
console.debug("FINAL_LONG_SUMMARY event:", message.data);
break;
case "FINAL_SHORT_SUMMARY":
console.debug("FINAL_SHORT_SUMMARY event:", message.data);
if (message.data) {
setFinalSummary(message.data);
const newUrl = "/transcripts/" + transcriptId;
router.push(newUrl);
console.debug(
"FINAL_SUMMARY event:",
message.data,
"newUrl",
newUrl,
);
}
break;
case "FINAL_TITLE":
@@ -137,10 +138,15 @@ export const useWebSockets = (transcriptId: string | null): UseWebSockets => {
ws.onclose = (event) => {
console.debug("WebSocket connection closed");
if (event.code !== 1000) {
setError(
new Error(`WebSocket closed unexpectedly with code: ${event.code}`),
);
switch (event.code) {
case 1000: // Normal Closure:
case 1001: // Going Away:
case 1005:
break;
default:
setError(
new Error(`WebSocket closed unexpectedly with code: ${event.code}`),
);
}
};