From 74464f5047e334cc7470f3be90b54303a4e9fda9 Mon Sep 17 00:00:00 2001
From: Gokul Mohanarangan
Date: Thu, 20 Jul 2023 20:57:18 +0530
Subject: [PATCH] meeting action items

---
 server_executor_cleaned.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/server_executor_cleaned.py b/server_executor_cleaned.py
index ea6d9693..e818db9f 100644
--- a/server_executor_cleaned.py
+++ b/server_executor_cleaned.py
@@ -38,7 +38,8 @@
 incremental_responses = []
 sorted_transcripts = SortedDict()
 blacklisted_messages = [" Thank you.", " See you next time!",
-                        " Thank you for watching!", " Bye!"]
+                        " Thank you for watching!", " Bye!",
+                        " And that's what I'm talking about."]


 def get_title_and_summary(llm_input_text, last_timestamp):
@@ -71,6 +72,7 @@ def get_title_and_summary(llm_input_text, last_timestamp):
     response = requests.post(LLM_URL, headers=headers, json=data)
     output = json.loads(response.json()["results"][0]["text"])
     output["description"] = output.pop("summary")
+    output["transcript"] = llm_input_text
     output["timestamp"] =\
         str(datetime.timedelta(seconds=round(last_timestamp)))
     incremental_responses.append(output)
@@ -189,7 +191,7 @@ class AudioStreamTrack(MediaStreamTrack):
             else None
         )

-        if len(transcription_text) > 25:
+        if len(transcription_text) > 500:
             llm_input_text = transcription_text
             transcription_text = ""
             llm_result = run_in_executor(get_title_and_summary,
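
Note (illustration only, not part of the patch): a minimal self-contained sketch of the pattern these hunks adjust, namely dropping blacklisted filler segments, buffering transcript text until it crosses the 500-character threshold, and keeping the raw transcript alongside the generated summary. The names summarize(), handle_segment(), elapsed_seconds, and state are hypothetical stand-ins; only the blacklist contents, the threshold, and the transcript/timestamp fields come from the hunks above.

import datetime

BLACKLISTED_MESSAGES = [" Thank you.", " See you next time!",
                        " Thank you for watching!", " Bye!",
                        " And that's what I'm talking about."]


def summarize(text):
    # Hypothetical stand-in for the LLM request made in get_title_and_summary().
    return {"title": text[:40], "description": text[:120]}


def handle_segment(segment, elapsed_seconds, state):
    # Skip common filler phrases that add nothing to a meeting summary.
    if segment in BLACKLISTED_MESSAGES:
        return
    state["buffer"] += segment
    # Only summarize once enough transcript has accumulated (500 characters here).
    if len(state["buffer"]) > 500:
        output = summarize(state["buffer"])
        output["transcript"] = state["buffer"]  # keep the source text with its summary
        output["timestamp"] = str(datetime.timedelta(seconds=round(elapsed_seconds)))
        state["responses"].append(output)
        state["buffer"] = ""

Under these assumptions, feeding short segments into handle_segment() with state = {"buffer": "", "responses": []} appends roughly one summary entry per 500 characters of retained transcript, which is the behavior the threshold change targets.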