mirror of https://github.com/Monadical-SAS/reflector.git
synced 2025-12-21 04:39:06 +00:00
minor refactor
.gitignore (vendored): 2 lines changed
@@ -167,7 +167,7 @@ transcript_timestamps.txt
 *.pkl
 transcript_*.txt
 test_*.txt
-*.png
+wordcloud*.png
 *.ini
 test_samples/
 *.wav
@@ -7,7 +7,7 @@ directory="."
 text_file_pattern="transcript_*.txt"
 pickle_file_pattern="*.pkl"
 html_file_pattern="*.html"
-png_file_pattern="*.png"
+png_file_pattern="wordcloud*.png"
 
 find "$directory" -type f -name "$text_file_pattern" -delete
 find "$directory" -type f -name "$pickle_file_pattern" -delete
@@ -11,9 +11,10 @@ import ast
 import stamina
 from aiortc import (RTCPeerConnection, RTCSessionDescription)
 from aiortc.contrib.media import (MediaPlayer, MediaRelay)
+from utils.server_utils import Mutex
 
 logger = logging.getLogger("pc")
-file_lock = threading.Lock()
+file_lock = Mutex(open("test_sm_6.txt", "a"))
 
 config = configparser.ConfigParser()
 config.read('config.ini')
@@ -24,8 +25,7 @@ class StreamClient:
         signaling,
         url="http://127.0.0.1:1250",
         play_from=None,
-        ping_pong=False,
-        audio_stream=None
+        ping_pong=False
     ):
         self.signaling = signaling
         self.server_url = url
@@ -36,7 +36,6 @@ class StreamClient:
         self.pc = RTCPeerConnection()
 
         self.loop = asyncio.get_event_loop()
-        # self.loop = asyncio.new_event_loop()
         self.relay = None
         self.pcs = set()
         self.time_start = None
@@ -68,7 +67,6 @@ class StreamClient:
         channel.send(message)
 
     def current_stamp(self):
-
         if self.time_start is None:
             self.time_start = time.time()
             return 0
@@ -94,9 +92,7 @@ class StreamClient:
         @pc.on("track")
         def on_track(track):
             print("Sending %s" % track.kind)
-            # Trials
             self.pc.addTrack(track)
-            # self.pc.addTrack(self.microphone)
 
             @track.on("ended")
             async def on_ended():
@@ -104,7 +100,6 @@ class StreamClient:
 
         self.pc.addTrack(audio)
 
-        # DataChannel
         channel = pc.createDataChannel("data-channel")
         self.channel_log(channel, "-", "created by local party")
 
@@ -155,14 +150,12 @@ class StreamClient:
         while True:
             msg = await self.queue.get()
             msg = ast.literal_eval(msg)
-            with file_lock:
-                with open("test_sm_6.txt", "a") as f:
-                    f.write(msg["text"])
+            with file_lock.lock() as file:
+                file.write(msg["text"])
             yield msg["text"]
             self.queue.task_done()
 
     async def start(self):
-        print("Starting stream client")
         coro = self.run_offer(self.pc, self.signaling)
         task = asyncio.create_task(coro)
         await task
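For context on the receive loop in the hunk above: each queued message appears to be a Python dict literal serialized as a string, so ast.literal_eval turns it back into a dict before the "text" field is written under the lock. A minimal sketch of that parsing step, assuming a payload of that shape (the "ts" field below is purely illustrative):

import ast

# Hypothetical data-channel payload: a dict literal rendered as a string.
# ast.literal_eval only evaluates Python literals, so unlike eval() it will
# not execute arbitrary code embedded in the message.
raw = "{'text': 'hello world', 'ts': 1.25}"  # 'ts' is illustrative, not from the diff
msg = ast.literal_eval(raw)
assert msg["text"] == "hello world"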
@@ -1,7 +1,25 @@
 import asyncio
 from functools import partial
+import contextlib
+from threading import Lock
+from typing import ContextManager, Generic, TypeVar
 
 def run_in_executor(func, *args, executor=None, **kwargs):
     callback = partial(func, *args, **kwargs)
     loop = asyncio.get_event_loop()
     return asyncio.get_event_loop().run_in_executor(executor, callback)
+
+
+T = TypeVar("T")
+class Mutex(Generic[T]):
+    def __init__(self, value: T):
+        self.__value = value
+        self.__lock = Lock()
+
+    @contextlib.contextmanager
+    def lock(self) -> ContextManager[T]:
+        self.__lock.acquire()
+        try:
+            yield self.__value
+        finally:
+            self.__lock.release()
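The Mutex helper added above (imported in the stream client as utils.server_utils.Mutex) pairs a value with a threading.Lock and only hands the value out inside its lock() context manager; that is what lets the client replace the separate threading.Lock plus per-write open() with a single shared file handle. A minimal usage sketch, assuming the class as committed; the file path and helper function are illustrative:

from utils.server_utils import Mutex

# Wrap the shared file handle once; all writers must go through .lock().
guarded_file = Mutex(open("example_output.txt", "a"))  # illustrative path

def append_line(text: str) -> None:
    # The handle is only reachable inside the context manager, so concurrent
    # writers cannot interleave partial lines.
    with guarded_file.lock() as f:
        f.write(text + "\n")
        f.flush()

append_line("first line")
append_line("second line")

Keeping the wrapped value private forces every access to go through the lock, which is the point of the change in the receive loop shown earlier.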
@@ -19,6 +19,9 @@ def preprocess_sentence(sentence):
     return ' '.join(tokens)
 
 def compute_similarity(sent1, sent2):
+    """
+    Compute the similarity
+    """
     tfidf_vectorizer = TfidfVectorizer()
     if sent1 is not None and sent2 is not None:
         tfidf_matrix = tfidf_vectorizer.fit_transform([sent1, sent2])
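For the function documented in this hunk: fitting a TfidfVectorizer on the two sentences and comparing the resulting rows is the standard scikit-learn route to a cosine-similarity score. The diff does not show how compute_similarity finishes, so the following is only a sketch of that common pattern, not the repository's implementation:

from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

def tfidf_cosine_similarity(sent1: str, sent2: str) -> float:
    # Fit TF-IDF on just the two sentences and compare their vectors.
    tfidf_vectorizer = TfidfVectorizer()
    tfidf_matrix = tfidf_vectorizer.fit_transform([sent1, sent2])
    # Comparing two single rows yields a 1x1 matrix; return its only entry.
    return float(cosine_similarity(tfidf_matrix[0], tfidf_matrix[1])[0][0])

print(tfidf_cosine_similarity("the meeting starts at noon",
                              "the meeting begins at noon"))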
@@ -19,6 +19,7 @@ spacy_stopwords = en.Defaults.stop_words
 
 STOPWORDS = set(STOPWORDS).union(set(stopwords.words("english"))).union(set(spacy_stopwords))
 
+
 def create_wordcloud(timestamp, real_time=False):
     """
     Create a basic word cloud visualization of transcribed text
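The STOPWORDS set assembled above merges the wordcloud package's defaults with NLTK's English stopwords and spaCy's list, so a cloud generated with it drops filler words from all three sources. A minimal sketch of feeding such a set to the wordcloud package; the text and output path are illustrative, though the name matches the wordcloud*.png pattern now ignored by .gitignore:

from wordcloud import WordCloud, STOPWORDS

# Illustrative transcript text; the real script reads it from transcript files.
text = "thanks everyone for joining the meeting today we will review the roadmap"

wc = WordCloud(
    width=800,
    height=400,
    background_color="white",
    stopwords=set(STOPWORDS),  # swap in the merged STOPWORDS set built above
).generate(text)

wc.to_file("wordcloud_example.png")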