Mirror of https://github.com/Monadical-SAS/reflector.git (synced 2025-12-20 20:29:06 +00:00)

Compare commits: v0.11.0...mathieu/sq (42 commits)

Commits in this range: b7f8e8ef8d, 27f19ec6ba, 2aa99fe846, df909363f5, ad2accb574, a07c621bcd, f51dae8da3, b217c7ba41, 0b2152ea75, e0c71c5548, a883df0d63, 1c9e8b9cde, 27b3b9cdee, 8ad1270229, 617a1c8b32, 60cc2b16ae, 606c5f5059, 5e036d17b6, 04a9c2f2f7, fb5bb39716, 4f70a7f593, 224e40225d, 24980de4e0, 7f178b5f9e, 0aaa42528a, 565a62900f, 27016e6051, 6ddfee0b4e, 47716f6e5d, 1520f88e9e, 9b90aaa57f, d21b65e4e8, 45d1608950, 06639d4d8f, 0abcebfc94, 2b723da08b, 6566e04300, 870e860517, 396a95d5ce, 6f680b5795, ab859d65a6, fa049e8d06

CHANGELOG.md (39 lines changed)

@@ -1,5 +1,44 @@
# Changelog

## [0.13.1](https://github.com/Monadical-SAS/reflector/compare/v0.13.0...v0.13.1) (2025-09-22)

### Bug Fixes

* TypeError on not all arguments converted during string formatting in logger ([#667](https://github.com/Monadical-SAS/reflector/issues/667)) ([565a629](https://github.com/Monadical-SAS/reflector/commit/565a62900f5a02fc946b68f9269a42190ed70ab6))

## [0.13.0](https://github.com/Monadical-SAS/reflector/compare/v0.12.1...v0.13.0) (2025-09-19)

### Features

* room form edit with enter ([#662](https://github.com/Monadical-SAS/reflector/issues/662)) ([47716f6](https://github.com/Monadical-SAS/reflector/commit/47716f6e5ddee952609d2fa0ffabdfa865286796))

### Bug Fixes

* invalid cleanup call ([#660](https://github.com/Monadical-SAS/reflector/issues/660)) ([0abcebf](https://github.com/Monadical-SAS/reflector/commit/0abcebfc9491f87f605f21faa3e53996fafedd9a))

## [0.12.1](https://github.com/Monadical-SAS/reflector/compare/v0.12.0...v0.12.1) (2025-09-17)

### Bug Fixes

* production blocked because having existing meeting with room_id null ([#657](https://github.com/Monadical-SAS/reflector/issues/657)) ([870e860](https://github.com/Monadical-SAS/reflector/commit/870e8605171a27155a9cbee215eeccb9a8d6c0a2))

## [0.12.0](https://github.com/Monadical-SAS/reflector/compare/v0.11.0...v0.12.0) (2025-09-17)

### Features

* calendar integration ([#608](https://github.com/Monadical-SAS/reflector/issues/608)) ([6f680b5](https://github.com/Monadical-SAS/reflector/commit/6f680b57954c688882c4ed49f40f161c52a00a24))
* self-hosted gpu api ([#636](https://github.com/Monadical-SAS/reflector/issues/636)) ([ab859d6](https://github.com/Monadical-SAS/reflector/commit/ab859d65a6bded904133a163a081a651b3938d42))

### Bug Fixes

* ignore player hotkeys for text inputs ([#646](https://github.com/Monadical-SAS/reflector/issues/646)) ([fa049e8](https://github.com/Monadical-SAS/reflector/commit/fa049e8d068190ce7ea015fd9fcccb8543f54a3f))

## [0.11.0](https://github.com/Monadical-SAS/reflector/compare/v0.10.0...v0.11.0) (2025-09-16)

gpu/modal_deployments/.gitignore (vendored, new file, 33 lines)

@@ -0,0 +1,33 @@
# OS / Editor
.DS_Store
.vscode/
.idea/

# Python
__pycache__/
*.py[cod]
*$py.class

# Logs
*.log

# Env and secrets
.env
.env.*
*.env
*.secret

# Build / dist
build/
dist/
.eggs/
*.egg-info/

# Coverage / test
.pytest_cache/
.coverage*
htmlcov/

# Modal local state (if any)
modal_mounts/
.modal_cache/

gpu/self_hosted/.env.example (new file, 2 lines)

@@ -0,0 +1,2 @@
REFLECTOR_GPU_APIKEY=
HF_TOKEN=

gpu/self_hosted/.gitignore (vendored, new file, 38 lines)

@@ -0,0 +1,38 @@
cache/

# OS / Editor
.DS_Store
.vscode/
.idea/

# Python
__pycache__/
*.py[cod]
*$py.class

# Env and secrets
.env
*.env
*.secret
HF_TOKEN
REFLECTOR_GPU_APIKEY

# Virtual env / uv
.venv/
venv/
ENV/
uv/

# Build / dist
build/
dist/
.eggs/
*.egg-info/

# Coverage / test
.pytest_cache/
.coverage*
htmlcov/

# Logs
*.log

gpu/self_hosted/Dockerfile (new file, 46 lines)

@@ -0,0 +1,46 @@
FROM python:3.12-slim

ENV PYTHONUNBUFFERED=1 \
    UV_LINK_MODE=copy \
    UV_NO_CACHE=1

WORKDIR /tmp
RUN apt-get update \
    && apt-get install -y \
        ffmpeg \
        curl \
        ca-certificates \
        gnupg \
        wget \
    && apt-get clean
# Add NVIDIA CUDA repo for Debian 12 (bookworm) and install cuDNN 9 for CUDA 12
ADD https://developer.download.nvidia.com/compute/cuda/repos/debian12/x86_64/cuda-keyring_1.1-1_all.deb /cuda-keyring.deb
RUN dpkg -i /cuda-keyring.deb \
    && rm /cuda-keyring.deb \
    && apt-get update \
    && apt-get install -y --no-install-recommends \
        cuda-cudart-12-6 \
        libcublas-12-6 \
        libcudnn9-cuda-12 \
        libcudnn9-dev-cuda-12 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
ADD https://astral.sh/uv/install.sh /uv-installer.sh
RUN sh /uv-installer.sh && rm /uv-installer.sh
ENV PATH="/root/.local/bin/:$PATH"
ENV LD_LIBRARY_PATH="/usr/local/cuda/lib64:/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH"

RUN mkdir -p /app
WORKDIR /app
COPY pyproject.toml uv.lock /app/

COPY ./app /app/app
COPY ./main.py /app/
COPY ./runserver.sh /app/

EXPOSE 8000

CMD ["sh", "/app/runserver.sh"]

gpu/self_hosted/README.md (new file, 73 lines)

@@ -0,0 +1,73 @@
# Self-hosted Model API

Run transcription, translation, and diarization services compatible with Reflector's GPU Model API. Works on CPU or GPU.

Environment variables

- REFLECTOR_GPU_APIKEY: Optional Bearer token. If unset, auth is disabled.
- HF_TOKEN: Optional. Required for diarization to download pyannote pipelines.

Requirements

- FFmpeg must be installed and on PATH (used for URL-based and segmented transcription)
- Python 3.12+
- NVIDIA GPU optional. If available, it will be used automatically.

Local run

Set env vars in the self_hosted/.env file, then:

    uv sync
    uv run uvicorn main:app --host 0.0.0.0 --port 8000

Authentication

- If REFLECTOR_GPU_APIKEY is set, include the header: Authorization: Bearer <key>

Endpoints

- POST /v1/audio/transcriptions
  - multipart/form-data
  - fields: file (single file) OR files[] (multiple files), language, batch (true/false)
  - response: single { text, words, filename } or { results: [ ... ] }

- POST /v1/audio/transcriptions-from-url
  - application/json
  - body: { audio_file_url, language, timestamp_offset }
  - response: { text, words }

- POST /translate
  - text: query parameter
  - body (application/json): { source_language, target_language }
  - response: { text: { <src>: original, <tgt>: translated } }

- POST /diarize
  - query parameters: audio_file_url, timestamp (optional)
  - requires HF_TOKEN to be set (for pyannote)
  - response: { diarization: [ { start, end, speaker } ] }

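The request shapes above can be exercised with a small Python client. This is a minimal sketch, not part of the repository: the base URL, API key, file name, and audio URL are placeholders for your own deployment.

```python
# Minimal client sketch for the endpoints above (assumes the service runs on
# http://localhost:8000 with REFLECTOR_GPU_APIKEY=dev-key; all values are placeholders).
import requests

BASE = "http://localhost:8000"
HEADERS = {"Authorization": "Bearer dev-key"}  # omit if REFLECTOR_GPU_APIKEY is unset

# Single-file transcription (multipart/form-data)
with open("sample.wav", "rb") as f:
    resp = requests.post(
        f"{BASE}/v1/audio/transcriptions",
        headers=HEADERS,
        files={"file": ("sample.wav", f, "audio/wav")},
        data={"language": "en"},
    )
resp.raise_for_status()
print(resp.json()["text"])

# Transcription from a URL (application/json)
resp = requests.post(
    f"{BASE}/v1/audio/transcriptions-from-url",
    headers=HEADERS,
    json={
        "audio_file_url": "https://example.com/sample.mp3",  # placeholder URL
        "language": "en",
        "timestamp_offset": 0.0,
    },
)
resp.raise_for_status()
print(resp.json()["words"][:3])
```
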
OpenAPI docs

- Visit /docs when the server is running

Docker

- Not yet provided in this directory. A Dockerfile will be added later. For now, use Local run above

Conformance tests

    # From this directory

    TRANSCRIPT_URL=http://localhost:8000 \
    TRANSCRIPT_API_KEY=dev-key \
    uv run -m pytest -m model_api --no-cov ../../server/tests/test_model_api_transcript.py

    TRANSLATION_URL=http://localhost:8000 \
    TRANSLATION_API_KEY=dev-key \
    uv run -m pytest -m model_api --no-cov ../../server/tests/test_model_api_translation.py

    DIARIZATION_URL=http://localhost:8000 \
    DIARIZATION_API_KEY=dev-key \
    uv run -m pytest -m model_api --no-cov ../../server/tests/test_model_api_diarization.py

gpu/self_hosted/app/auth.py (new file, 19 lines)

@@ -0,0 +1,19 @@
import os

from fastapi import Depends, HTTPException, status
from fastapi.security import OAuth2PasswordBearer

oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")


def apikey_auth(apikey: str = Depends(oauth2_scheme)):
    required_key = os.environ.get("REFLECTOR_GPU_APIKEY")
    if not required_key:
        return
    if apikey == required_key:
        return
    raise HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Invalid API key",
        headers={"WWW-Authenticate": "Bearer"},
    )

gpu/self_hosted/app/config.py (new file, 12 lines)

@@ -0,0 +1,12 @@
from pathlib import Path

SUPPORTED_FILE_EXTENSIONS = ["mp3", "mp4", "mpeg", "mpga", "m4a", "wav", "webm"]
SAMPLE_RATE = 16000
VAD_CONFIG = {
    "batch_max_duration": 30.0,
    "silence_padding": 0.5,
    "window_size": 512,
}

# App-level paths
UPLOADS_PATH = Path("/tmp/whisper-uploads")

gpu/self_hosted/app/factory.py (new file, 30 lines)

@@ -0,0 +1,30 @@
from contextlib import asynccontextmanager

from fastapi import FastAPI

from .routers.diarization import router as diarization_router
from .routers.transcription import router as transcription_router
from .routers.translation import router as translation_router
from .services.transcriber import WhisperService
from .services.diarizer import PyannoteDiarizationService
from .utils import ensure_dirs


@asynccontextmanager
async def lifespan(app: FastAPI):
    ensure_dirs()
    whisper_service = WhisperService()
    whisper_service.load()
    app.state.whisper = whisper_service
    diarization_service = PyannoteDiarizationService()
    diarization_service.load()
    app.state.diarizer = diarization_service
    yield


def create_app() -> FastAPI:
    app = FastAPI(lifespan=lifespan)
    app.include_router(transcription_router)
    app.include_router(translation_router)
    app.include_router(diarization_router)
    return app

gpu/self_hosted/app/routers/diarization.py (new file, 30 lines)

@@ -0,0 +1,30 @@
from typing import List

from fastapi import APIRouter, Depends, Request
from pydantic import BaseModel

from ..auth import apikey_auth
from ..services.diarizer import PyannoteDiarizationService
from ..utils import download_audio_file

router = APIRouter(tags=["diarization"])


class DiarizationSegment(BaseModel):
    start: float
    end: float
    speaker: int


class DiarizationResponse(BaseModel):
    diarization: List[DiarizationSegment]


@router.post(
    "/diarize", dependencies=[Depends(apikey_auth)], response_model=DiarizationResponse
)
def diarize(request: Request, audio_file_url: str, timestamp: float = 0.0):
    with download_audio_file(audio_file_url) as (file_path, _ext):
        file_path = str(file_path)
        diarizer: PyannoteDiarizationService = request.app.state.diarizer
        return diarizer.diarize_file(file_path, timestamp=timestamp)

gpu/self_hosted/app/routers/transcription.py (new file, 109 lines)

@@ -0,0 +1,109 @@
import uuid
from typing import Optional, Union

from fastapi import APIRouter, Body, Depends, Form, HTTPException, Request, UploadFile
from pydantic import BaseModel
from pathlib import Path
from ..auth import apikey_auth
from ..config import SUPPORTED_FILE_EXTENSIONS, UPLOADS_PATH
from ..services.transcriber import MODEL_NAME
from ..utils import cleanup_uploaded_files, download_audio_file

router = APIRouter(prefix="/v1/audio", tags=["transcription"])


class WordTiming(BaseModel):
    word: str
    start: float
    end: float


class TranscriptResult(BaseModel):
    text: str
    words: list[WordTiming]
    filename: Optional[str] = None


class TranscriptBatchResponse(BaseModel):
    results: list[TranscriptResult]


@router.post(
    "/transcriptions",
    dependencies=[Depends(apikey_auth)],
    response_model=Union[TranscriptResult, TranscriptBatchResponse],
)
def transcribe(
    request: Request,
    file: UploadFile = None,
    files: list[UploadFile] | None = None,
    model: str = Form(MODEL_NAME),
    language: str = Form("en"),
    batch: bool = Form(False),
):
    service = request.app.state.whisper
    if not file and not files:
        raise HTTPException(
            status_code=400, detail="Either 'file' or 'files' parameter is required"
        )
    if batch and not files:
        raise HTTPException(
            status_code=400, detail="Batch transcription requires 'files'"
        )

    upload_files = [file] if file else files

    uploaded_paths: list[Path] = []
    with cleanup_uploaded_files(uploaded_paths):
        for upload_file in upload_files:
            audio_suffix = upload_file.filename.split(".")[-1].lower()
            if audio_suffix not in SUPPORTED_FILE_EXTENSIONS:
                raise HTTPException(
                    status_code=400,
                    detail=(
                        f"Unsupported audio format. Supported extensions: {', '.join(SUPPORTED_FILE_EXTENSIONS)}"
                    ),
                )
            unique_filename = f"{uuid.uuid4()}.{audio_suffix}"
            file_path = UPLOADS_PATH / unique_filename
            with open(file_path, "wb") as f:
                content = upload_file.file.read()
                f.write(content)
            uploaded_paths.append(file_path)

        if batch and len(upload_files) > 1:
            results = []
            for path in uploaded_paths:
                result = service.transcribe_file(str(path), language=language)
                result["filename"] = path.name
                results.append(result)
            return {"results": results}

        results = []
        for path in uploaded_paths:
            result = service.transcribe_file(str(path), language=language)
            result["filename"] = path.name
            results.append(result)

        return {"results": results} if len(results) > 1 else results[0]


@router.post(
    "/transcriptions-from-url",
    dependencies=[Depends(apikey_auth)],
    response_model=TranscriptResult,
)
def transcribe_from_url(
    request: Request,
    audio_file_url: str = Body(..., description="URL of the audio file to transcribe"),
    model: str = Body(MODEL_NAME),
    language: str = Body("en"),
    timestamp_offset: float = Body(0.0),
):
    service = request.app.state.whisper
    with download_audio_file(audio_file_url) as (file_path, _ext):
        file_path = str(file_path)
        result = service.transcribe_vad_url_segment(
            file_path=file_path, timestamp_offset=timestamp_offset, language=language
        )
        return result

gpu/self_hosted/app/routers/translation.py (new file, 28 lines)

@@ -0,0 +1,28 @@
from typing import Dict

from fastapi import APIRouter, Body, Depends
from pydantic import BaseModel

from ..auth import apikey_auth
from ..services.translator import TextTranslatorService

router = APIRouter(tags=["translation"])

translator = TextTranslatorService()


class TranslationResponse(BaseModel):
    text: Dict[str, str]


@router.post(
    "/translate",
    dependencies=[Depends(apikey_auth)],
    response_model=TranslationResponse,
)
def translate(
    text: str,
    source_language: str = Body("en"),
    target_language: str = Body("fr"),
):
    return translator.translate(text, source_language, target_language)

gpu/self_hosted/app/services/diarizer.py (new file, 42 lines)

@@ -0,0 +1,42 @@
import os
import threading

import torch
import torchaudio
from pyannote.audio import Pipeline


class PyannoteDiarizationService:
    def __init__(self):
        self._pipeline = None
        self._device = "cpu"
        self._lock = threading.Lock()

    def load(self):
        self._device = "cuda" if torch.cuda.is_available() else "cpu"
        self._pipeline = Pipeline.from_pretrained(
            "pyannote/speaker-diarization-3.1",
            use_auth_token=os.environ.get("HF_TOKEN"),
        )
        self._pipeline.to(torch.device(self._device))

    def diarize_file(self, file_path: str, timestamp: float = 0.0) -> dict:
        if self._pipeline is None:
            self.load()
        waveform, sample_rate = torchaudio.load(file_path)
        with self._lock:
            diarization = self._pipeline(
                {"waveform": waveform, "sample_rate": sample_rate}
            )
        words = []
        for diarization_segment, _, speaker in diarization.itertracks(yield_label=True):
            words.append(
                {
                    "start": round(timestamp + diarization_segment.start, 3),
                    "end": round(timestamp + diarization_segment.end, 3),
                    "speaker": int(speaker[-2:])
                    if speaker and speaker[-2:].isdigit()
                    else 0,
                }
            )
        return {"diarization": words}

gpu/self_hosted/app/services/transcriber.py (new file, 208 lines)

@@ -0,0 +1,208 @@
import os
import shutil
import subprocess
import threading
from typing import Generator

import faster_whisper
import librosa
import numpy as np
import torch
from fastapi import HTTPException
from silero_vad import VADIterator, load_silero_vad

from ..config import SAMPLE_RATE, VAD_CONFIG

# Whisper configuration (service-local defaults)
MODEL_NAME = "large-v2"
# None delegates compute type to runtime: float16 on CUDA, int8 on CPU
MODEL_COMPUTE_TYPE = None
MODEL_NUM_WORKERS = 1
CACHE_PATH = os.path.join(os.path.expanduser("~"), ".cache", "reflector-whisper")
from ..utils import NoStdStreams


class WhisperService:
    def __init__(self):
        self.model = None
        self.device = "cpu"
        self.lock = threading.Lock()

    def load(self):
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        compute_type = MODEL_COMPUTE_TYPE or (
            "float16" if self.device == "cuda" else "int8"
        )
        self.model = faster_whisper.WhisperModel(
            MODEL_NAME,
            device=self.device,
            compute_type=compute_type,
            num_workers=MODEL_NUM_WORKERS,
            download_root=CACHE_PATH,
        )

    def pad_audio(self, audio_array, sample_rate: int = SAMPLE_RATE):
        audio_duration = len(audio_array) / sample_rate
        if audio_duration < VAD_CONFIG["silence_padding"]:
            silence_samples = int(sample_rate * VAD_CONFIG["silence_padding"])
            silence = np.zeros(silence_samples, dtype=np.float32)
            return np.concatenate([audio_array, silence])
        return audio_array

    def enforce_word_timing_constraints(self, words: list[dict]) -> list[dict]:
        if len(words) <= 1:
            return words
        enforced: list[dict] = []
        for i, word in enumerate(words):
            current = dict(word)
            if i < len(words) - 1:
                next_start = words[i + 1]["start"]
                if current["end"] > next_start:
                    current["end"] = next_start
            enforced.append(current)
        return enforced

    def transcribe_file(self, file_path: str, language: str = "en") -> dict:
        input_for_model: str | "object" = file_path
        try:
            audio_array, _sample_rate = librosa.load(
                file_path, sr=SAMPLE_RATE, mono=True
            )
            if len(audio_array) / float(SAMPLE_RATE) < VAD_CONFIG["silence_padding"]:
                input_for_model = self.pad_audio(audio_array, SAMPLE_RATE)
        except Exception:
            pass

        with self.lock:
            with NoStdStreams():
                segments, _ = self.model.transcribe(
                    input_for_model,
                    language=language,
                    beam_size=5,
                    word_timestamps=True,
                    vad_filter=True,
                    vad_parameters={"min_silence_duration_ms": 500},
                )

        segments = list(segments)
        text = "".join(segment.text for segment in segments).strip()
        words = [
            {
                "word": word.word,
                "start": round(float(word.start), 2),
                "end": round(float(word.end), 2),
            }
            for segment in segments
            for word in segment.words
        ]
        words = self.enforce_word_timing_constraints(words)
        return {"text": text, "words": words}

    def transcribe_vad_url_segment(
        self, file_path: str, timestamp_offset: float = 0.0, language: str = "en"
    ) -> dict:
        def load_audio_via_ffmpeg(input_path: str, sample_rate: int) -> np.ndarray:
            ffmpeg_bin = shutil.which("ffmpeg") or "ffmpeg"
            cmd = [
                ffmpeg_bin,
                "-nostdin",
                "-threads",
                "1",
                "-i",
                input_path,
                "-f",
                "f32le",
                "-acodec",
                "pcm_f32le",
                "-ac",
                "1",
                "-ar",
                str(sample_rate),
                "pipe:1",
            ]
            try:
                proc = subprocess.run(
                    cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
                )
            except Exception as e:
                raise HTTPException(status_code=400, detail=f"ffmpeg failed: {e}")
            audio = np.frombuffer(proc.stdout, dtype=np.float32)
            return audio

        def vad_segments(
            audio_array,
            sample_rate: int = SAMPLE_RATE,
            window_size: int = VAD_CONFIG["window_size"],
        ) -> Generator[tuple[float, float], None, None]:
            vad_model = load_silero_vad(onnx=False)
            iterator = VADIterator(vad_model, sampling_rate=sample_rate)
            start = None
            for i in range(0, len(audio_array), window_size):
                chunk = audio_array[i : i + window_size]
                if len(chunk) < window_size:
                    chunk = np.pad(
                        chunk, (0, window_size - len(chunk)), mode="constant"
                    )
                speech = iterator(chunk)
                if not speech:
                    continue
                if "start" in speech:
                    start = speech["start"]
                    continue
                if "end" in speech and start is not None:
                    end = speech["end"]
                    yield (start / float(SAMPLE_RATE), end / float(SAMPLE_RATE))
                    start = None
            iterator.reset_states()

        audio_array = load_audio_via_ffmpeg(file_path, SAMPLE_RATE)

        merged_batches: list[tuple[float, float]] = []
        batch_start = None
        batch_end = None
        max_duration = VAD_CONFIG["batch_max_duration"]
        for seg_start, seg_end in vad_segments(audio_array):
            if batch_start is None:
                batch_start, batch_end = seg_start, seg_end
                continue
            if seg_end - batch_start <= max_duration:
                batch_end = seg_end
            else:
                merged_batches.append((batch_start, batch_end))
                batch_start, batch_end = seg_start, seg_end
        if batch_start is not None and batch_end is not None:
            merged_batches.append((batch_start, batch_end))

        all_text = []
        all_words = []
        for start_time, end_time in merged_batches:
            s_idx = int(start_time * SAMPLE_RATE)
            e_idx = int(end_time * SAMPLE_RATE)
            segment = audio_array[s_idx:e_idx]
            segment = self.pad_audio(segment, SAMPLE_RATE)
            with self.lock:
                segments, _ = self.model.transcribe(
                    segment,
                    language=language,
                    beam_size=5,
                    word_timestamps=True,
                    vad_filter=True,
                    vad_parameters={"min_silence_duration_ms": 500},
                )
                segments = list(segments)
            text = "".join(seg.text for seg in segments).strip()
            words = [
                {
                    "word": w.word,
                    "start": round(float(w.start) + start_time + timestamp_offset, 2),
                    "end": round(float(w.end) + start_time + timestamp_offset, 2),
                }
                for seg in segments
                for w in seg.words
            ]
            if text:
                all_text.append(text)
                all_words.extend(words)

        all_words = self.enforce_word_timing_constraints(all_words)
        return {"text": " ".join(all_text), "words": all_words}

gpu/self_hosted/app/services/translator.py (new file, 44 lines)

@@ -0,0 +1,44 @@
import threading

from transformers import MarianMTModel, MarianTokenizer, pipeline


class TextTranslatorService:
    """Simple text-to-text translator using HuggingFace MarianMT models.

    This mirrors the modal translator API shape but uses text translation only.
    """

    def __init__(self):
        self._pipeline = None
        self._lock = threading.Lock()

    def load(self, source_language: str = "en", target_language: str = "fr"):
        # Pick a default MarianMT model pair if available; fall back to Helsinki-NLP en->fr
        model_name = self._resolve_model_name(source_language, target_language)
        tokenizer = MarianTokenizer.from_pretrained(model_name)
        model = MarianMTModel.from_pretrained(model_name)
        self._pipeline = pipeline("translation", model=model, tokenizer=tokenizer)

    def _resolve_model_name(self, src: str, tgt: str) -> str:
        # Minimal mapping; extend as needed
        pair = (src.lower(), tgt.lower())
        mapping = {
            ("en", "fr"): "Helsinki-NLP/opus-mt-en-fr",
            ("fr", "en"): "Helsinki-NLP/opus-mt-fr-en",
            ("en", "es"): "Helsinki-NLP/opus-mt-en-es",
            ("es", "en"): "Helsinki-NLP/opus-mt-es-en",
            ("en", "de"): "Helsinki-NLP/opus-mt-en-de",
            ("de", "en"): "Helsinki-NLP/opus-mt-de-en",
        }
        return mapping.get(pair, "Helsinki-NLP/opus-mt-en-fr")

    def translate(self, text: str, source_language: str, target_language: str) -> dict:
        if self._pipeline is None:
            self.load(source_language, target_language)
        with self._lock:
            results = self._pipeline(
                text, src_lang=source_language, tgt_lang=target_language
            )
        translated = results[0]["translation_text"] if results else ""
        return {"text": {source_language: text, target_language: translated}}

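As a usage sketch of the service above (the import path assumes you run from gpu/self_hosted; the text and language pair are illustrative):

```python
# Hypothetical direct use of TextTranslatorService outside the HTTP layer.
from app.services.translator import TextTranslatorService

svc = TextTranslatorService()
# The first call lazily loads the MarianMT model for the requested pair.
# Returns a dict shaped like {"text": {"en": "Good morning", "fr": "<translated text>"}}.
print(svc.translate("Good morning", "en", "fr"))
```
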
gpu/self_hosted/app/utils.py (new file, 107 lines)

@@ -0,0 +1,107 @@
import logging
import os
import sys
import uuid
from contextlib import contextmanager
from typing import Mapping
from urllib.parse import urlparse
from pathlib import Path

import requests
from fastapi import HTTPException

from .config import SUPPORTED_FILE_EXTENSIONS, UPLOADS_PATH

logger = logging.getLogger(__name__)


class NoStdStreams:
    def __init__(self):
        self.devnull = open(os.devnull, "w")

    def __enter__(self):
        self._stdout, self._stderr = sys.stdout, sys.stderr
        self._stdout.flush()
        self._stderr.flush()
        sys.stdout, sys.stderr = self.devnull, self.devnull

    def __exit__(self, exc_type, exc_value, traceback):
        sys.stdout, sys.stderr = self._stdout, self._stderr
        self.devnull.close()


def ensure_dirs():
    UPLOADS_PATH.mkdir(parents=True, exist_ok=True)


def detect_audio_format(url: str, headers: Mapping[str, str]) -> str:
    url_path = urlparse(url).path
    for ext in SUPPORTED_FILE_EXTENSIONS:
        if url_path.lower().endswith(f".{ext}"):
            return ext

    content_type = headers.get("content-type", "").lower()
    if "audio/mpeg" in content_type or "audio/mp3" in content_type:
        return "mp3"
    if "audio/wav" in content_type:
        return "wav"
    if "audio/mp4" in content_type:
        return "mp4"

    raise HTTPException(
        status_code=400,
        detail=(
            f"Unsupported audio format for URL. Supported extensions: {', '.join(SUPPORTED_FILE_EXTENSIONS)}"
        ),
    )


def download_audio_to_uploads(audio_file_url: str) -> tuple[Path, str]:
    response = requests.head(audio_file_url, allow_redirects=True)
    if response.status_code == 404:
        raise HTTPException(status_code=404, detail="Audio file not found")

    response = requests.get(audio_file_url, allow_redirects=True)
    response.raise_for_status()

    audio_suffix = detect_audio_format(audio_file_url, response.headers)
    unique_filename = f"{uuid.uuid4()}.{audio_suffix}"
    file_path: Path = UPLOADS_PATH / unique_filename

    with open(file_path, "wb") as f:
        f.write(response.content)

    return file_path, audio_suffix


@contextmanager
def download_audio_file(audio_file_url: str):
    """Download an audio file to UPLOADS_PATH and remove it after use.

    Yields (file_path: Path, audio_suffix: str).
    """
    file_path, audio_suffix = download_audio_to_uploads(audio_file_url)
    try:
        yield file_path, audio_suffix
    finally:
        try:
            file_path.unlink(missing_ok=True)
        except Exception as e:
            logger.error("Error deleting temporary file %s: %s", file_path, e)


@contextmanager
def cleanup_uploaded_files(file_paths: list[Path]):
    """Ensure provided file paths are removed after use.

    The provided list can be populated inside the context; all present entries
    at exit will be deleted.
    """
    try:
        yield file_paths
    finally:
        for path in list(file_paths):
            try:
                path.unlink(missing_ok=True)
            except Exception as e:
                logger.error("Error deleting temporary file %s: %s", path, e)

gpu/self_hosted/compose.yml (new file, 10 lines)

@@ -0,0 +1,10 @@
services:
  reflector_gpu:
    build:
      context: .
    ports:
      - "8000:8000"
    env_file:
      - .env
    volumes:
      - ./cache:/root/.cache

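A likely way to use this compose file (assuming Docker Compose v2; not stated in this diff) is to populate gpu/self_hosted/.env and run `docker compose up --build` from that directory; the ./cache volume keeps downloaded model weights across container restarts.
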
gpu/self_hosted/main.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from app.factory import create_app

app = create_app()

gpu/self_hosted/pyproject.toml (new file, 19 lines)

@@ -0,0 +1,19 @@
[project]
name = "reflector-gpu"
version = "0.1.0"
description = "Self-hosted GPU service for speech transcription, diarization, and translation via FastAPI."
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "fastapi[standard]>=0.116.1",
    "uvicorn[standard]>=0.30.0",
    "torch>=2.3.0",
    "faster-whisper>=1.1.0",
    "librosa==0.10.1",
    "numpy<2",
    "silero-vad==5.1.0",
    "transformers>=4.35.0",
    "sentencepiece",
    "pyannote.audio==3.1.0",
    "torchaudio>=2.3.0",
]

gpu/self_hosted/runserver.sh (new file, 17 lines)

@@ -0,0 +1,17 @@
#!/bin/sh
set -e

export PATH="/root/.local/bin:$PATH"
cd /app

# Install Python dependencies at runtime (first run or when FORCE_SYNC=1)
if [ ! -d "/app/.venv" ] || [ "$FORCE_SYNC" = "1" ]; then
    echo "[startup] Installing Python dependencies with uv..."
    uv sync --compile-bytecode --locked
else
    echo "[startup] Using existing virtual environment at /app/.venv"
fi

exec uv run uvicorn main:app --host 0.0.0.0 --port 8000

gpu/self_hosted/uv.lock (generated, new file, 3013 lines)

File diff suppressed because it is too large.

server/asyncio_loop_analysis.md (new file, 118 lines)

@@ -0,0 +1,118 @@
# AsyncIO Event Loop Analysis for test_attendee_parsing_bug.py

## Problem Summary
The test passes but encounters an error during teardown where asyncpg tries to use a different/closed event loop, resulting in:
- `RuntimeError: Task got Future attached to a different loop`
- `RuntimeError: Event loop is closed`

## Root Cause Analysis

### 1. Multiple Event Loop Creation Points

The test environment creates event loops at different scopes:

1. **Session-scoped loop** (conftest.py:27-34; a typical shape is sketched after this list):
   - Created once per test session
   - Used by session-scoped fixtures
   - Closed after all tests complete

2. **Function-scoped loop** (pytest-asyncio default):
   - Created for each async test function
   - This is the loop that runs the actual test
   - Closed immediately after test completes

3. **AsyncPG internal loop**:
   - AsyncPG connections store a reference to the loop they were created with
   - Used for connection lifecycle management

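For reference, a session-scoped loop fixture of the kind item 1 points at typically has the following shape; this is a sketch of the common pytest-asyncio pattern, not the exact body of this repository's conftest.py.

```python
# Sketch of a session-scoped event loop fixture (typical pytest-asyncio override);
# the real conftest.py:27-34 may differ in detail.
import asyncio

import pytest


@pytest.fixture(scope="session")
def event_loop():
    loop = asyncio.new_event_loop()
    yield loop    # session-scoped async fixtures run on this loop
    loop.close()  # closed once, after the whole test session
```
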
### 2. Event Loop Lifecycle Mismatch

The issue occurs because:

1. **Session fixture creates database connection** on session-scoped loop
2. **Test runs** on function-scoped loop (different from session loop)
3. **During teardown**, the session fixture tries to rollback/close using the original session loop
4. **AsyncPG connection** still references the function-scoped loop which is now closed
5. **Conflict**: SQLAlchemy tries to use session loop, but asyncpg Future is attached to the closed function loop

### 3. Configuration Issues

Current pytest configuration:
- `asyncio_mode = "auto"` in pyproject.toml
- `asyncio_default_fixture_loop_scope=session` (shown in test output)
- `asyncio_default_test_loop_scope=function` (shown in test output)

This mismatch between fixture loop scope (session) and test loop scope (function) causes the problem.

## Solutions

### Option 1: Align Loop Scopes (Recommended)
Change pytest-asyncio configuration to use consistent loop scopes:

```toml
# pyproject.toml
[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"  # Change from session to function
```

### Option 2: Use Function-Scoped Database Fixture
Change the `session` fixture scope from session to function:

```python
@pytest_asyncio.fixture  # Remove scope="session"
async def session(setup_database):
    # ... existing code ...
```

### Option 3: Explicit Loop Management
Ensure all async operations use the same loop:

```python
@pytest_asyncio.fixture
async def session(setup_database, event_loop):
    # Force using the current event loop
    engine = create_async_engine(
        settings.DATABASE_URL,
        echo=False,
        poolclass=NullPool,
        connect_args={"loop": event_loop},  # Pass explicit loop
    )
    # ... rest of fixture ...
```

### Option 4: Upgrade pytest-asyncio
The current version (1.1.0) has known issues with loop management. Consider upgrading to the latest version, which has better loop scope handling.

## Immediate Workaround

For the test to run cleanly without the teardown error, you can:

1. Add explicit cleanup in the test:
```python
@pytest.mark.asyncio
async def test_attendee_parsing_bug(session):
    # ... existing test code ...

    # Explicit cleanup before fixture teardown
    await session.commit()  # or await session.close()
```

2. Or suppress the teardown error (not recommended for production):
```python
@pytest.fixture
async def session(setup_database):
    # ... existing setup ...
    try:
        yield session
        await session.rollback()
    except RuntimeError as e:
        if "Event loop is closed" not in str(e):
            raise
    finally:
        await session.close()
```

## Recommendation

The cleanest solution is to align the loop scopes by setting both the fixture and test loop scopes to "function". This ensures each test gets its own clean event loop and avoids cross-contamination between tests.

@@ -190,5 +190,5 @@ Use the pytest-based conformance tests to validate any new implementation (inclu
```
TRANSCRIPT_URL=https://<your-deployment-base> \
TRANSCRIPT_MODAL_API_KEY=your-api-key \
uv run -m pytest -m gpu_modal --no-cov server/tests/test_gpu_modal_transcript.py
uv run -m pytest -m model_api --no-cov server/tests/test_model_api_transcript.py
```

@@ -3,7 +3,7 @@ from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool

from reflector.db import metadata
from reflector.db.base import metadata
from reflector.settings import settings

# this is the Alembic Config object, which provides

@@ -28,7 +28,7 @@ def upgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))
    results = bind.execute(select(transcript.c.id, transcript.c.topics))

    for row in results:
        transcript_id = row["id"]

@@ -58,7 +58,7 @@ def downgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))
    results = bind.execute(select(transcript.c.id, transcript.c.topics))

    for row in results:
        transcript_id = row["id"]

@@ -36,9 +36,7 @@ def upgrade() -> None:

    # select only the one with duration = 0
    results = bind.execute(
        select([transcript.c.id, transcript.c.duration]).where(
            transcript.c.duration == 0
        )
        select(transcript.c.id, transcript.c.duration).where(transcript.c.duration == 0)
    )

    data_dir = Path(settings.DATA_DIR)

@@ -0,0 +1,53 @@
"""remove_one_active_meeting_per_room_constraint

Revision ID: 6025e9b2bef2
Revises: 2ae3db106d4e
Create Date: 2025-08-18 18:45:44.418392

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "6025e9b2bef2"
down_revision: Union[str, None] = "2ae3db106d4e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Remove the unique constraint that prevents multiple active meetings per room
    # This is needed to support calendar integration with overlapping meetings
    # Check if index exists before trying to drop it
    from alembic import context

    if context.get_context().dialect.name == "postgresql":
        conn = op.get_bind()
        result = conn.execute(
            sa.text(
                "SELECT 1 FROM pg_indexes WHERE indexname = 'idx_one_active_meeting_per_room'"
            )
        )
        if result.fetchone():
            op.drop_index("idx_one_active_meeting_per_room", table_name="meeting")
    else:
        # For SQLite, just try to drop it
        try:
            op.drop_index("idx_one_active_meeting_per_room", table_name="meeting")
        except:
            pass


def downgrade() -> None:
    # Restore the unique constraint
    op.create_index(
        "idx_one_active_meeting_per_room",
        "meeting",
        ["room_id"],
        unique=True,
        postgresql_where=sa.text("is_active = true"),
        sqlite_where=sa.text("is_active = 1"),
    )

@@ -8,7 +8,6 @@ Create Date: 2025-09-10 10:47:06.006819

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.

@@ -21,7 +20,6 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("meeting", schema=None) as batch_op:
        batch_op.alter_column("room_id", existing_type=sa.VARCHAR(), nullable=False)
        batch_op.create_foreign_key(
            None, "room", ["room_id"], ["id"], ondelete="CASCADE"
        )

@@ -33,6 +31,5 @@ def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("meeting", schema=None) as batch_op:
        batch_op.drop_constraint("meeting_room_id_fkey", type_="foreignkey")
        batch_op.alter_column("room_id", existing_type=sa.VARCHAR(), nullable=True)

    # ### end Alembic commands ###

@@ -28,7 +28,7 @@ def upgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))
    results = bind.execute(select(transcript.c.id, transcript.c.topics))

    for row in results:
        transcript_id = row["id"]

@@ -58,7 +58,7 @@ def downgrade() -> None:
    transcript = table("transcript", column("id", sa.String), column("topics", sa.JSON))

    # Select all rows from the transcript table
    results = bind.execute(select([transcript.c.id, transcript.c.topics]))
    results = bind.execute(select(transcript.c.id, transcript.c.topics))

    for row in results:
        transcript_id = row["id"]

@@ -0,0 +1,34 @@
"""add_grace_period_fields_to_meeting

Revision ID: d4a1c446458c
Revises: 6025e9b2bef2
Create Date: 2025-08-18 18:50:37.768052

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "d4a1c446458c"
down_revision: Union[str, None] = "6025e9b2bef2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add fields to track when participants left for grace period logic
    op.add_column(
        "meeting", sa.Column("last_participant_left_at", sa.DateTime(timezone=True))
    )
    op.add_column(
        "meeting",
        sa.Column("grace_period_minutes", sa.Integer, server_default=sa.text("15")),
    )


def downgrade() -> None:
    op.drop_column("meeting", "grace_period_minutes")
    op.drop_column("meeting", "last_participant_left_at")

server/migrations/versions/d8e204bbf615_add_calendar.py (new file, 129 lines)

@@ -0,0 +1,129 @@
"""add calendar

Revision ID: d8e204bbf615
Revises: d4a1c446458c
Create Date: 2025-09-10 19:56:22.295756

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = "d8e204bbf615"
down_revision: Union[str, None] = "d4a1c446458c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "calendar_event",
        sa.Column("id", sa.String(), nullable=False),
        sa.Column("room_id", sa.String(), nullable=False),
        sa.Column("ics_uid", sa.Text(), nullable=False),
        sa.Column("title", sa.Text(), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("start_time", sa.DateTime(timezone=True), nullable=False),
        sa.Column("end_time", sa.DateTime(timezone=True), nullable=False),
        sa.Column("attendees", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column("location", sa.Text(), nullable=True),
        sa.Column("ics_raw_data", sa.Text(), nullable=True),
        sa.Column("last_synced", sa.DateTime(timezone=True), nullable=False),
        sa.Column(
            "is_deleted", sa.Boolean(), server_default=sa.text("false"), nullable=False
        ),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(
            ["room_id"],
            ["room.id"],
            name="fk_calendar_event_room_id",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("room_id", "ics_uid", name="uq_room_calendar_event"),
    )
    with op.batch_alter_table("calendar_event", schema=None) as batch_op:
        batch_op.create_index(
            "idx_calendar_event_deleted",
            ["is_deleted"],
            unique=False,
            postgresql_where=sa.text("NOT is_deleted"),
        )
        batch_op.create_index(
            "idx_calendar_event_room_start", ["room_id", "start_time"], unique=False
        )

    with op.batch_alter_table("meeting", schema=None) as batch_op:
        batch_op.add_column(sa.Column("calendar_event_id", sa.String(), nullable=True))
        batch_op.add_column(
            sa.Column(
                "calendar_metadata",
                postgresql.JSONB(astext_type=sa.Text()),
                nullable=True,
            )
        )
        batch_op.create_index(
            "idx_meeting_calendar_event", ["calendar_event_id"], unique=False
        )
        batch_op.create_foreign_key(
            "fk_meeting_calendar_event_id",
            "calendar_event",
            ["calendar_event_id"],
            ["id"],
            ondelete="SET NULL",
        )

    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.add_column(sa.Column("ics_url", sa.Text(), nullable=True))
        batch_op.add_column(
            sa.Column(
                "ics_fetch_interval", sa.Integer(), server_default="300", nullable=True
            )
        )
        batch_op.add_column(
            sa.Column(
                "ics_enabled",
                sa.Boolean(),
                server_default=sa.text("false"),
                nullable=False,
            )
        )
        batch_op.add_column(
            sa.Column("ics_last_sync", sa.DateTime(timezone=True), nullable=True)
        )
        batch_op.add_column(sa.Column("ics_last_etag", sa.Text(), nullable=True))
        batch_op.create_index("idx_room_ics_enabled", ["ics_enabled"], unique=False)

    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("room", schema=None) as batch_op:
        batch_op.drop_index("idx_room_ics_enabled")
        batch_op.drop_column("ics_last_etag")
        batch_op.drop_column("ics_last_sync")
        batch_op.drop_column("ics_enabled")
        batch_op.drop_column("ics_fetch_interval")
        batch_op.drop_column("ics_url")

    with op.batch_alter_table("meeting", schema=None) as batch_op:
        batch_op.drop_constraint("fk_meeting_calendar_event_id", type_="foreignkey")
        batch_op.drop_index("idx_meeting_calendar_event")
        batch_op.drop_column("calendar_metadata")
        batch_op.drop_column("calendar_event_id")

    with op.batch_alter_table("calendar_event", schema=None) as batch_op:
        batch_op.drop_index("idx_calendar_event_room_start")
        batch_op.drop_index(
            "idx_calendar_event_deleted", postgresql_where=sa.text("NOT is_deleted")
        )

    op.drop_table("calendar_event")
    # ### end Alembic commands ###

@@ -0,0 +1,43 @@
"""remove_grace_period_fields

Revision ID: dc035ff72fd5
Revises: d8e204bbf615
Create Date: 2025-09-11 10:36:45.197588

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "dc035ff72fd5"
down_revision: Union[str, None] = "d8e204bbf615"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Remove grace period columns from meeting table
    op.drop_column("meeting", "last_participant_left_at")
    op.drop_column("meeting", "grace_period_minutes")


def downgrade() -> None:
    # Add back grace period columns to meeting table
    op.add_column(
        "meeting",
        sa.Column(
            "last_participant_left_at", sa.DateTime(timezone=True), nullable=True
        ),
    )
    op.add_column(
        "meeting",
        sa.Column(
            "grace_period_minutes",
            sa.Integer(),
            server_default=sa.text("15"),
            nullable=True,
        ),
    )

@@ -12,7 +12,6 @@ dependencies = [
    "requests>=2.31.0",
    "aiortc>=1.5.0",
    "sortedcontainers>=2.4.0",
    "loguru>=0.7.0",
    "pydantic-settings>=2.0.2",
    "structlog>=23.1.0",
    "uvicorn[standard]>=0.23.1",
@@ -20,8 +19,8 @@ dependencies = [
    "sentry-sdk[fastapi]>=1.29.2",
    "httpx>=0.24.1",
    "fastapi-pagination>=0.12.6",
    "databases[aiosqlite, asyncpg]>=0.7.0",
    "sqlalchemy<1.5",
    "sqlalchemy>=2.0.0",
    "asyncpg>=0.29.0",
    "alembic>=1.11.3",
    "nltk>=3.8.1",
    "prometheus-fastapi-instrumentator>=6.1.0",
@@ -39,6 +38,7 @@ dependencies = [
    "llama-index-llms-openai-like>=0.4.0",
    "pytest-env>=1.1.5",
    "webvtt-py>=0.5.0",
    "icalendar>=6.0.0",
]

[dependency-groups]
@@ -46,6 +46,7 @@ dev = [
    "black>=24.1.1",
    "stamina>=23.1.0",
    "pyinstrument>=4.6.1",
    "pytest-async-sqlalchemy>=0.2.0",
]
tests = [
    "pytest-cov>=4.1.0",
@@ -111,14 +112,17 @@ source = ["reflector"]

[tool.pytest_env]
ENVIRONMENT = "pytest"
DATABASE_URL = "postgresql://test_user:test_password@localhost:15432/reflector_test"
DATABASE_URL = "postgresql+asyncpg://test_user:test_password@localhost:15432/reflector_test"

[tool.pytest.ini_options]
addopts = "-ra -q --disable-pytest-warnings --cov --cov-report html -v"
testpaths = ["tests"]
asyncio_mode = "auto"
asyncio_debug = true
asyncio_default_fixture_loop_scope = "session"
asyncio_default_test_loop_scope = "session"
markers = [
    "gpu_modal: mark test to run only with GPU Modal endpoints (deselect with '-m \"not gpu_modal\"')",
    "model_api: tests for the unified model-serving HTTP API (backend- and hardware-agnostic)",
]

[tool.ruff.lint]
@@ -130,7 +134,7 @@ select = [

[tool.ruff.lint.per-file-ignores]
"reflector/processors/summary/summary_builder.py" = ["E501"]
"gpu/**.py" = ["PLC0415"]
"gpu/modal_deployments/**.py" = ["PLC0415"]
"reflector/tools/**.py" = ["PLC0415"]
"migrations/versions/**.py" = ["PLC0415"]
"tests/**.py" = ["PLC0415"]

@@ -1,21 +1,14 @@
import asyncio
import functools

from reflector.db import get_database


def asynctask(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        async def run_with_db():
            database = get_database()
            await database.connect()
            try:
                return await f(*args, **kwargs)
            finally:
                await database.disconnect()
        async def run_async():
            return await f(*args, **kwargs)

        coro = run_with_db()
        coro = run_async()
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:

@@ -67,7 +67,8 @@ def current_user(
    try:
        payload = jwtauth.verify_token(token)
        sub = payload["sub"]
        return UserInfo(sub=sub)
        email = payload["email"]
        return UserInfo(sub=sub, email=email)
    except JWTError as e:
        logger.error(f"JWT error: {e}")
        raise HTTPException(status_code=401, detail="Invalid authentication")

@@ -1,47 +1,69 @@
|
||||
import contextvars
|
||||
from typing import Optional
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import databases
|
||||
import sqlalchemy
|
||||
from sqlalchemy.ext.asyncio import (
|
||||
AsyncEngine,
|
||||
AsyncSession,
|
||||
async_sessionmaker,
|
||||
create_async_engine,
|
||||
)
|
||||
|
||||
from reflector.db.base import Base as Base
|
||||
from reflector.db.base import metadata as metadata
|
||||
from reflector.events import subscribers_shutdown, subscribers_startup
|
||||
from reflector.settings import settings
|
||||
|
||||
metadata = sqlalchemy.MetaData()
|
||||
|
||||
_database_context: contextvars.ContextVar[Optional[databases.Database]] = (
|
||||
contextvars.ContextVar("database", default=None)
|
||||
)
|
||||
_engine: AsyncEngine | None = None
|
||||
_session_factory: async_sessionmaker[AsyncSession] | None = None
|
||||
|
||||
|
||||
def get_database() -> databases.Database:
|
||||
"""Get database instance for current asyncio context"""
|
||||
db = _database_context.get()
|
||||
if db is None:
|
||||
db = databases.Database(settings.DATABASE_URL)
|
||||
_database_context.set(db)
|
||||
return db
|
||||
def get_engine() -> AsyncEngine:
|
||||
global _engine
|
||||
if _engine is None:
|
||||
_engine = create_async_engine(
|
||||
settings.DATABASE_URL,
|
||||
echo=False,
|
||||
pool_pre_ping=True,
|
||||
)
|
||||
return _engine
|
||||
|
||||
|
||||
# import models
|
||||
def get_session_factory() -> async_sessionmaker[AsyncSession]:
|
||||
global _session_factory
|
||||
if _session_factory is None:
|
||||
_session_factory = async_sessionmaker(
|
||||
get_engine(),
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
)
|
||||
return _session_factory
|
||||
|
||||
|
||||
async def _get_session() -> AsyncGenerator[AsyncSession, None]:
|
||||
# necessary implementation to ease mocking on pytest
|
||||
async with get_session_factory()() as session:
|
||||
yield session
|
||||
|
||||
|
||||
async def get_session() -> AsyncGenerator[AsyncSession, None]:
|
||||
async for session in _get_session():
|
||||
yield session
|
||||
|
||||
|
||||
import reflector.db.calendar_events # noqa
|
||||
import reflector.db.meetings # noqa
|
||||
import reflector.db.recordings # noqa
|
||||
import reflector.db.rooms # noqa
|
||||
import reflector.db.transcripts # noqa
|
||||
|
||||
kwargs = {}
|
||||
if "postgres" not in settings.DATABASE_URL:
|
||||
raise Exception("Only postgres database is supported in reflector")
|
||||
engine = sqlalchemy.create_engine(settings.DATABASE_URL, **kwargs)
|
||||
|
||||
|
||||
@subscribers_startup.append
|
||||
async def database_connect(_):
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
get_engine()
|
||||
|
||||
|
||||
@subscribers_shutdown.append
|
||||
async def database_disconnect(_):
|
||||
database = get_database()
|
||||
await database.disconnect()
|
||||
global _engine
|
||||
if _engine:
|
||||
await _engine.dispose()
|
||||
_engine = None
|
||||
|
||||
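The hunk above swaps the per-context databases.Database handle for a lazily created SQLAlchemy AsyncEngine plus async_sessionmaker. A minimal sketch of how a caller might consume get_session(), assuming DATABASE_URL points at a reachable Postgres instance:

```python
import asyncio

import sqlalchemy as sa

from reflector.db import get_session


async def ping_database() -> None:
    # get_session() is an async generator; iterate it to obtain an AsyncSession
    async for session in get_session():
        result = await session.execute(sa.text("SELECT 1"))
        print(result.scalar())


if __name__ == "__main__":
    asyncio.run(ping_database())
```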
server/reflector/db/base.py (new file, 237 lines)
@@ -0,0 +1,237 @@
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import JSONB, TSVECTOR
|
||||
from sqlalchemy.ext.asyncio import AsyncAttrs
|
||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
|
||||
|
||||
|
||||
class Base(AsyncAttrs, DeclarativeBase):
|
||||
pass
|
||||
|
||||
|
||||
class TranscriptModel(Base):
|
||||
__tablename__ = "transcript"
|
||||
|
||||
id: Mapped[str] = mapped_column(sa.String, primary_key=True)
|
||||
name: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
status: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
locked: Mapped[Optional[bool]] = mapped_column(sa.Boolean)
|
||||
duration: Mapped[Optional[float]] = mapped_column(sa.Float)
|
||||
created_at: Mapped[Optional[datetime]] = mapped_column(sa.DateTime(timezone=True))
|
||||
title: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
short_summary: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
long_summary: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
topics: Mapped[Optional[list]] = mapped_column(sa.JSON)
|
||||
events: Mapped[Optional[list]] = mapped_column(sa.JSON)
|
||||
participants: Mapped[Optional[list]] = mapped_column(sa.JSON)
|
||||
source_language: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
target_language: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
reviewed: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
audio_location: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="local"
|
||||
)
|
||||
user_id: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
share_mode: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="private"
|
||||
)
|
||||
meeting_id: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
recording_id: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
zulip_message_id: Mapped[Optional[int]] = mapped_column(sa.Integer)
|
||||
source_kind: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False
|
||||
) # Enum will be handled separately
|
||||
audio_deleted: Mapped[Optional[bool]] = mapped_column(sa.Boolean)
|
||||
room_id: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
webvtt: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("idx_transcript_recording_id", "recording_id"),
|
||||
sa.Index("idx_transcript_user_id", "user_id"),
|
||||
sa.Index("idx_transcript_created_at", "created_at"),
|
||||
sa.Index("idx_transcript_user_id_recording_id", "user_id", "recording_id"),
|
||||
sa.Index("idx_transcript_room_id", "room_id"),
|
||||
sa.Index("idx_transcript_source_kind", "source_kind"),
|
||||
sa.Index("idx_transcript_room_id_created_at", "room_id", "created_at"),
|
||||
)
|
||||
|
||||
|
||||
TranscriptModel.search_vector_en = sa.Column(
|
||||
"search_vector_en",
|
||||
TSVECTOR,
|
||||
sa.Computed(
|
||||
"setweight(to_tsvector('english', coalesce(title, '')), 'A') || "
|
||||
"setweight(to_tsvector('english', coalesce(long_summary, '')), 'B') || "
|
||||
"setweight(to_tsvector('english', coalesce(webvtt, '')), 'C')",
|
||||
persisted=True,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class RoomModel(Base):
|
||||
__tablename__ = "room"
|
||||
|
||||
id: Mapped[str] = mapped_column(sa.String, primary_key=True)
|
||||
name: Mapped[str] = mapped_column(sa.String, nullable=False, unique=True)
|
||||
user_id: Mapped[str] = mapped_column(sa.String, nullable=False)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
zulip_auto_post: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
zulip_stream: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
zulip_topic: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
is_locked: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
room_mode: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="normal"
|
||||
)
|
||||
recording_type: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="cloud"
|
||||
)
|
||||
recording_trigger: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="automatic-2nd-participant"
|
||||
)
|
||||
is_shared: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
webhook_url: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
webhook_secret: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
ics_url: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
ics_fetch_interval: Mapped[Optional[int]] = mapped_column(
|
||||
sa.Integer, server_default=sa.text("300")
|
||||
)
|
||||
ics_enabled: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
ics_last_sync: Mapped[Optional[datetime]] = mapped_column(
|
||||
sa.DateTime(timezone=True)
|
||||
)
|
||||
ics_last_etag: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("idx_room_is_shared", "is_shared"),
|
||||
sa.Index("idx_room_ics_enabled", "ics_enabled"),
|
||||
)
|
||||
|
||||
|
||||
class MeetingModel(Base):
|
||||
__tablename__ = "meeting"
|
||||
|
||||
id: Mapped[str] = mapped_column(sa.String, primary_key=True)
|
||||
room_name: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
room_url: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
host_room_url: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
start_date: Mapped[Optional[datetime]] = mapped_column(sa.DateTime(timezone=True))
|
||||
end_date: Mapped[Optional[datetime]] = mapped_column(sa.DateTime(timezone=True))
|
||||
room_id: Mapped[Optional[str]] = mapped_column(
|
||||
sa.String, sa.ForeignKey("room.id", ondelete="CASCADE")
|
||||
)
|
||||
is_locked: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
room_mode: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="normal"
|
||||
)
|
||||
recording_type: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="cloud"
|
||||
)
|
||||
recording_trigger: Mapped[str] = mapped_column(
|
||||
sa.String, nullable=False, server_default="automatic-2nd-participant"
|
||||
)
|
||||
num_clients: Mapped[int] = mapped_column(
|
||||
sa.Integer, nullable=False, server_default=sa.text("0")
|
||||
)
|
||||
is_active: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("true")
|
||||
)
|
||||
calendar_event_id: Mapped[Optional[str]] = mapped_column(
|
||||
sa.String,
|
||||
sa.ForeignKey(
|
||||
"calendar_event.id",
|
||||
ondelete="SET NULL",
|
||||
name="fk_meeting_calendar_event_id",
|
||||
),
|
||||
)
|
||||
calendar_metadata: Mapped[Optional[dict]] = mapped_column(JSONB)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("idx_meeting_room_id", "room_id"),
|
||||
sa.Index("idx_meeting_calendar_event", "calendar_event_id"),
|
||||
)
|
||||
|
||||
|
||||
class MeetingConsentModel(Base):
|
||||
__tablename__ = "meeting_consent"
|
||||
|
||||
id: Mapped[str] = mapped_column(sa.String, primary_key=True)
|
||||
meeting_id: Mapped[str] = mapped_column(
|
||||
sa.String, sa.ForeignKey("meeting.id", ondelete="CASCADE"), nullable=False
|
||||
)
|
||||
user_id: Mapped[Optional[str]] = mapped_column(sa.String)
|
||||
consent_given: Mapped[bool] = mapped_column(sa.Boolean, nullable=False)
|
||||
consent_timestamp: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
|
||||
|
||||
class RecordingModel(Base):
|
||||
__tablename__ = "recording"
|
||||
|
||||
id: Mapped[str] = mapped_column(sa.String, primary_key=True)
|
||||
meeting_id: Mapped[str] = mapped_column(
|
||||
sa.String, sa.ForeignKey("meeting.id", ondelete="CASCADE"), nullable=False
|
||||
)
|
||||
url: Mapped[str] = mapped_column(sa.String, nullable=False)
|
||||
object_key: Mapped[str] = mapped_column(sa.String, nullable=False)
|
||||
duration: Mapped[Optional[float]] = mapped_column(sa.Float)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
|
||||
__table_args__ = (sa.Index("idx_recording_meeting_id", "meeting_id"),)
|
||||
|
||||
|
||||
class CalendarEventModel(Base):
|
||||
__tablename__ = "calendar_event"
|
||||
|
||||
id: Mapped[str] = mapped_column(sa.String, primary_key=True)
|
||||
room_id: Mapped[str] = mapped_column(
|
||||
sa.String, sa.ForeignKey("room.id", ondelete="CASCADE"), nullable=False
|
||||
)
|
||||
ics_uid: Mapped[str] = mapped_column(sa.Text, nullable=False)
|
||||
title: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
description: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
start_time: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
end_time: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
attendees: Mapped[Optional[dict]] = mapped_column(JSONB)
|
||||
location: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
ics_raw_data: Mapped[Optional[str]] = mapped_column(sa.Text)
|
||||
last_synced: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
is_deleted: Mapped[bool] = mapped_column(
|
||||
sa.Boolean, nullable=False, server_default=sa.text("false")
|
||||
)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
updated_at: Mapped[datetime] = mapped_column(
|
||||
sa.DateTime(timezone=True), nullable=False
|
||||
)
|
||||
|
||||
__table_args__ = (
|
||||
sa.Index("idx_calendar_event_room_start", "room_id", "start_time"),
|
||||
)
|
||||
|
||||
|
||||
metadata = Base.metadata
|
||||
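These declarative models replace the earlier Core table definitions, so queries are written against the mapped classes. A minimal sketch, assuming get_session() from the db/__init__.py hunk above is available:

```python
from sqlalchemy import select

from reflector.db import get_session
from reflector.db.base import TranscriptModel


async def latest_transcripts(limit: int = 10):
    async for session in get_session():
        query = (
            select(TranscriptModel)
            .order_by(TranscriptModel.created_at.desc())
            .limit(limit)
        )
        result = await session.execute(query)
        # scalars() yields TranscriptModel instances rather than Row tuples
        return result.scalars().all()
```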
server/reflector/db/calendar_events.py (new file, 187 lines)
@@ -0,0 +1,187 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any
|
||||
|
||||
import sqlalchemy as sa
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from sqlalchemy import delete, select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db.base import CalendarEventModel
|
||||
from reflector.utils import generate_uuid4
|
||||
|
||||
|
||||
class CalendarEvent(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str = Field(default_factory=generate_uuid4)
|
||||
room_id: str
|
||||
ics_uid: str
|
||||
title: str | None = None
|
||||
description: str | None = None
|
||||
start_time: datetime
|
||||
end_time: datetime
|
||||
attendees: list[dict[str, Any]] | None = None
|
||||
location: str | None = None
|
||||
ics_raw_data: str | None = None
|
||||
last_synced: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||
is_deleted: bool = False
|
||||
created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||
updated_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
|
||||
|
||||
|
||||
class CalendarEventController:
|
||||
async def get_upcoming_events(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
room_id: str,
|
||||
current_time: datetime,
|
||||
buffer_minutes: int = 15,
|
||||
) -> list[CalendarEvent]:
|
||||
buffer_time = current_time + timedelta(minutes=buffer_minutes)
|
||||
|
||||
query = (
|
||||
select(CalendarEventModel)
|
||||
.where(
|
||||
sa.and_(
|
||||
CalendarEventModel.room_id == room_id,
|
||||
CalendarEventModel.start_time <= buffer_time,
|
||||
CalendarEventModel.end_time > current_time,
|
||||
)
|
||||
)
|
||||
.order_by(CalendarEventModel.start_time)
|
||||
)
|
||||
|
||||
result = await session.execute(query)
|
||||
return [CalendarEvent.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def get_by_id(
|
||||
self, session: AsyncSession, event_id: str
|
||||
) -> CalendarEvent | None:
|
||||
query = select(CalendarEventModel).where(CalendarEventModel.id == event_id)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return CalendarEvent.model_validate(row)
|
||||
|
||||
async def get_by_ics_uid(
|
||||
self, session: AsyncSession, room_id: str, ics_uid: str
|
||||
) -> CalendarEvent | None:
|
||||
query = select(CalendarEventModel).where(
|
||||
sa.and_(
|
||||
CalendarEventModel.room_id == room_id,
|
||||
CalendarEventModel.ics_uid == ics_uid,
|
||||
)
|
||||
)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return CalendarEvent.model_validate(row)
|
||||
|
||||
async def upsert(
|
||||
self, session: AsyncSession, event: CalendarEvent
|
||||
) -> CalendarEvent:
|
||||
existing = await self.get_by_ics_uid(session, event.room_id, event.ics_uid)
|
||||
|
||||
if existing:
|
||||
event.updated_at = datetime.now(timezone.utc)
|
||||
query = (
|
||||
update(CalendarEventModel)
|
||||
.where(CalendarEventModel.id == existing.id)
|
||||
.values(**event.model_dump(exclude={"id"}))
|
||||
)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
return event
|
||||
else:
|
||||
new_event = CalendarEventModel(**event.model_dump())
|
||||
session.add(new_event)
|
||||
await session.commit()
|
||||
return event
|
||||
|
||||
async def delete_old_events(
|
||||
self, session: AsyncSession, room_id: str, cutoff_date: datetime
|
||||
) -> int:
|
||||
query = delete(CalendarEventModel).where(
|
||||
sa.and_(
|
||||
CalendarEventModel.room_id == room_id,
|
||||
CalendarEventModel.end_time < cutoff_date,
|
||||
)
|
||||
)
|
||||
result = await session.execute(query)
|
||||
await session.commit()
|
||||
return result.rowcount
|
||||
|
||||
async def delete_events_not_in_list(
|
||||
self, session: AsyncSession, room_id: str, keep_ics_uids: list[str]
|
||||
) -> int:
|
||||
if not keep_ics_uids:
|
||||
query = delete(CalendarEventModel).where(
|
||||
CalendarEventModel.room_id == room_id
|
||||
)
|
||||
else:
|
||||
query = delete(CalendarEventModel).where(
|
||||
sa.and_(
|
||||
CalendarEventModel.room_id == room_id,
|
||||
CalendarEventModel.ics_uid.notin_(keep_ics_uids),
|
||||
)
|
||||
)
|
||||
|
||||
result = await session.execute(query)
|
||||
await session.commit()
|
||||
return result.rowcount
|
||||
|
||||
async def get_by_room(
|
||||
self, session: AsyncSession, room_id: str, include_deleted: bool = True
|
||||
) -> list[CalendarEvent]:
|
||||
query = select(CalendarEventModel).where(CalendarEventModel.room_id == room_id)
|
||||
if not include_deleted:
|
||||
query = query.where(CalendarEventModel.is_deleted == False)
|
||||
result = await session.execute(query)
|
||||
return [CalendarEvent.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def get_upcoming(
|
||||
self, session: AsyncSession, room_id: str, minutes_ahead: int = 120
|
||||
) -> list[CalendarEvent]:
|
||||
now = datetime.now(timezone.utc)
|
||||
buffer_time = now + timedelta(minutes=minutes_ahead)
|
||||
|
||||
query = (
|
||||
select(CalendarEventModel)
|
||||
.where(
|
||||
sa.and_(
|
||||
CalendarEventModel.room_id == room_id,
|
||||
CalendarEventModel.start_time <= buffer_time,
|
||||
CalendarEventModel.end_time > now,
|
||||
CalendarEventModel.is_deleted == False,
|
||||
)
|
||||
)
|
||||
.order_by(CalendarEventModel.start_time)
|
||||
)
|
||||
|
||||
result = await session.execute(query)
|
||||
return [CalendarEvent.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def soft_delete_missing(
|
||||
self, session: AsyncSession, room_id: str, current_ics_uids: list[str]
|
||||
) -> int:
|
||||
query = (
|
||||
update(CalendarEventModel)
|
||||
.where(
|
||||
sa.and_(
|
||||
CalendarEventModel.room_id == room_id,
|
||||
CalendarEventModel.ics_uid.notin_(current_ics_uids)
|
||||
if current_ics_uids
|
||||
else True,
|
||||
CalendarEventModel.end_time > datetime.now(timezone.utc),
|
||||
)
|
||||
)
|
||||
.values(is_deleted=True)
|
||||
)
|
||||
result = await session.execute(query)
|
||||
await session.commit()
|
||||
return result.rowcount
|
||||
|
||||
|
||||
calendar_events_controller = CalendarEventController()
|
||||
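A minimal usage sketch for the new controller, assuming the module is importable as reflector.db.calendar_events; the room id and ICS UID values are made up for illustration:

```python
from datetime import datetime, timedelta, timezone

from reflector.db import get_session
from reflector.db.calendar_events import CalendarEvent, calendar_events_controller


async def sync_one_event() -> None:
    now = datetime.now(timezone.utc)
    event = CalendarEvent(
        room_id="room-123",           # hypothetical room id
        ics_uid="evt-1@example.com",  # hypothetical ICS UID
        title="Weekly sync",
        start_time=now,
        end_time=now + timedelta(hours=1),
    )
    async for session in get_session():
        # upsert() matches on (room_id, ics_uid): updates if found, inserts otherwise
        await calendar_events_controller.upsert(session, event)
```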
@@ -1,75 +1,19 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
from typing import Any, Literal
|
||||
|
||||
import sqlalchemy as sa
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from sqlalchemy import select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db import get_database, metadata
|
||||
from reflector.db.base import MeetingConsentModel, MeetingModel
|
||||
from reflector.db.rooms import Room
|
||||
from reflector.utils import generate_uuid4
|
||||
|
||||
meetings = sa.Table(
|
||||
"meeting",
|
||||
metadata,
|
||||
sa.Column("id", sa.String, primary_key=True),
|
||||
sa.Column("room_name", sa.String),
|
||||
sa.Column("room_url", sa.String),
|
||||
sa.Column("host_room_url", sa.String),
|
||||
sa.Column("start_date", sa.DateTime(timezone=True)),
|
||||
sa.Column("end_date", sa.DateTime(timezone=True)),
|
||||
sa.Column(
|
||||
"room_id",
|
||||
sa.String,
|
||||
sa.ForeignKey("room.id", ondelete="CASCADE"),
|
||||
nullable=True,
|
||||
),
|
||||
sa.Column("is_locked", sa.Boolean, nullable=False, server_default=sa.false()),
|
||||
sa.Column("room_mode", sa.String, nullable=False, server_default="normal"),
|
||||
sa.Column("recording_type", sa.String, nullable=False, server_default="cloud"),
|
||||
sa.Column(
|
||||
"recording_trigger",
|
||||
sa.String,
|
||||
nullable=False,
|
||||
server_default="automatic-2nd-participant",
|
||||
),
|
||||
sa.Column(
|
||||
"num_clients",
|
||||
sa.Integer,
|
||||
nullable=False,
|
||||
server_default=sa.text("0"),
|
||||
),
|
||||
sa.Column(
|
||||
"is_active",
|
||||
sa.Boolean,
|
||||
nullable=False,
|
||||
server_default=sa.true(),
|
||||
),
|
||||
sa.Index("idx_meeting_room_id", "room_id"),
|
||||
sa.Index(
|
||||
"idx_one_active_meeting_per_room",
|
||||
"room_id",
|
||||
unique=True,
|
||||
postgresql_where=sa.text("is_active = true"),
|
||||
),
|
||||
)
|
||||
|
||||
meeting_consent = sa.Table(
|
||||
"meeting_consent",
|
||||
metadata,
|
||||
sa.Column("id", sa.String, primary_key=True),
|
||||
sa.Column(
|
||||
"meeting_id",
|
||||
sa.String,
|
||||
sa.ForeignKey("meeting.id", ondelete="CASCADE"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column("user_id", sa.String),
|
||||
sa.Column("consent_given", sa.Boolean, nullable=False),
|
||||
sa.Column("consent_timestamp", sa.DateTime(timezone=True), nullable=False),
|
||||
)
|
||||
|
||||
|
||||
class MeetingConsent(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str = Field(default_factory=generate_uuid4)
|
||||
meeting_id: str
|
||||
user_id: str | None = None
|
||||
@@ -78,6 +22,8 @@ class MeetingConsent(BaseModel):
|
||||
|
||||
|
||||
class Meeting(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str
|
||||
room_name: str
|
||||
room_url: str
|
||||
@@ -92,11 +38,15 @@ class Meeting(BaseModel):
|
||||
"none", "prompt", "automatic", "automatic-2nd-participant"
|
||||
] = "automatic-2nd-participant"
|
||||
num_clients: int = 0
|
||||
is_active: bool = True
|
||||
calendar_event_id: str | None = None
|
||||
calendar_metadata: dict[str, Any] | None = None
|
||||
|
||||
|
||||
class MeetingController:
|
||||
async def create(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
id: str,
|
||||
room_name: str,
|
||||
room_url: str,
|
||||
@@ -104,6 +54,8 @@ class MeetingController:
|
||||
start_date: datetime,
|
||||
end_date: datetime,
|
||||
room: Room,
|
||||
calendar_event_id: str | None = None,
|
||||
calendar_metadata: dict[str, Any] | None = None,
|
||||
):
|
||||
meeting = Meeting(
|
||||
id=id,
|
||||
@@ -117,113 +69,201 @@ class MeetingController:
|
||||
room_mode=room.room_mode,
|
||||
recording_type=room.recording_type,
|
||||
recording_trigger=room.recording_trigger,
|
||||
calendar_event_id=calendar_event_id,
|
||||
calendar_metadata=calendar_metadata,
|
||||
)
|
||||
query = meetings.insert().values(**meeting.model_dump())
|
||||
await get_database().execute(query)
|
||||
new_meeting = MeetingModel(**meeting.model_dump())
|
||||
session.add(new_meeting)
|
||||
await session.commit()
|
||||
return meeting
|
||||
|
||||
async def get_all_active(self) -> list[Meeting]:
|
||||
query = meetings.select().where(meetings.c.is_active)
|
||||
return await get_database().fetch_all(query)
|
||||
async def get_all_active(self, session: AsyncSession) -> list[Meeting]:
|
||||
query = select(MeetingModel).where(MeetingModel.is_active)
|
||||
result = await session.execute(query)
|
||||
return [Meeting.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def get_by_room_name(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
room_name: str,
|
||||
) -> Meeting | None:
|
||||
query = meetings.select().where(meetings.c.room_name == room_name)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
return None
|
||||
|
||||
return Meeting(**result)
|
||||
|
||||
async def get_active(self, room: Room, current_time: datetime) -> Meeting | None:
|
||||
end_date = getattr(meetings.c, "end_date")
|
||||
"""
|
||||
Get a meeting by room name.
|
||||
For backward compatibility, returns the most recent meeting.
|
||||
"""
|
||||
query = (
|
||||
meetings.select()
|
||||
select(MeetingModel)
|
||||
.where(MeetingModel.room_name == room_name)
|
||||
.order_by(MeetingModel.end_date.desc())
|
||||
)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Meeting.model_validate(row)
|
||||
|
||||
async def get_active(
|
||||
self, session: AsyncSession, room: Room, current_time: datetime
|
||||
) -> Meeting | None:
|
||||
"""
|
||||
Get latest active meeting for a room.
|
||||
For backward compatibility, returns the most recent active meeting.
|
||||
"""
|
||||
query = (
|
||||
select(MeetingModel)
|
||||
.where(
|
||||
sa.and_(
|
||||
meetings.c.room_id == room.id,
|
||||
meetings.c.end_date > current_time,
|
||||
meetings.c.is_active,
|
||||
MeetingModel.room_id == room.id,
|
||||
MeetingModel.end_date > current_time,
|
||||
MeetingModel.is_active,
|
||||
)
|
||||
)
|
||||
.order_by(end_date.desc())
|
||||
.order_by(MeetingModel.end_date.desc())
|
||||
)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Meeting.model_validate(row)
|
||||
|
||||
return Meeting(**result)
|
||||
async def get_all_active_for_room(
|
||||
self, session: AsyncSession, room: Room, current_time: datetime
|
||||
) -> list[Meeting]:
|
||||
query = (
|
||||
select(MeetingModel)
|
||||
.where(
|
||||
sa.and_(
|
||||
MeetingModel.room_id == room.id,
|
||||
MeetingModel.end_date > current_time,
|
||||
MeetingModel.is_active,
|
||||
)
|
||||
)
|
||||
.order_by(MeetingModel.end_date.desc())
|
||||
)
|
||||
result = await session.execute(query)
|
||||
return [Meeting.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def get_by_id(self, meeting_id: str, **kwargs) -> Meeting | None:
|
||||
query = meetings.select().where(meetings.c.id == meeting_id)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
async def get_active_by_calendar_event(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
room: Room,
|
||||
calendar_event_id: str,
|
||||
current_time: datetime,
|
||||
) -> Meeting | None:
|
||||
"""
|
||||
Get active meeting for a specific calendar event.
|
||||
"""
|
||||
query = select(MeetingModel).where(
|
||||
sa.and_(
|
||||
MeetingModel.room_id == room.id,
|
||||
MeetingModel.calendar_event_id == calendar_event_id,
|
||||
MeetingModel.end_date > current_time,
|
||||
MeetingModel.is_active,
|
||||
)
|
||||
)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Meeting(**result)
|
||||
return Meeting.model_validate(row)
|
||||
|
||||
async def update_meeting(self, meeting_id: str, **kwargs):
|
||||
query = meetings.update().where(meetings.c.id == meeting_id).values(**kwargs)
|
||||
await get_database().execute(query)
|
||||
async def get_by_id(
|
||||
self, session: AsyncSession, meeting_id: str, **kwargs
|
||||
) -> Meeting | None:
|
||||
query = select(MeetingModel).where(MeetingModel.id == meeting_id)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Meeting.model_validate(row)
|
||||
|
||||
async def get_by_calendar_event(
|
||||
self, session: AsyncSession, calendar_event_id: str
|
||||
) -> Meeting | None:
|
||||
query = select(MeetingModel).where(
|
||||
MeetingModel.calendar_event_id == calendar_event_id
|
||||
)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Meeting.model_validate(row)
|
||||
|
||||
async def update_meeting(self, session: AsyncSession, meeting_id: str, **kwargs):
|
||||
query = (
|
||||
update(MeetingModel).where(MeetingModel.id == meeting_id).values(**kwargs)
|
||||
)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
|
||||
|
||||
class MeetingConsentController:
|
||||
async def get_by_meeting_id(self, meeting_id: str) -> list[MeetingConsent]:
|
||||
query = meeting_consent.select().where(
|
||||
meeting_consent.c.meeting_id == meeting_id
|
||||
async def get_by_meeting_id(
|
||||
self, session: AsyncSession, meeting_id: str
|
||||
) -> list[MeetingConsent]:
|
||||
query = select(MeetingConsentModel).where(
|
||||
MeetingConsentModel.meeting_id == meeting_id
|
||||
)
|
||||
results = await get_database().fetch_all(query)
|
||||
return [MeetingConsent(**result) for result in results]
|
||||
result = await session.execute(query)
|
||||
return [MeetingConsent.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def get_by_meeting_and_user(
|
||||
self, meeting_id: str, user_id: str
|
||||
self, session: AsyncSession, meeting_id: str, user_id: str
|
||||
) -> MeetingConsent | None:
|
||||
"""Get existing consent for a specific user and meeting"""
|
||||
query = meeting_consent.select().where(
|
||||
meeting_consent.c.meeting_id == meeting_id,
|
||||
meeting_consent.c.user_id == user_id,
|
||||
query = select(MeetingConsentModel).where(
|
||||
sa.and_(
|
||||
MeetingConsentModel.meeting_id == meeting_id,
|
||||
MeetingConsentModel.user_id == user_id,
|
||||
)
|
||||
)
|
||||
result = await get_database().fetch_one(query)
|
||||
if result is None:
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if row is None:
|
||||
return None
|
||||
return MeetingConsent(**result)
|
||||
return MeetingConsent.model_validate(row)
|
||||
|
||||
async def upsert(self, consent: MeetingConsent) -> MeetingConsent:
|
||||
"""Create new consent or update existing one for authenticated users"""
|
||||
async def upsert(
|
||||
self, session: AsyncSession, consent: MeetingConsent
|
||||
) -> MeetingConsent:
|
||||
if consent.user_id:
|
||||
# For authenticated users, check if consent already exists
|
||||
# not transactional but we're ok with that; the consents ain't deleted anyways
|
||||
existing = await self.get_by_meeting_and_user(
|
||||
consent.meeting_id, consent.user_id
|
||||
session, consent.meeting_id, consent.user_id
|
||||
)
|
||||
if existing:
|
||||
query = (
|
||||
meeting_consent.update()
|
||||
.where(meeting_consent.c.id == existing.id)
|
||||
update(MeetingConsentModel)
|
||||
.where(MeetingConsentModel.id == existing.id)
|
||||
.values(
|
||||
consent_given=consent.consent_given,
|
||||
consent_timestamp=consent.consent_timestamp,
|
||||
)
|
||||
)
|
||||
await get_database().execute(query)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
|
||||
existing.consent_given = consent.consent_given
|
||||
existing.consent_timestamp = consent.consent_timestamp
|
||||
return existing
|
||||
existing.consent_given = consent.consent_given
|
||||
existing.consent_timestamp = consent.consent_timestamp
|
||||
return existing
|
||||
|
||||
query = meeting_consent.insert().values(**consent.model_dump())
|
||||
await get_database().execute(query)
|
||||
new_consent = MeetingConsentModel(**consent.model_dump())
|
||||
session.add(new_consent)
|
||||
await session.commit()
|
||||
return consent
|
||||
|
||||
async def has_any_denial(self, meeting_id: str) -> bool:
|
||||
async def has_any_denial(self, session: AsyncSession, meeting_id: str) -> bool:
|
||||
"""Check if any participant denied consent for this meeting"""
|
||||
query = meeting_consent.select().where(
|
||||
meeting_consent.c.meeting_id == meeting_id,
|
||||
meeting_consent.c.consent_given.is_(False),
|
||||
query = select(MeetingConsentModel).where(
|
||||
sa.and_(
|
||||
MeetingConsentModel.meeting_id == meeting_id,
|
||||
MeetingConsentModel.consent_given.is_(False),
|
||||
)
|
||||
)
|
||||
result = await get_database().fetch_one(query)
|
||||
return result is not None
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
return row is not None
|
||||
|
||||
|
||||
meetings_controller = MeetingController()
|
||||
|
||||
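The meeting controller methods now take the AsyncSession as their first argument instead of reaching for get_database(). A hedged sketch of the new calling convention, using names from the hunks above:

```python
from datetime import datetime, timezone

from reflector.db import get_session
from reflector.db.meetings import meetings_controller
from reflector.db.rooms import rooms_controller


async def find_active_meeting(room_name: str):
    async for session in get_session():
        room = await rooms_controller.get_by_name(session, room_name)
        if room is None:
            return None
        # returns the most recent still-active meeting for the room, or None
        return await meetings_controller.get_active(
            session, room, datetime.now(timezone.utc)
        )
```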
@@ -1,61 +1,79 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import sqlalchemy as sa
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db import get_database, metadata
|
||||
from reflector.db.base import RecordingModel
|
||||
from reflector.utils import generate_uuid4
|
||||
|
||||
recordings = sa.Table(
|
||||
"recording",
|
||||
metadata,
|
||||
sa.Column("id", sa.String, primary_key=True),
|
||||
sa.Column("bucket_name", sa.String, nullable=False),
|
||||
sa.Column("object_key", sa.String, nullable=False),
|
||||
sa.Column("recorded_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column(
|
||||
"status",
|
||||
sa.String,
|
||||
nullable=False,
|
||||
server_default="pending",
|
||||
),
|
||||
sa.Column("meeting_id", sa.String),
|
||||
sa.Index("idx_recording_meeting_id", "meeting_id"),
|
||||
)
|
||||
|
||||
|
||||
class Recording(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str = Field(default_factory=generate_uuid4)
|
||||
bucket_name: str
|
||||
meeting_id: str
|
||||
url: str
|
||||
object_key: str
|
||||
recorded_at: datetime
|
||||
status: Literal["pending", "processing", "completed", "failed"] = "pending"
|
||||
meeting_id: str | None = None
|
||||
duration: float | None = None
|
||||
created_at: datetime
|
||||
|
||||
|
||||
class RecordingController:
|
||||
async def create(self, recording: Recording):
|
||||
query = recordings.insert().values(**recording.model_dump())
|
||||
await get_database().execute(query)
|
||||
async def create(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
meeting_id: str,
|
||||
url: str,
|
||||
object_key: str,
|
||||
duration: float | None = None,
|
||||
created_at: datetime | None = None,
|
||||
):
|
||||
if created_at is None:
|
||||
created_at = datetime.now(timezone.utc)
|
||||
|
||||
recording = Recording(
|
||||
meeting_id=meeting_id,
|
||||
url=url,
|
||||
object_key=object_key,
|
||||
duration=duration,
|
||||
created_at=created_at,
|
||||
)
|
||||
new_recording = RecordingModel(**recording.model_dump())
|
||||
session.add(new_recording)
|
||||
await session.commit()
|
||||
return recording
|
||||
|
||||
async def get_by_id(self, id: str) -> Recording:
|
||||
query = recordings.select().where(recordings.c.id == id)
|
||||
result = await get_database().fetch_one(query)
|
||||
return Recording(**result) if result else None
|
||||
async def get_by_id(
|
||||
self, session: AsyncSession, recording_id: str
|
||||
) -> Recording | None:
|
||||
"""
|
||||
Get a recording by id
|
||||
"""
|
||||
query = select(RecordingModel).where(RecordingModel.id == recording_id)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Recording.model_validate(row)
|
||||
|
||||
async def get_by_object_key(self, bucket_name: str, object_key: str) -> Recording:
|
||||
query = recordings.select().where(
|
||||
recordings.c.bucket_name == bucket_name,
|
||||
recordings.c.object_key == object_key,
|
||||
)
|
||||
result = await get_database().fetch_one(query)
|
||||
return Recording(**result) if result else None
|
||||
async def get_by_meeting_id(
|
||||
self, session: AsyncSession, meeting_id: str
|
||||
) -> list[Recording]:
|
||||
"""
|
||||
Get all recordings for a meeting
|
||||
"""
|
||||
query = select(RecordingModel).where(RecordingModel.meeting_id == meeting_id)
|
||||
result = await session.execute(query)
|
||||
return [Recording.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def remove_by_id(self, id: str) -> None:
|
||||
query = recordings.delete().where(recordings.c.id == id)
|
||||
await get_database().execute(query)
|
||||
async def remove_by_id(self, session: AsyncSession, recording_id: str) -> None:
|
||||
"""
|
||||
Remove a recording by id
|
||||
"""
|
||||
query = delete(RecordingModel).where(RecordingModel.id == recording_id)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
|
||||
|
||||
recordings_controller = RecordingController()
|
||||
|
||||
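RecordingController.create() now builds the Recording model itself from keyword arguments. A sketch with placeholder URL and object key values:

```python
from reflector.db import get_session
from reflector.db.recordings import recordings_controller


async def register_recording(meeting_id: str) -> None:
    async for session in get_session():
        recording = await recordings_controller.create(
            session,
            meeting_id=meeting_id,
            url="https://example.com/recordings/demo.mp4",  # placeholder URL
            object_key="recordings/demo.mp4",               # placeholder key
            duration=42.0,
        )
        print(recording.id)
```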
@@ -3,51 +3,19 @@ from datetime import datetime, timezone
|
||||
from sqlite3 import IntegrityError
|
||||
from typing import Literal
|
||||
|
||||
import sqlalchemy
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy.sql import false, or_
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from sqlalchemy import delete, select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.sql import or_
|
||||
|
||||
from reflector.db import get_database, metadata
|
||||
from reflector.db.base import RoomModel
|
||||
from reflector.utils import generate_uuid4
|
||||
|
||||
rooms = sqlalchemy.Table(
|
||||
"room",
|
||||
metadata,
|
||||
sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
|
||||
sqlalchemy.Column("name", sqlalchemy.String, nullable=False, unique=True),
|
||||
sqlalchemy.Column("user_id", sqlalchemy.String, nullable=False),
|
||||
sqlalchemy.Column("created_at", sqlalchemy.DateTime(timezone=True), nullable=False),
|
||||
sqlalchemy.Column(
|
||||
"zulip_auto_post", sqlalchemy.Boolean, nullable=False, server_default=false()
|
||||
),
|
||||
sqlalchemy.Column("zulip_stream", sqlalchemy.String),
|
||||
sqlalchemy.Column("zulip_topic", sqlalchemy.String),
|
||||
sqlalchemy.Column(
|
||||
"is_locked", sqlalchemy.Boolean, nullable=False, server_default=false()
|
||||
),
|
||||
sqlalchemy.Column(
|
||||
"room_mode", sqlalchemy.String, nullable=False, server_default="normal"
|
||||
),
|
||||
sqlalchemy.Column(
|
||||
"recording_type", sqlalchemy.String, nullable=False, server_default="cloud"
|
||||
),
|
||||
sqlalchemy.Column(
|
||||
"recording_trigger",
|
||||
sqlalchemy.String,
|
||||
nullable=False,
|
||||
server_default="automatic-2nd-participant",
|
||||
),
|
||||
sqlalchemy.Column(
|
||||
"is_shared", sqlalchemy.Boolean, nullable=False, server_default=false()
|
||||
),
|
||||
sqlalchemy.Column("webhook_url", sqlalchemy.String, nullable=True),
|
||||
sqlalchemy.Column("webhook_secret", sqlalchemy.String, nullable=True),
|
||||
sqlalchemy.Index("idx_room_is_shared", "is_shared"),
|
||||
)
|
||||
|
||||
|
||||
class Room(BaseModel):
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str = Field(default_factory=generate_uuid4)
|
||||
name: str
|
||||
user_id: str
|
||||
@@ -64,11 +32,17 @@ class Room(BaseModel):
|
||||
is_shared: bool = False
|
||||
webhook_url: str | None = None
|
||||
webhook_secret: str | None = None
|
||||
ics_url: str | None = None
|
||||
ics_fetch_interval: int = 300
|
||||
ics_enabled: bool = False
|
||||
ics_last_sync: datetime | None = None
|
||||
ics_last_etag: str | None = None
|
||||
|
||||
|
||||
class RoomController:
|
||||
async def get_all(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
user_id: str | None = None,
|
||||
order_by: str | None = None,
|
||||
return_query: bool = False,
|
||||
@@ -82,14 +56,14 @@ class RoomController:
|
||||
Parameters:
|
||||
- `order_by`: field to order by, e.g. "-created_at"
|
||||
"""
|
||||
query = rooms.select()
|
||||
query = select(RoomModel)
|
||||
if user_id is not None:
|
||||
query = query.where(or_(rooms.c.user_id == user_id, rooms.c.is_shared))
|
||||
query = query.where(or_(RoomModel.user_id == user_id, RoomModel.is_shared))
|
||||
else:
|
||||
query = query.where(rooms.c.is_shared)
|
||||
query = query.where(RoomModel.is_shared)
|
||||
|
||||
if order_by is not None:
|
||||
field = getattr(rooms.c, order_by[1:])
|
||||
field = getattr(RoomModel, order_by[1:])
|
||||
if order_by.startswith("-"):
|
||||
field = field.desc()
|
||||
query = query.order_by(field)
|
||||
@@ -97,11 +71,12 @@ class RoomController:
|
||||
if return_query:
|
||||
return query
|
||||
|
||||
results = await get_database().fetch_all(query)
|
||||
return results
|
||||
result = await session.execute(query)
|
||||
return [Room.model_validate(row) for row in result.scalars().all()]
|
||||
|
||||
async def add(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
name: str,
|
||||
user_id: str,
|
||||
zulip_auto_post: bool,
|
||||
@@ -114,6 +89,9 @@ class RoomController:
|
||||
is_shared: bool,
|
||||
webhook_url: str = "",
|
||||
webhook_secret: str = "",
|
||||
ics_url: str | None = None,
|
||||
ics_fetch_interval: int = 300,
|
||||
ics_enabled: bool = False,
|
||||
):
|
||||
"""
|
||||
Add a new room
|
||||
@@ -134,24 +112,31 @@ class RoomController:
|
||||
is_shared=is_shared,
|
||||
webhook_url=webhook_url,
|
||||
webhook_secret=webhook_secret,
|
||||
ics_url=ics_url,
|
||||
ics_fetch_interval=ics_fetch_interval,
|
||||
ics_enabled=ics_enabled,
|
||||
)
|
||||
query = rooms.insert().values(**room.model_dump())
|
||||
new_room = RoomModel(**room.model_dump())
|
||||
session.add(new_room)
|
||||
try:
|
||||
await get_database().execute(query)
|
||||
await session.flush()
|
||||
except IntegrityError:
|
||||
raise HTTPException(status_code=400, detail="Room name is not unique")
|
||||
return room
|
||||
|
||||
async def update(self, room: Room, values: dict, mutate=True):
|
||||
async def update(
|
||||
self, session: AsyncSession, room: Room, values: dict, mutate=True
|
||||
):
|
||||
"""
|
||||
Update a room fields with key/values in values
|
||||
"""
|
||||
if values.get("webhook_url") and not values.get("webhook_secret"):
|
||||
values["webhook_secret"] = secrets.token_urlsafe(32)
|
||||
|
||||
query = rooms.update().where(rooms.c.id == room.id).values(**values)
|
||||
query = update(RoomModel).where(RoomModel.id == room.id).values(**values)
|
||||
try:
|
||||
await get_database().execute(query)
|
||||
await session.execute(query)
|
||||
await session.flush()
|
||||
except IntegrityError:
|
||||
raise HTTPException(status_code=400, detail="Room name is not unique")
|
||||
|
||||
@@ -159,60 +144,79 @@ class RoomController:
|
||||
for key, value in values.items():
|
||||
setattr(room, key, value)
|
||||
|
||||
async def get_by_id(self, room_id: str, **kwargs) -> Room | None:
|
||||
async def get_by_id(
|
||||
self, session: AsyncSession, room_id: str, **kwargs
|
||||
) -> Room | None:
|
||||
"""
|
||||
Get a room by id
|
||||
"""
|
||||
query = rooms.select().where(rooms.c.id == room_id)
|
||||
query = select(RoomModel).where(RoomModel.id == room_id)
|
||||
if "user_id" in kwargs:
|
||||
query = query.where(rooms.c.user_id == kwargs["user_id"])
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
query = query.where(RoomModel.user_id == kwargs["user_id"])
|
||||
result = await session.execute(query)
|
||||
row = result.scalars().first()
|
||||
if not row:
|
||||
return None
|
||||
return Room(**result)
|
||||
return Room.model_validate(row)
|
||||
|
||||
async def get_by_name(self, room_name: str, **kwargs) -> Room | None:
|
||||
async def get_by_name(
|
||||
self, session: AsyncSession, room_name: str, **kwargs
|
||||
) -> Room | None:
|
||||
"""
|
||||
Get a room by name
|
||||
"""
|
||||
query = rooms.select().where(rooms.c.name == room_name)
|
||||
query = select(RoomModel).where(RoomModel.name == room_name)
|
||||
if "user_id" in kwargs:
|
||||
query = query.where(rooms.c.user_id == kwargs["user_id"])
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
query = query.where(RoomModel.user_id == kwargs["user_id"])
|
||||
result = await session.execute(query)
|
||||
row = result.scalars().first()
|
||||
if not row:
|
||||
return None
|
||||
return Room(**result)
|
||||
return Room.model_validate(row)
|
||||
|
||||
async def get_by_id_for_http(self, meeting_id: str, user_id: str | None) -> Room:
|
||||
async def get_by_id_for_http(
|
||||
self, session: AsyncSession, meeting_id: str, user_id: str | None
|
||||
) -> Room:
|
||||
"""
|
||||
Get a room by ID for HTTP request.
|
||||
|
||||
If not found, it will raise a 404 error.
|
||||
"""
|
||||
query = rooms.select().where(rooms.c.id == meeting_id)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
query = select(RoomModel).where(RoomModel.id == meeting_id)
|
||||
result = await session.execute(query)
|
||||
row = result.scalars().first()
|
||||
if not row:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
room = Room(**result)
|
||||
room = Room.model_validate(row)
|
||||
|
||||
return room
|
||||
|
||||
async def get_ics_enabled(self, session: AsyncSession) -> list[Room]:
|
||||
query = select(RoomModel).where(
|
||||
RoomModel.ics_enabled == True, RoomModel.ics_url != None
|
||||
)
|
||||
result = await session.execute(query)
|
||||
results = result.scalars().all()
|
||||
return [Room(**row.__dict__) for row in results]
|
||||
|
||||
async def remove_by_id(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
room_id: str,
|
||||
user_id: str | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Remove a room by id
|
||||
"""
|
||||
room = await self.get_by_id(room_id, user_id=user_id)
|
||||
room = await self.get_by_id(session, room_id, user_id=user_id)
|
||||
if not room:
|
||||
return
|
||||
if user_id is not None and room.user_id != user_id:
|
||||
return
|
||||
query = rooms.delete().where(rooms.c.id == room_id)
|
||||
await get_database().execute(query)
|
||||
query = delete(RoomModel).where(RoomModel.id == room_id)
|
||||
await session.execute(query)
|
||||
await session.flush()
|
||||
|
||||
|
||||
rooms_controller = RoomController()
|
||||
|
||||
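A small sketch of the new get_ics_enabled() helper, assuming a calendar sync worker iterates rooms like this:

```python
from reflector.db import get_session
from reflector.db.rooms import rooms_controller


async def list_calendar_rooms() -> None:
    async for session in get_session():
        # only rooms with ics_enabled=True and a non-null ics_url are returned
        for room in await rooms_controller.get_ics_enabled(session):
            print(room.name, room.ics_url, room.ics_fetch_interval)
```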
@@ -8,7 +8,6 @@ from typing import Annotated, Any, Dict, Iterator
|
||||
|
||||
import sqlalchemy
|
||||
import webvtt
|
||||
from databases.interfaces import Record as DbRecord
|
||||
from fastapi import HTTPException
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
@@ -20,11 +19,10 @@ from pydantic import (
|
||||
constr,
|
||||
field_serializer,
|
||||
)
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db import get_database
|
||||
from reflector.db.rooms import rooms
|
||||
from reflector.db.transcripts import SourceKind, TranscriptStatus, transcripts
|
||||
from reflector.db.utils import is_postgresql
|
||||
from reflector.db.base import RoomModel, TranscriptModel
|
||||
from reflector.db.transcripts import SourceKind, TranscriptStatus
|
||||
from reflector.logger import logger
|
||||
from reflector.utils.string import NonEmptyString, try_parse_non_empty_string
|
||||
|
||||
@@ -331,36 +329,30 @@ class SearchController:
|
||||
|
||||
@classmethod
|
||||
async def search_transcripts(
|
||||
cls, params: SearchParameters
|
||||
cls, session: AsyncSession, params: SearchParameters
|
||||
) -> tuple[list[SearchResult], int]:
|
||||
"""
|
||||
Full-text search for transcripts using PostgreSQL tsvector.
|
||||
Returns (results, total_count).
|
||||
"""
|
||||
|
||||
if not is_postgresql():
|
||||
logger.warning(
|
||||
"Full-text search requires PostgreSQL. Returning empty results."
|
||||
)
|
||||
return [], 0
|
||||
|
||||
base_columns = [
|
||||
transcripts.c.id,
|
||||
transcripts.c.title,
|
||||
transcripts.c.created_at,
|
||||
transcripts.c.duration,
|
||||
transcripts.c.status,
|
||||
transcripts.c.user_id,
|
||||
transcripts.c.room_id,
|
||||
transcripts.c.source_kind,
|
||||
transcripts.c.webvtt,
|
||||
transcripts.c.long_summary,
|
||||
TranscriptModel.id,
|
||||
TranscriptModel.title,
|
||||
TranscriptModel.created_at,
|
||||
TranscriptModel.duration,
|
||||
TranscriptModel.status,
|
||||
TranscriptModel.user_id,
|
||||
TranscriptModel.room_id,
|
||||
TranscriptModel.source_kind,
|
||||
TranscriptModel.webvtt,
|
||||
TranscriptModel.long_summary,
|
||||
sqlalchemy.case(
|
||||
(
|
||||
transcripts.c.room_id.isnot(None) & rooms.c.id.is_(None),
|
||||
TranscriptModel.room_id.isnot(None) & RoomModel.id.is_(None),
|
||||
"Deleted Room",
|
||||
),
|
||||
else_=rooms.c.name,
|
||||
else_=RoomModel.name,
|
||||
).label("room_name"),
|
||||
]
|
||||
search_query = None
|
||||
@@ -369,7 +361,7 @@ class SearchController:
|
||||
"english", params.query_text
|
||||
)
|
||||
rank_column = sqlalchemy.func.ts_rank(
|
||||
transcripts.c.search_vector_en,
|
||||
TranscriptModel.search_vector_en,
|
||||
search_query,
|
||||
32, # normalization flag: rank/(rank+1) for 0-1 range
|
||||
).label("rank")
|
||||
@@ -377,47 +369,51 @@ class SearchController:
|
||||
rank_column = sqlalchemy.cast(1.0, sqlalchemy.Float).label("rank")
|
||||
|
||||
columns = base_columns + [rank_column]
|
||||
base_query = sqlalchemy.select(columns).select_from(
|
||||
transcripts.join(rooms, transcripts.c.room_id == rooms.c.id, isouter=True)
|
||||
base_query = (
|
||||
sqlalchemy.select(*columns)
|
||||
.select_from(TranscriptModel)
|
||||
.outerjoin(RoomModel, TranscriptModel.room_id == RoomModel.id)
|
||||
)
|
||||
|
||||
if params.query_text is not None:
|
||||
# because already initialized based on params.query_text presence above
|
||||
assert search_query is not None
|
||||
base_query = base_query.where(
|
||||
transcripts.c.search_vector_en.op("@@")(search_query)
|
||||
TranscriptModel.search_vector_en.op("@@")(search_query)
|
||||
)
|
||||
|
||||
if params.user_id:
|
||||
base_query = base_query.where(
|
||||
sqlalchemy.or_(
|
||||
transcripts.c.user_id == params.user_id, rooms.c.is_shared
|
||||
TranscriptModel.user_id == params.user_id, RoomModel.is_shared
|
||||
)
|
||||
)
|
||||
else:
|
||||
base_query = base_query.where(rooms.c.is_shared)
|
||||
base_query = base_query.where(RoomModel.is_shared)
|
||||
if params.room_id:
|
||||
base_query = base_query.where(transcripts.c.room_id == params.room_id)
|
||||
base_query = base_query.where(TranscriptModel.room_id == params.room_id)
|
||||
if params.source_kind:
|
||||
base_query = base_query.where(
|
||||
transcripts.c.source_kind == params.source_kind
|
||||
TranscriptModel.source_kind == params.source_kind
|
||||
)
|
||||
|
||||
if params.query_text is not None:
|
||||
order_by = sqlalchemy.desc(sqlalchemy.text("rank"))
|
||||
else:
|
||||
order_by = sqlalchemy.desc(transcripts.c.created_at)
|
||||
order_by = sqlalchemy.desc(TranscriptModel.created_at)
|
||||
|
||||
query = base_query.order_by(order_by).limit(params.limit).offset(params.offset)
|
||||
|
||||
rs = await get_database().fetch_all(query)
|
||||
result = await session.execute(query)
|
||||
rs = result.mappings().all()
|
||||
|
||||
count_query = sqlalchemy.select([sqlalchemy.func.count()]).select_from(
|
||||
count_query = sqlalchemy.select(sqlalchemy.func.count()).select_from(
|
||||
base_query.alias("search_results")
|
||||
)
|
||||
total = await get_database().fetch_val(count_query)
|
||||
count_result = await session.execute(count_query)
|
||||
total = count_result.scalar()
|
||||
|
||||
def _process_result(r: DbRecord) -> SearchResult:
|
||||
def _process_result(r: dict) -> SearchResult:
|
||||
r_dict: Dict[str, Any] = dict(r)
|
||||
|
||||
webvtt_raw: str | None = r_dict.pop("webvtt", None)
|
||||
|
||||
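search_transcripts() now receives the session explicitly. A hedged sketch; the module path and the SearchParameters constructor arguments are assumptions based on the field names used in the hunk above:

```python
from reflector.db import get_session
from reflector.db.search import SearchController, SearchParameters  # assumed path


async def find(text: str, user_id: str | None = None):
    async for session in get_session():
        params = SearchParameters(query_text=text, user_id=user_id)
        results, total = await SearchController.search_transcripts(session, params)
        return results, total
```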
@@ -7,17 +7,14 @@ from datetime import datetime, timedelta, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Literal
|
||||
|
||||
import sqlalchemy
|
||||
from fastapi import HTTPException
|
||||
from pydantic import BaseModel, ConfigDict, Field, field_serializer
|
||||
from sqlalchemy import Enum
|
||||
from sqlalchemy.dialects.postgresql import TSVECTOR
|
||||
from sqlalchemy.sql import false, or_
|
||||
from sqlalchemy import delete, insert, select, update
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.sql import or_
|
||||
|
||||
from reflector.db import get_database, metadata
|
||||
from reflector.db.base import RoomModel, TranscriptModel
|
||||
from reflector.db.recordings import recordings_controller
|
||||
from reflector.db.rooms import rooms
|
||||
from reflector.db.utils import is_postgresql
|
||||
from reflector.logger import logger
|
||||
from reflector.processors.types import Word as ProcessorWord
|
||||
from reflector.settings import settings
|
||||
@@ -32,91 +29,6 @@ class SourceKind(enum.StrEnum):
|
||||
FILE = enum.auto()
|
||||
|
||||
|
||||
transcripts = sqlalchemy.Table(
|
||||
"transcript",
|
||||
metadata,
|
||||
sqlalchemy.Column("id", sqlalchemy.String, primary_key=True),
|
||||
sqlalchemy.Column("name", sqlalchemy.String),
|
||||
sqlalchemy.Column("status", sqlalchemy.String),
|
||||
sqlalchemy.Column("locked", sqlalchemy.Boolean),
|
||||
sqlalchemy.Column("duration", sqlalchemy.Float),
|
||||
sqlalchemy.Column("created_at", sqlalchemy.DateTime(timezone=True)),
|
||||
sqlalchemy.Column("title", sqlalchemy.String),
|
||||
sqlalchemy.Column("short_summary", sqlalchemy.String),
|
||||
sqlalchemy.Column("long_summary", sqlalchemy.String),
|
||||
sqlalchemy.Column("topics", sqlalchemy.JSON),
|
||||
sqlalchemy.Column("events", sqlalchemy.JSON),
|
||||
sqlalchemy.Column("participants", sqlalchemy.JSON),
|
||||
sqlalchemy.Column("source_language", sqlalchemy.String),
|
||||
sqlalchemy.Column("target_language", sqlalchemy.String),
|
||||
sqlalchemy.Column(
|
||||
"reviewed", sqlalchemy.Boolean, nullable=False, server_default=false()
|
||||
),
|
||||
sqlalchemy.Column(
|
||||
"audio_location",
|
||||
sqlalchemy.String,
|
||||
nullable=False,
|
||||
server_default="local",
|
||||
),
|
||||
# with user attached, optional
|
||||
sqlalchemy.Column("user_id", sqlalchemy.String),
|
||||
sqlalchemy.Column(
|
||||
"share_mode",
|
||||
sqlalchemy.String,
|
||||
nullable=False,
|
||||
server_default="private",
|
||||
),
|
||||
sqlalchemy.Column(
|
||||
"meeting_id",
|
||||
sqlalchemy.String,
|
||||
),
|
||||
sqlalchemy.Column("recording_id", sqlalchemy.String),
|
||||
sqlalchemy.Column("zulip_message_id", sqlalchemy.Integer),
|
||||
sqlalchemy.Column(
|
||||
"source_kind",
|
||||
Enum(SourceKind, values_callable=lambda obj: [e.value for e in obj]),
|
||||
nullable=False,
|
||||
),
|
||||
# indicative field: whether associated audio is deleted
|
||||
# the main "audio deleted" is the presence of the audio itself / consents not-given
|
||||
# same field could've been in recording/meeting, and it's maybe even ok to dupe it at need
|
||||
sqlalchemy.Column("audio_deleted", sqlalchemy.Boolean),
|
||||
sqlalchemy.Column("room_id", sqlalchemy.String),
|
||||
sqlalchemy.Column("webvtt", sqlalchemy.Text),
|
||||
sqlalchemy.Index("idx_transcript_recording_id", "recording_id"),
|
||||
sqlalchemy.Index("idx_transcript_user_id", "user_id"),
|
||||
sqlalchemy.Index("idx_transcript_created_at", "created_at"),
|
||||
sqlalchemy.Index("idx_transcript_user_id_recording_id", "user_id", "recording_id"),
|
||||
sqlalchemy.Index("idx_transcript_room_id", "room_id"),
|
||||
sqlalchemy.Index("idx_transcript_source_kind", "source_kind"),
|
||||
sqlalchemy.Index("idx_transcript_room_id_created_at", "room_id", "created_at"),
|
||||
)
|
||||
|
||||
# Add PostgreSQL-specific full-text search column
|
||||
# This matches the migration in migrations/versions/116b2f287eab_add_full_text_search.py
|
||||
if is_postgresql():
|
||||
transcripts.append_column(
|
||||
sqlalchemy.Column(
|
||||
"search_vector_en",
|
||||
TSVECTOR,
|
||||
sqlalchemy.Computed(
|
||||
"setweight(to_tsvector('english', coalesce(title, '')), 'A') || "
|
||||
"setweight(to_tsvector('english', coalesce(long_summary, '')), 'B') || "
|
||||
"setweight(to_tsvector('english', coalesce(webvtt, '')), 'C')",
|
||||
persisted=True,
|
||||
),
|
||||
)
|
||||
)
|
||||
# Add GIN index for the search vector
|
||||
transcripts.append_constraint(
|
||||
sqlalchemy.Index(
|
||||
"idx_transcript_search_vector_en",
|
||||
"search_vector_en",
|
||||
postgresql_using="gin",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def generate_transcript_name() -> str:
|
||||
now = datetime.now(timezone.utc)
|
||||
return f"Transcript {now.strftime('%Y-%m-%d %H:%M:%S')}"
|
||||
@@ -191,6 +103,8 @@ class TranscriptParticipant(BaseModel):
|
||||
class Transcript(BaseModel):
|
||||
"""Full transcript model with all fields."""
|
||||
|
||||
model_config = ConfigDict(from_attributes=True)
|
||||
|
||||
id: str = Field(default_factory=generate_uuid4)
|
||||
user_id: str | None = None
|
||||
name: str = Field(default_factory=generate_transcript_name)
|
||||
@@ -359,6 +273,7 @@ class Transcript(BaseModel):
|
||||
class TranscriptController:
|
||||
async def get_all(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
user_id: str | None = None,
|
||||
order_by: str | None = None,
|
||||
filter_empty: bool | None = False,
|
||||
@@ -383,102 +298,114 @@ class TranscriptController:
|
||||
- `search_term`: filter transcripts by search term
|
||||
"""
|
||||
|
||||
query = transcripts.select().join(
|
||||
rooms, transcripts.c.room_id == rooms.c.id, isouter=True
|
||||
query = select(TranscriptModel).join(
|
||||
RoomModel, TranscriptModel.room_id == RoomModel.id, isouter=True
|
||||
)
|
||||
|
||||
if user_id:
|
||||
query = query.where(
|
||||
or_(transcripts.c.user_id == user_id, rooms.c.is_shared)
|
||||
or_(TranscriptModel.user_id == user_id, RoomModel.is_shared)
|
||||
)
|
||||
else:
|
||||
query = query.where(rooms.c.is_shared)
|
||||
query = query.where(RoomModel.is_shared)
|
||||
|
||||
if source_kind:
|
||||
query = query.where(transcripts.c.source_kind == source_kind)
|
||||
query = query.where(TranscriptModel.source_kind == source_kind)
|
||||
|
||||
if room_id:
|
||||
query = query.where(transcripts.c.room_id == room_id)
|
||||
query = query.where(TranscriptModel.room_id == room_id)
|
||||
|
||||
if search_term:
|
||||
query = query.where(transcripts.c.title.ilike(f"%{search_term}%"))
|
||||
query = query.where(TranscriptModel.title.ilike(f"%{search_term}%"))
|
||||
|
||||
# Exclude heavy JSON columns from list queries
|
||||
# Get all ORM column attributes except excluded ones
|
||||
transcript_columns = [
|
||||
col for col in transcripts.c if col.name not in exclude_columns
|
||||
getattr(TranscriptModel, col.name)
|
||||
for col in TranscriptModel.__table__.c
|
||||
if col.name not in exclude_columns
|
||||
]
|
||||
|
||||
query = query.with_only_columns(
|
||||
transcript_columns
|
||||
+ [
|
||||
rooms.c.name.label("room_name"),
|
||||
]
|
||||
*transcript_columns,
|
||||
RoomModel.name.label("room_name"),
|
||||
)
|
||||
|
||||
if order_by is not None:
|
||||
field = getattr(transcripts.c, order_by[1:])
|
||||
field = getattr(TranscriptModel, order_by[1:])
|
||||
if order_by.startswith("-"):
|
||||
field = field.desc()
|
||||
query = query.order_by(field)
|
||||
|
||||
if filter_empty:
|
||||
query = query.filter(transcripts.c.status != "idle")
|
||||
query = query.filter(TranscriptModel.status != "idle")
|
||||
|
||||
if filter_recording:
|
||||
query = query.filter(transcripts.c.status != "recording")
|
||||
query = query.filter(TranscriptModel.status != "recording")
|
||||
|
||||
# print(query.compile(compile_kwargs={"literal_binds": True}))
|
||||
|
||||
if return_query:
|
||||
return query
|
||||
|
||||
results = await get_database().fetch_all(query)
|
||||
return results
|
||||
result = await session.execute(query)
|
||||
return [dict(row) for row in result.mappings().all()]
|
||||
|
||||
async def get_by_id(self, transcript_id: str, **kwargs) -> Transcript | None:
|
||||
async def get_by_id(
|
||||
self, session: AsyncSession, transcript_id: str, **kwargs
|
||||
) -> Transcript | None:
|
||||
"""
|
||||
Get a transcript by id
|
||||
"""
|
||||
query = transcripts.select().where(transcripts.c.id == transcript_id)
|
||||
query = select(TranscriptModel).where(TranscriptModel.id == transcript_id)
|
||||
if "user_id" in kwargs:
|
||||
query = query.where(transcripts.c.user_id == kwargs["user_id"])
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
query = query.where(TranscriptModel.user_id == kwargs["user_id"])
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Transcript(**result)
|
||||
return Transcript.model_validate(row)
|
||||
|
||||
async def get_by_recording_id(
|
||||
self, recording_id: str, **kwargs
|
||||
self, session: AsyncSession, recording_id: str, **kwargs
|
||||
) -> Transcript | None:
|
||||
"""
|
||||
Get a transcript by recording_id
|
||||
"""
|
||||
query = transcripts.select().where(transcripts.c.recording_id == recording_id)
|
||||
query = select(TranscriptModel).where(
|
||||
TranscriptModel.recording_id == recording_id
|
||||
)
|
||||
if "user_id" in kwargs:
|
||||
query = query.where(transcripts.c.user_id == kwargs["user_id"])
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
query = query.where(TranscriptModel.user_id == kwargs["user_id"])
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
return None
|
||||
return Transcript(**result)
|
||||
return Transcript.model_validate(row)
|
||||
|
||||
async def get_by_room_id(self, room_id: str, **kwargs) -> list[Transcript]:
|
||||
async def get_by_room_id(
|
||||
self, session: AsyncSession, room_id: str, **kwargs
|
||||
) -> list[Transcript]:
|
||||
"""
|
||||
Get transcripts by room_id (direct access without joins)
|
||||
"""
|
||||
query = transcripts.select().where(transcripts.c.room_id == room_id)
|
||||
query = select(TranscriptModel).where(TranscriptModel.room_id == room_id)
|
||||
if "user_id" in kwargs:
|
||||
query = query.where(transcripts.c.user_id == kwargs["user_id"])
|
||||
query = query.where(TranscriptModel.user_id == kwargs["user_id"])
|
||||
if "order_by" in kwargs:
|
||||
order_by = kwargs["order_by"]
|
||||
field = getattr(transcripts.c, order_by[1:])
|
||||
field = getattr(TranscriptModel, order_by[1:])
|
||||
if order_by.startswith("-"):
|
||||
field = field.desc()
|
||||
query = query.order_by(field)
|
||||
results = await get_database().fetch_all(query)
|
||||
return [Transcript(**result) for result in results]
|
||||
results = await session.execute(query)
|
||||
return [
|
||||
Transcript.model_validate(dict(row)) for row in results.mappings().all()
|
||||
]
|
||||
|
||||
async def get_by_id_for_http(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transcript_id: str,
|
||||
user_id: str | None,
|
||||
) -> Transcript:
|
||||
@@ -491,13 +418,14 @@ class TranscriptController:
|
||||
This method checks the share mode of the transcript and the user_id
|
||||
to determine if the user can access the transcript.
|
||||
"""
|
||||
query = transcripts.select().where(transcripts.c.id == transcript_id)
|
||||
result = await get_database().fetch_one(query)
|
||||
if not result:
|
||||
query = select(TranscriptModel).where(TranscriptModel.id == transcript_id)
|
||||
result = await session.execute(query)
|
||||
row = result.scalar_one_or_none()
|
||||
if not row:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
# if the transcript is anonymous, share mode is not checked
|
||||
transcript = Transcript(**result)
|
||||
transcript = Transcript.model_validate(row)
|
||||
if transcript.user_id is None:
|
||||
return transcript
|
||||
|
||||
@@ -520,6 +448,7 @@ class TranscriptController:
|
||||
|
||||
async def add(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
name: str,
|
||||
source_kind: SourceKind,
|
||||
source_language: str = "en",
|
||||
@@ -544,14 +473,15 @@ class TranscriptController:
|
||||
meeting_id=meeting_id,
|
||||
room_id=room_id,
|
||||
)
|
||||
query = transcripts.insert().values(**transcript.model_dump())
|
||||
await get_database().execute(query)
|
||||
query = insert(TranscriptModel).values(**transcript.model_dump())
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
return transcript
|
||||
|
||||
# TODO investigate why mutate= is used. it's used in one place currently, maybe because of ORM field updates.
|
||||
# using mutate=True is discouraged
|
||||
async def update(
|
||||
self, transcript: Transcript, values: dict, mutate=False
|
||||
self, session: AsyncSession, transcript: Transcript, values: dict, mutate=False
|
||||
) -> Transcript:
|
||||
"""
|
||||
Update a transcript fields with key/values in values.
|
||||
@@ -560,11 +490,12 @@ class TranscriptController:
|
||||
values = TranscriptController._handle_topics_update(values)
|
||||
|
||||
query = (
|
||||
transcripts.update()
|
||||
.where(transcripts.c.id == transcript.id)
|
||||
update(TranscriptModel)
|
||||
.where(TranscriptModel.id == transcript.id)
|
||||
.values(**values)
|
||||
)
|
||||
await get_database().execute(query)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
if mutate:
|
||||
for key, value in values.items():
|
||||
setattr(transcript, key, value)
|
||||
@@ -593,13 +524,14 @@ class TranscriptController:
|
||||
|
||||
async def remove_by_id(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transcript_id: str,
|
||||
user_id: str | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
Remove a transcript by id
|
||||
"""
|
||||
transcript = await self.get_by_id(transcript_id)
|
||||
transcript = await self.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
return
|
||||
if user_id is not None and transcript.user_id != user_id:
|
||||
@@ -619,7 +551,7 @@ class TranscriptController:
|
||||
if transcript.recording_id:
|
||||
try:
|
||||
recording = await recordings_controller.get_by_id(
|
||||
transcript.recording_id
|
||||
session, transcript.recording_id
|
||||
)
|
||||
if recording:
|
||||
try:
|
||||
@@ -630,33 +562,40 @@ class TranscriptController:
|
||||
exc_info=e,
|
||||
recording_id=transcript.recording_id,
|
||||
)
|
||||
await recordings_controller.remove_by_id(transcript.recording_id)
|
||||
await recordings_controller.remove_by_id(
|
||||
session, transcript.recording_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to delete recording row",
|
||||
exc_info=e,
|
||||
recording_id=transcript.recording_id,
|
||||
)
|
||||
query = transcripts.delete().where(transcripts.c.id == transcript_id)
|
||||
await get_database().execute(query)
|
||||
query = delete(TranscriptModel).where(TranscriptModel.id == transcript_id)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
|
||||
async def remove_by_recording_id(self, recording_id: str):
|
||||
async def remove_by_recording_id(self, session: AsyncSession, recording_id: str):
|
||||
"""
|
||||
Remove a transcript by recording_id
|
||||
"""
|
||||
query = transcripts.delete().where(transcripts.c.recording_id == recording_id)
|
||||
await get_database().execute(query)
|
||||
query = delete(TranscriptModel).where(
|
||||
TranscriptModel.recording_id == recording_id
|
||||
)
|
||||
await session.execute(query)
|
||||
await session.commit()
|
||||
|
||||
@asynccontextmanager
|
||||
async def transaction(self):
|
||||
async def transaction(self, session: AsyncSession):
|
||||
"""
|
||||
A context manager for database transaction
|
||||
"""
|
||||
async with get_database().transaction(isolation="serializable"):
|
||||
async with session.begin():
|
||||
yield
|
||||
|
||||
async def append_event(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transcript: Transcript,
|
||||
event: str,
|
||||
data: Any,
|
||||
@@ -665,11 +604,12 @@ class TranscriptController:
|
||||
Append an event to a transcript
|
||||
"""
|
||||
resp = transcript.add_event(event=event, data=data)
|
||||
await self.update(transcript, {"events": transcript.events_dump()})
|
||||
await self.update(session, transcript, {"events": transcript.events_dump()})
|
||||
return resp
|
||||
|
||||
async def upsert_topic(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transcript: Transcript,
|
||||
topic: TranscriptTopic,
|
||||
) -> TranscriptEvent:
|
||||
@@ -677,9 +617,9 @@ class TranscriptController:
|
||||
Upsert topics to a transcript
|
||||
"""
|
||||
transcript.upsert_topic(topic)
|
||||
await self.update(transcript, {"topics": transcript.topics_dump()})
|
||||
await self.update(session, transcript, {"topics": transcript.topics_dump()})
|
||||
|
||||
async def move_mp3_to_storage(self, transcript: Transcript):
|
||||
async def move_mp3_to_storage(self, session: AsyncSession, transcript: Transcript):
|
||||
"""
|
||||
Move mp3 file to storage
|
||||
"""
|
||||
@@ -703,12 +643,16 @@ class TranscriptController:
|
||||
|
||||
# indicate on the transcript that the audio is now on storage
|
||||
# mutates transcript argument
|
||||
await self.update(transcript, {"audio_location": "storage"}, mutate=True)
|
||||
await self.update(
|
||||
session, transcript, {"audio_location": "storage"}, mutate=True
|
||||
)
|
||||
|
||||
# unlink the local file
|
||||
transcript.audio_mp3_filename.unlink(missing_ok=True)
|
||||
|
||||
async def download_mp3_from_storage(self, transcript: Transcript):
|
||||
async def download_mp3_from_storage(
|
||||
self, session: AsyncSession, transcript: Transcript
|
||||
):
|
||||
"""
|
||||
Download audio from storage
|
||||
"""
|
||||
@@ -720,6 +664,7 @@ class TranscriptController:
|
||||
|
||||
async def upsert_participant(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transcript: Transcript,
|
||||
participant: TranscriptParticipant,
|
||||
) -> TranscriptParticipant:
|
||||
@@ -727,11 +672,14 @@ class TranscriptController:
|
||||
Add/update a participant to a transcript
|
||||
"""
|
||||
result = transcript.upsert_participant(participant)
|
||||
await self.update(transcript, {"participants": transcript.participants_dump()})
|
||||
await self.update(
|
||||
session, transcript, {"participants": transcript.participants_dump()}
|
||||
)
|
||||
return result
|
||||
|
||||
async def delete_participant(
|
||||
self,
|
||||
session: AsyncSession,
|
||||
transcript: Transcript,
|
||||
participant_id: str,
|
||||
):
|
||||
@@ -739,28 +687,31 @@ class TranscriptController:
|
||||
Delete a participant from a transcript
|
||||
"""
|
||||
transcript.delete_participant(participant_id)
|
||||
await self.update(transcript, {"participants": transcript.participants_dump()})
|
||||
await self.update(
|
||||
session, transcript, {"participants": transcript.participants_dump()}
|
||||
)
|
||||
|
||||
async def set_status(
|
||||
self, transcript_id: str, status: TranscriptStatus
|
||||
self, session: AsyncSession, transcript_id: str, status: TranscriptStatus
|
||||
) -> TranscriptEvent | None:
|
||||
"""
|
||||
Update the status of a transcript
|
||||
|
||||
Will add an event STATUS + update the status field of transcript
|
||||
"""
|
||||
async with self.transaction():
|
||||
transcript = await self.get_by_id(transcript_id)
|
||||
async with self.transaction(session):
|
||||
transcript = await self.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
raise Exception(f"Transcript {transcript_id} not found")
|
||||
if transcript.status == status:
|
||||
return
|
||||
resp = await self.append_event(
|
||||
session,
|
||||
transcript=transcript,
|
||||
event="STATUS",
|
||||
data=StrValue(value=status),
|
||||
)
|
||||
await self.update(transcript, {"status": status})
|
||||
await self.update(session, transcript, {"status": status})
|
||||
return resp
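The controller methods above now take an explicit AsyncSession as their first argument instead of reaching for a global database handle. A short usage sketch of the migrated API, mirroring how callers elsewhere in this change obtain a session (names come from the diff; the wrapper function itself is illustrative):

    # Illustrative caller: open a session from the factory and pass it to the controller.
    from reflector.db import get_session_factory
    from reflector.db.transcripts import transcripts_controller

    async def fetch_transcript(transcript_id: str):
        async with get_session_factory()() as session:
            return await transcripts_controller.get_by_id(session, transcript_id)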
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +0,0 @@
"""Database utility functions."""

from reflector.db import get_database


def is_postgresql() -> bool:
    return get_database().url.scheme and get_database().url.scheme.startswith(
        "postgresql"
    )
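This hunk deletes the `databases`-backed helper, yet `is_postgresql()` is still called when defining the full-text search column, so an equivalent check presumably moves next to the async engine. A hedged sketch of what such a replacement could look like (the `get_engine` accessor is an assumption, not shown in this diff):

    # Hypothetical replacement sketch: derive the dialect from the SQLAlchemy engine URL.
    from reflector.db import get_engine  # assumed accessor, not part of this diff

    def is_postgresql() -> bool:
        return get_engine().url.get_backend_name().startswith("postgresql")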
@@ -13,8 +13,10 @@ from pathlib import Path
|
||||
import av
|
||||
import structlog
|
||||
from celery import chain, shared_task
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.asynctask import asynctask
|
||||
from reflector.db import get_session_factory
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.transcripts import (
|
||||
SourceKind,
|
||||
@@ -53,6 +55,7 @@ from reflector.processors.types import (
|
||||
)
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_transcripts_storage
|
||||
from reflector.worker.session_decorator import with_session
|
||||
from reflector.worker.webhook import send_transcript_webhook
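`with_session` is imported from `reflector.worker.session_decorator` but its implementation is not part of this diff; judging by the tasks below, the decorated coroutine receives the session as its first positional argument. A plausible sketch under that assumption:

    # Hypothetical sketch of the session decorator (actual implementation not shown here).
    import functools

    from reflector.db import get_session_factory

    def with_session(func):
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            async with get_session_factory()() as session:
                return await func(session, *args, **kwargs)

        return wrapper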
|
||||
|
||||
|
||||
@@ -97,17 +100,23 @@ class PipelineMainFile(PipelineMainBase):
|
||||
@broadcast_to_sockets
|
||||
async def set_status(self, transcript_id: str, status: TranscriptStatus):
|
||||
async with self.lock_transaction():
|
||||
return await transcripts_controller.set_status(transcript_id, status)
|
||||
async with get_session_factory()() as session:
|
||||
return await transcripts_controller.set_status(
|
||||
session, transcript_id, status
|
||||
)
|
||||
|
||||
async def process(self, file_path: Path):
|
||||
"""Main entry point for file processing"""
|
||||
self.logger.info(f"Starting file pipeline for {file_path}")
|
||||
|
||||
transcript = await self.get_transcript()
|
||||
async with get_session_factory()() as session:
|
||||
transcript = await transcripts_controller.get_by_id(
|
||||
session, self.transcript_id
|
||||
)
|
||||
|
||||
# Clear transcript as we're going to regenerate everything
|
||||
async with self.transaction():
|
||||
# Clear transcript as we're going to regenerate everything
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"events": [],
|
||||
@@ -123,6 +132,7 @@ class PipelineMainFile(PipelineMainBase):
|
||||
|
||||
# Run parallel processing
|
||||
await self.run_parallel_processing(
|
||||
session,
|
||||
audio_path,
|
||||
audio_url,
|
||||
transcript.source_language,
|
||||
@@ -131,7 +141,8 @@ class PipelineMainFile(PipelineMainBase):
|
||||
|
||||
self.logger.info("File pipeline complete")
|
||||
|
||||
await transcripts_controller.set_status(transcript.id, "ended")
|
||||
async with get_session_factory()() as session:
|
||||
await transcripts_controller.set_status(session, transcript.id, "ended")
|
||||
|
||||
async def extract_and_write_audio(
|
||||
self, file_path: Path, transcript: Transcript
|
||||
@@ -193,6 +204,7 @@ class PipelineMainFile(PipelineMainBase):
|
||||
|
||||
async def run_parallel_processing(
|
||||
self,
|
||||
session,
|
||||
audio_path: Path,
|
||||
audio_url: str,
|
||||
source_language: str,
|
||||
@@ -206,7 +218,7 @@ class PipelineMainFile(PipelineMainBase):
|
||||
# Phase 1: Parallel processing of independent tasks
|
||||
transcription_task = self.transcribe_file(audio_url, source_language)
|
||||
diarization_task = self.diarize_file(audio_url)
|
||||
waveform_task = self.generate_waveform(audio_path)
|
||||
waveform_task = self.generate_waveform(session, audio_path)
|
||||
|
||||
results = await asyncio.gather(
|
||||
transcription_task, diarization_task, waveform_task, return_exceptions=True
|
||||
@@ -254,7 +266,7 @@ class PipelineMainFile(PipelineMainBase):
|
||||
)
|
||||
results = await asyncio.gather(
|
||||
self.generate_title(topics),
|
||||
self.generate_summaries(topics),
|
||||
self.generate_summaries(session, topics),
|
||||
return_exceptions=True,
|
||||
)
|
||||
|
||||
@@ -306,9 +318,9 @@ class PipelineMainFile(PipelineMainBase):
|
||||
self.logger.error(f"Diarization failed: {e}")
|
||||
return None
|
||||
|
||||
async def generate_waveform(self, audio_path: Path):
|
||||
async def generate_waveform(self, session: AsyncSession, audio_path: Path):
|
||||
"""Generate and save waveform"""
|
||||
transcript = await self.get_transcript()
|
||||
transcript = await transcripts_controller.get_by_id(session, self.transcript_id)
|
||||
|
||||
processor = AudioWaveformProcessor(
|
||||
audio_path=audio_path,
|
||||
@@ -361,13 +373,13 @@ class PipelineMainFile(PipelineMainBase):
|
||||
|
||||
await processor.flush()
|
||||
|
||||
async def generate_summaries(self, topics: list[TitleSummary]):
|
||||
async def generate_summaries(self, session, topics: list[TitleSummary]):
|
||||
"""Generate long and short summaries from topics"""
|
||||
if not topics:
|
||||
self.logger.warning("No topics for summary generation")
|
||||
return
|
||||
|
||||
transcript = await self.get_transcript()
|
||||
transcript = await transcripts_controller.get_by_id(session, self.transcript_id)
|
||||
processor = TranscriptFinalSummaryProcessor(
|
||||
transcript=transcript,
|
||||
callback=self.on_long_summary,
|
||||
@@ -383,14 +395,15 @@ class PipelineMainFile(PipelineMainBase):
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def task_send_webhook_if_needed(*, transcript_id: str):
|
||||
@with_session
|
||||
async def task_send_webhook_if_needed(session, *, transcript_id: str):
|
||||
"""Send webhook if this is a room recording with webhook configured"""
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
transcript = await transcripts_controller.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
return
|
||||
|
||||
if transcript.source_kind == SourceKind.ROOM and transcript.room_id:
|
||||
room = await rooms_controller.get_by_id(transcript.room_id)
|
||||
room = await rooms_controller.get_by_id(session, transcript.room_id)
|
||||
if room and room.webhook_url:
|
||||
logger.info(
|
||||
"Dispatching webhook",
|
||||
@@ -405,10 +418,10 @@ async def task_send_webhook_if_needed(*, transcript_id: str):
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def task_pipeline_file_process(*, transcript_id: str):
|
||||
@with_session
|
||||
async def task_pipeline_file_process(session, *, transcript_id: str):
|
||||
"""Celery task for file pipeline processing"""
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
transcript = await transcripts_controller.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
raise Exception(f"Transcript {transcript_id} not found")
|
||||
|
||||
|
||||
@@ -20,9 +20,11 @@ import av
|
||||
import boto3
|
||||
from celery import chord, current_task, group, shared_task
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from structlog import BoundLogger as Logger
|
||||
|
||||
from reflector.asynctask import asynctask
|
||||
from reflector.db import get_session_factory
|
||||
from reflector.db.meetings import meeting_consent_controller, meetings_controller
|
||||
from reflector.db.recordings import recordings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
@@ -62,6 +64,7 @@ from reflector.processors.types import (
|
||||
from reflector.processors.types import Transcript as TranscriptProcessorType
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_transcripts_storage
|
||||
from reflector.worker.session_decorator import with_session_and_transcript
|
||||
from reflector.ws_manager import WebsocketManager, get_ws_manager
|
||||
from reflector.zulip import (
|
||||
get_zulip_message,
|
||||
@@ -96,9 +99,10 @@ def get_transcript(func):
|
||||
@functools.wraps(func)
|
||||
async def wrapper(**kwargs):
|
||||
transcript_id = kwargs.pop("transcript_id")
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id=transcript_id)
|
||||
async with get_session_factory()() as session:
|
||||
transcript = await transcripts_controller.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
raise Exception("Transcript {transcript_id} not found")
|
||||
raise Exception(f"Transcript {transcript_id} not found")
|
||||
|
||||
# Enhanced logger with Celery task context
|
||||
tlogger = logger.bind(transcript_id=transcript.id)
|
||||
@@ -139,11 +143,9 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
self._ws_manager = get_ws_manager()
|
||||
return self._ws_manager
|
||||
|
||||
async def get_transcript(self) -> Transcript:
|
||||
async def get_transcript(self, session: AsyncSession) -> Transcript:
|
||||
# fetch the transcript
|
||||
result = await transcripts_controller.get_by_id(
|
||||
transcript_id=self.transcript_id
|
||||
)
|
||||
result = await transcripts_controller.get_by_id(session, self.transcript_id)
|
||||
if not result:
|
||||
raise Exception("Transcript not found")
|
||||
return result
|
||||
@@ -175,8 +177,8 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
@asynccontextmanager
|
||||
async def transaction(self):
|
||||
async with self.lock_transaction():
|
||||
async with transcripts_controller.transaction():
|
||||
yield
|
||||
async with get_session_factory()() as session:
|
||||
yield session
|
||||
|
||||
@broadcast_to_sockets
|
||||
async def on_status(self, status):
|
||||
@@ -207,13 +209,17 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
|
||||
# when the status of the pipeline changes, update the transcript
|
||||
async with self._lock:
|
||||
return await transcripts_controller.set_status(self.transcript_id, status)
|
||||
async with get_session_factory()() as session:
|
||||
return await transcripts_controller.set_status(
|
||||
session, self.transcript_id, status
|
||||
)
|
||||
|
||||
@broadcast_to_sockets
|
||||
async def on_transcript(self, data):
|
||||
async with self.transaction():
|
||||
transcript = await self.get_transcript()
|
||||
async with self.transaction() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
return await transcripts_controller.append_event(
|
||||
session,
|
||||
transcript=transcript,
|
||||
event="TRANSCRIPT",
|
||||
data=TranscriptText(text=data.text, translation=data.translation),
|
||||
@@ -230,10 +236,11 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
)
|
||||
if isinstance(data, TitleSummaryWithIdProcessorType):
|
||||
topic.id = data.id
|
||||
async with self.transaction():
|
||||
transcript = await self.get_transcript()
|
||||
await transcripts_controller.upsert_topic(transcript, topic)
|
||||
async with self.transaction() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
await transcripts_controller.upsert_topic(session, transcript, topic)
|
||||
return await transcripts_controller.append_event(
|
||||
session,
|
||||
transcript=transcript,
|
||||
event="TOPIC",
|
||||
data=topic,
|
||||
@@ -242,16 +249,18 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
@broadcast_to_sockets
|
||||
async def on_title(self, data):
|
||||
final_title = TranscriptFinalTitle(title=data.title)
|
||||
async with self.transaction():
|
||||
transcript = await self.get_transcript()
|
||||
async with self.transaction() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
if not transcript.title:
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"title": final_title.title,
|
||||
},
|
||||
)
|
||||
return await transcripts_controller.append_event(
|
||||
session,
|
||||
transcript=transcript,
|
||||
event="FINAL_TITLE",
|
||||
data=final_title,
|
||||
@@ -260,15 +269,17 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
@broadcast_to_sockets
|
||||
async def on_long_summary(self, data):
|
||||
final_long_summary = TranscriptFinalLongSummary(long_summary=data.long_summary)
|
||||
async with self.transaction():
|
||||
transcript = await self.get_transcript()
|
||||
async with self.transaction() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"long_summary": final_long_summary.long_summary,
|
||||
},
|
||||
)
|
||||
return await transcripts_controller.append_event(
|
||||
session,
|
||||
transcript=transcript,
|
||||
event="FINAL_LONG_SUMMARY",
|
||||
data=final_long_summary,
|
||||
@@ -279,15 +290,17 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
final_short_summary = TranscriptFinalShortSummary(
|
||||
short_summary=data.short_summary
|
||||
)
|
||||
async with self.transaction():
|
||||
transcript = await self.get_transcript()
|
||||
async with self.transaction() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"short_summary": final_short_summary.short_summary,
|
||||
},
|
||||
)
|
||||
return await transcripts_controller.append_event(
|
||||
session,
|
||||
transcript=transcript,
|
||||
event="FINAL_SHORT_SUMMARY",
|
||||
data=final_short_summary,
|
||||
@@ -295,29 +308,30 @@ class PipelineMainBase(PipelineRunner[PipelineMessage], Generic[PipelineMessage]
|
||||
|
||||
@broadcast_to_sockets
|
||||
async def on_duration(self, data):
|
||||
async with self.transaction():
|
||||
async with self.transaction() as session:
|
||||
duration = TranscriptDuration(duration=data)
|
||||
|
||||
transcript = await self.get_transcript()
|
||||
transcript = await self.get_transcript(session)
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"duration": duration.duration,
|
||||
},
|
||||
)
|
||||
return await transcripts_controller.append_event(
|
||||
transcript=transcript, event="DURATION", data=duration
|
||||
session, transcript=transcript, event="DURATION", data=duration
|
||||
)
|
||||
|
||||
@broadcast_to_sockets
|
||||
async def on_waveform(self, data):
|
||||
async with self.transaction():
|
||||
async with self.transaction() as session:
|
||||
waveform = TranscriptWaveform(waveform=data)
|
||||
|
||||
transcript = await self.get_transcript()
|
||||
transcript = await self.get_transcript(session)
|
||||
|
||||
return await transcripts_controller.append_event(
|
||||
transcript=transcript, event="WAVEFORM", data=waveform
|
||||
session, transcript=transcript, event="WAVEFORM", data=waveform
|
||||
)
|
||||
|
||||
|
||||
@@ -330,7 +344,8 @@ class PipelineMainLive(PipelineMainBase):
|
||||
async def create(self) -> Pipeline:
|
||||
# create a context for the whole rtc transaction
|
||||
# add a customised logger to the context
|
||||
transcript = await self.get_transcript()
|
||||
async with get_session_factory()() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
|
||||
processors = [
|
||||
AudioFileWriterProcessor(
|
||||
@@ -378,7 +393,8 @@ class PipelineMainDiarization(PipelineMainBase[AudioDiarizationInput]):
|
||||
# now let's start the pipeline by pushing information to the
|
||||
# first processor diarization processor
|
||||
# XXX translation is lost when converting our data model to the processor model
|
||||
transcript = await self.get_transcript()
|
||||
async with get_session_factory()() as session:
|
||||
transcript = await self.get_transcript(session)
|
||||
|
||||
# diarization works only if the file is uploaded to an external storage
|
||||
if transcript.audio_location == "local":
|
||||
@@ -411,7 +427,8 @@ class PipelineMainFromTopics(PipelineMainBase[TitleSummaryWithIdProcessorType]):
|
||||
|
||||
async def create(self) -> Pipeline:
|
||||
# get transcript
|
||||
self._transcript = transcript = await self.get_transcript()
|
||||
async with get_session_factory()() as session:
|
||||
self._transcript = transcript = await self.get_transcript(session)
|
||||
|
||||
# create pipeline
|
||||
processors = self.get_processors()
|
||||
@@ -516,8 +533,7 @@ async def pipeline_convert_to_mp3(transcript: Transcript, logger: Logger):
|
||||
logger.info("Convert to mp3 done")
|
||||
|
||||
|
||||
@get_transcript
|
||||
async def pipeline_upload_mp3(transcript: Transcript, logger: Logger):
|
||||
async def pipeline_upload_mp3(session, transcript: Transcript, logger: Logger):
|
||||
if not settings.TRANSCRIPT_STORAGE_BACKEND:
|
||||
logger.info("No storage backend configured, skipping mp3 upload")
|
||||
return
|
||||
@@ -535,7 +551,7 @@ async def pipeline_upload_mp3(transcript: Transcript, logger: Logger):
|
||||
return
|
||||
|
||||
# Upload to external storage and delete the file
|
||||
await transcripts_controller.move_mp3_to_storage(transcript)
|
||||
await transcripts_controller.move_mp3_to_storage(session, transcript)
|
||||
|
||||
logger.info("Upload mp3 done")
|
||||
|
||||
@@ -564,20 +580,23 @@ async def pipeline_summaries(transcript: Transcript, logger: Logger):
|
||||
logger.info("Summaries done")
|
||||
|
||||
|
||||
@get_transcript
|
||||
async def cleanup_consent(transcript: Transcript, logger: Logger):
|
||||
async def cleanup_consent(session, transcript: Transcript, logger: Logger):
|
||||
logger.info("Starting consent cleanup")
|
||||
|
||||
consent_denied = False
|
||||
recording = None
|
||||
try:
|
||||
if transcript.recording_id:
|
||||
recording = await recordings_controller.get_by_id(transcript.recording_id)
|
||||
recording = await recordings_controller.get_by_id(
|
||||
session, transcript.recording_id
|
||||
)
|
||||
if recording and recording.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
meeting = await meetings_controller.get_by_id(
|
||||
session, recording.meeting_id
|
||||
)
|
||||
if meeting:
|
||||
consent_denied = await meeting_consent_controller.has_any_denial(
|
||||
meeting.id
|
||||
session, meeting.id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get fetch consent: {e}", exc_info=e)
|
||||
@@ -606,7 +625,7 @@ async def cleanup_consent(transcript: Transcript, logger: Logger):
|
||||
logger.error(f"Failed to delete Whereby recording: {e}", exc_info=e)
|
||||
|
||||
# non-transactional, files marked for deletion not actually deleted is possible
|
||||
await transcripts_controller.update(transcript, {"audio_deleted": True})
|
||||
await transcripts_controller.update(session, transcript, {"audio_deleted": True})
|
||||
# 2. Delete processed audio from transcript storage S3 bucket
|
||||
if transcript.audio_location == "storage":
|
||||
storage = get_transcripts_storage()
|
||||
@@ -630,15 +649,14 @@ async def cleanup_consent(transcript: Transcript, logger: Logger):
|
||||
logger.info("Consent cleanup done")
|
||||
|
||||
|
||||
@get_transcript
|
||||
async def pipeline_post_to_zulip(transcript: Transcript, logger: Logger):
|
||||
async def pipeline_post_to_zulip(session, transcript: Transcript, logger: Logger):
|
||||
logger.info("Starting post to zulip")
|
||||
|
||||
if not transcript.recording_id:
|
||||
logger.info("Transcript has no recording")
|
||||
return
|
||||
|
||||
recording = await recordings_controller.get_by_id(transcript.recording_id)
|
||||
recording = await recordings_controller.get_by_id(session, transcript.recording_id)
|
||||
if not recording:
|
||||
logger.info("Recording not found")
|
||||
return
|
||||
@@ -647,12 +665,12 @@ async def pipeline_post_to_zulip(transcript: Transcript, logger: Logger):
|
||||
logger.info("Recording has no meeting")
|
||||
return
|
||||
|
||||
meeting = await meetings_controller.get_by_id(recording.meeting_id)
|
||||
meeting = await meetings_controller.get_by_id(session, recording.meeting_id)
|
||||
if not meeting:
|
||||
logger.info("No meeting found for this recording")
|
||||
return
|
||||
|
||||
room = await rooms_controller.get_by_id(meeting.room_id)
|
||||
room = await rooms_controller.get_by_id(session, meeting.room_id)
|
||||
if not room:
|
||||
logger.error(f"Missing room for a meeting {meeting.id}")
|
||||
return
|
||||
@@ -678,7 +696,7 @@ async def pipeline_post_to_zulip(transcript: Transcript, logger: Logger):
|
||||
room.zulip_stream, room.zulip_topic, message
|
||||
)
|
||||
await transcripts_controller.update(
|
||||
transcript, {"zulip_message_id": response["id"]}
|
||||
session, transcript, {"zulip_message_id": response["id"]}
|
||||
)
|
||||
|
||||
logger.info("Posted to zulip")
|
||||
@@ -709,8 +727,11 @@ async def task_pipeline_convert_to_mp3(*, transcript_id: str):
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def task_pipeline_upload_mp3(*, transcript_id: str):
|
||||
await pipeline_upload_mp3(transcript_id=transcript_id)
|
||||
@with_session_and_transcript
|
||||
async def task_pipeline_upload_mp3(
|
||||
session, *, transcript: Transcript, logger: Logger, transcript_id: str
|
||||
):
|
||||
await pipeline_upload_mp3(session, transcript=transcript, logger=logger)
|
||||
|
||||
|
||||
@shared_task
|
||||
@@ -733,14 +754,20 @@ async def task_pipeline_final_summaries(*, transcript_id: str):
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def task_cleanup_consent(*, transcript_id: str):
|
||||
await cleanup_consent(transcript_id=transcript_id)
|
||||
@with_session_and_transcript
|
||||
async def task_cleanup_consent(
|
||||
session, *, transcript: Transcript, logger: Logger, transcript_id: str
|
||||
):
|
||||
await cleanup_consent(session, transcript=transcript, logger=logger)
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def task_pipeline_post_to_zulip(*, transcript_id: str):
|
||||
await pipeline_post_to_zulip(transcript_id=transcript_id)
|
||||
@with_session_and_transcript
|
||||
async def task_pipeline_post_to_zulip(
|
||||
session, *, transcript: Transcript, logger: Logger, transcript_id: str
|
||||
):
|
||||
await pipeline_post_to_zulip(session, transcript=transcript, logger=logger)
|
||||
|
||||
|
||||
def pipeline_post(*, transcript_id: str):
|
||||
@@ -772,14 +799,16 @@ def pipeline_post(*, transcript_id: str):
|
||||
async def pipeline_process(transcript: Transcript, logger: Logger):
|
||||
try:
|
||||
if transcript.audio_location == "storage":
|
||||
await transcripts_controller.download_mp3_from_storage(transcript)
|
||||
transcript.audio_waveform_filename.unlink(missing_ok=True)
|
||||
await transcripts_controller.update(
|
||||
transcript,
|
||||
{
|
||||
"topics": [],
|
||||
},
|
||||
)
|
||||
async with get_session_factory()() as session:
|
||||
await transcripts_controller.download_mp3_from_storage(transcript)
|
||||
transcript.audio_waveform_filename.unlink(missing_ok=True)
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"topics": [],
|
||||
},
|
||||
)
|
||||
|
||||
# open audio
|
||||
audio_filename = next(transcript.data_path.glob("upload.*"), None)
|
||||
@@ -811,12 +840,14 @@ async def pipeline_process(transcript: Transcript, logger: Logger):
|
||||
|
||||
except Exception as exc:
|
||||
logger.error("Pipeline error", exc_info=exc)
|
||||
await transcripts_controller.update(
|
||||
transcript,
|
||||
{
|
||||
"status": "error",
|
||||
},
|
||||
)
|
||||
async with get_session_factory()() as session:
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"status": "error",
|
||||
},
|
||||
)
|
||||
raise
|
||||
|
||||
logger.info("Pipeline ended")
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
import asyncio
|
||||
import functools
|
||||
import json
|
||||
from typing import Optional
|
||||
|
||||
import redis
|
||||
import redis.asyncio as redis_async
|
||||
import structlog
|
||||
from redis.exceptions import LockError
|
||||
|
||||
from reflector.settings import settings
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
redis_clients = {}
|
||||
|
||||
|
||||
@@ -21,6 +28,12 @@ def get_redis_client(db=0):
|
||||
return redis_clients[db]
|
||||
|
||||
|
||||
async def get_async_redis_client(db: int = 0):
|
||||
return await redis_async.from_url(
|
||||
f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/{db}"
|
||||
)
|
||||
|
||||
|
||||
def redis_cache(prefix="cache", duration=3600, db=settings.REDIS_CACHE_DB, argidx=1):
|
||||
"""
|
||||
Cache the result of a function in Redis.
|
||||
@@ -49,3 +62,87 @@ def redis_cache(prefix="cache", duration=3600, db=settings.REDIS_CACHE_DB, argid
|
||||
return wrapper
|
||||
|
||||
return decorator
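For context, a hedged usage sketch of the decorator defined above; the decorated function and its arguments are illustrative, and `argidx` is assumed to select which positional argument becomes part of the cache key:

    # Illustrative only: cache an expensive lookup for ten minutes, keyed on message_id.
    @redis_cache(prefix="zulip_message", duration=600, argidx=0)
    def fetch_zulip_message(message_id: str):
        ...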
|
||||
|
||||
|
||||
class RedisAsyncLock:
|
||||
def __init__(
|
||||
self,
|
||||
key: str,
|
||||
timeout: int = 120,
|
||||
extend_interval: int = 30,
|
||||
skip_if_locked: bool = False,
|
||||
blocking: bool = True,
|
||||
blocking_timeout: Optional[float] = None,
|
||||
):
|
||||
self.key = f"async_lock:{key}"
|
||||
self.timeout = timeout
|
||||
self.extend_interval = extend_interval
|
||||
self.skip_if_locked = skip_if_locked
|
||||
self.blocking = blocking
|
||||
self.blocking_timeout = blocking_timeout
|
||||
self._lock = None
|
||||
self._redis = None
|
||||
self._extend_task = None
|
||||
self._acquired = False
|
||||
|
||||
async def _extend_lock_periodically(self):
|
||||
while True:
|
||||
try:
|
||||
await asyncio.sleep(self.extend_interval)
|
||||
if self._lock:
|
||||
await self._lock.extend(self.timeout, replace_ttl=True)
|
||||
logger.debug("Extended lock", key=self.key)
|
||||
except LockError:
|
||||
logger.warning("Failed to extend lock", key=self.key)
|
||||
break
|
||||
except asyncio.CancelledError:
|
||||
break
|
||||
except Exception as e:
|
||||
logger.error("Error extending lock", key=self.key, error=str(e))
|
||||
break
|
||||
|
||||
async def __aenter__(self):
|
||||
self._redis = await get_async_redis_client()
|
||||
self._lock = self._redis.lock(
|
||||
self.key,
|
||||
timeout=self.timeout,
|
||||
blocking=self.blocking,
|
||||
blocking_timeout=self.blocking_timeout,
|
||||
)
|
||||
|
||||
self._acquired = await self._lock.acquire()
|
||||
|
||||
if not self._acquired:
|
||||
if self.skip_if_locked:
|
||||
logger.warning(
|
||||
"Lock already acquired by another process, skipping", key=self.key
|
||||
)
|
||||
return self
|
||||
else:
|
||||
raise LockError(f"Failed to acquire lock: {self.key}")
|
||||
|
||||
self._extend_task = asyncio.create_task(self._extend_lock_periodically())
|
||||
logger.info("Acquired lock", key=self.key)
|
||||
return self
|
||||
|
||||
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||
if self._extend_task:
|
||||
self._extend_task.cancel()
|
||||
try:
|
||||
await self._extend_task
|
||||
except asyncio.CancelledError:
|
||||
pass
|
||||
|
||||
if self._acquired and self._lock:
|
||||
try:
|
||||
await self._lock.release()
|
||||
logger.info("Released lock", key=self.key)
|
||||
except LockError:
|
||||
logger.debug("Lock already released or expired", key=self.key)
|
||||
|
||||
if self._redis:
|
||||
await self._redis.aclose()
|
||||
|
||||
@property
|
||||
def acquired(self) -> bool:
|
||||
return self._acquired
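A short usage sketch of the lock: with `skip_if_locked=True`, contention becomes a no-op instead of an exception, which is how the ICS sync service below uses it (the job body here is illustrative):

    # Illustrative usage: make a periodic job single-flight across workers.
    async def refresh_feed(feed_id: str):
        async with RedisAsyncLock(f"refresh_feed:{feed_id}", skip_if_locked=True) as lock:
            if not lock.acquired:
                return  # another worker holds the lock; skip this run
            ...  # do the work; the background task keeps extending the lock TTL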
|
||||
|
||||
412
server/reflector/services/ics_sync.py Normal file
@@ -0,0 +1,412 @@
|
||||
"""
|
||||
ICS Calendar Synchronization Service
|
||||
|
||||
This module provides services for fetching, parsing, and synchronizing ICS (iCalendar)
|
||||
calendar feeds with room booking data in the database.
|
||||
|
||||
Key Components:
|
||||
- ICSFetchService: Handles HTTP fetching and parsing of ICS calendar data
|
||||
- ICSSyncService: Manages the synchronization process between ICS feeds and database
|
||||
|
||||
Example Usage:
|
||||
# Sync a room's calendar
|
||||
room = Room(id="room1", name="conference-room", ics_url="https://cal.example.com/room.ics")
|
||||
result = await ics_sync_service.sync_room_calendar(room)
|
||||
|
||||
# Result structure:
|
||||
{
|
||||
"status": "success", # success|unchanged|error|skipped
|
||||
"hash": "abc123...", # MD5 hash of ICS content
|
||||
"events_found": 5, # Events matching this room
|
||||
"total_events": 12, # Total events in calendar within time window
|
||||
"events_created": 2, # New events added to database
|
||||
"events_updated": 3, # Existing events modified
|
||||
"events_deleted": 1 # Events soft-deleted (no longer in calendar)
|
||||
}
|
||||
|
||||
Event Matching:
|
||||
Events are matched to rooms by checking if the room's full URL appears in the
|
||||
event's LOCATION or DESCRIPTION fields. Only events within a 25-hour window
|
||||
(1 hour ago to 24 hours from now) are processed.
|
||||
|
||||
Input: ICS calendar URL (e.g., "https://calendar.google.com/calendar/ical/...")
|
||||
Output: EventData objects with structured calendar information:
|
||||
{
|
||||
"ics_uid": "event123@google.com",
|
||||
"title": "Team Meeting",
|
||||
"description": "Weekly sync meeting",
|
||||
"location": "https://meet.company.com/conference-room",
|
||||
"start_time": datetime(2024, 1, 15, 14, 0, tzinfo=UTC),
|
||||
"end_time": datetime(2024, 1, 15, 15, 0, tzinfo=UTC),
|
||||
"attendees": [
|
||||
{"email": "user@company.com", "name": "John Doe", "role": "ORGANIZER"},
|
||||
{"email": "attendee@company.com", "name": "Jane Smith", "status": "ACCEPTED"}
|
||||
],
|
||||
"ics_raw_data": "BEGIN:VEVENT\nUID:event123@google.com\n..."
|
||||
}
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from enum import Enum
|
||||
from typing import TypedDict
|
||||
|
||||
import httpx
|
||||
import pytz
|
||||
import structlog
|
||||
from icalendar import Calendar, Event
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db.calendar_events import CalendarEvent, calendar_events_controller
|
||||
from reflector.db.rooms import Room, rooms_controller
|
||||
from reflector.redis_cache import RedisAsyncLock
|
||||
from reflector.settings import settings
|
||||
|
||||
logger = structlog.get_logger()
|
||||
|
||||
EVENT_WINDOW_DELTA_START = timedelta(hours=-1)
|
||||
EVENT_WINDOW_DELTA_END = timedelta(hours=24)
|
||||
|
||||
|
||||
class SyncStatus(str, Enum):
|
||||
SUCCESS = "success"
|
||||
UNCHANGED = "unchanged"
|
||||
ERROR = "error"
|
||||
SKIPPED = "skipped"
|
||||
|
||||
|
||||
class AttendeeData(TypedDict, total=False):
|
||||
email: str | None
|
||||
name: str | None
|
||||
status: str | None
|
||||
role: str | None
|
||||
|
||||
|
||||
class EventData(TypedDict):
|
||||
ics_uid: str
|
||||
title: str | None
|
||||
description: str | None
|
||||
location: str | None
|
||||
start_time: datetime
|
||||
end_time: datetime
|
||||
attendees: list[AttendeeData]
|
||||
ics_raw_data: str
|
||||
|
||||
|
||||
class SyncStats(TypedDict):
|
||||
events_created: int
|
||||
events_updated: int
|
||||
events_deleted: int
|
||||
|
||||
|
||||
class SyncResultBase(TypedDict):
|
||||
status: SyncStatus
|
||||
|
||||
|
||||
class SyncResult(SyncResultBase, total=False):
|
||||
hash: str | None
|
||||
events_found: int
|
||||
total_events: int
|
||||
events_created: int
|
||||
events_updated: int
|
||||
events_deleted: int
|
||||
error: str | None
|
||||
reason: str | None
|
||||
|
||||
|
||||
class ICSFetchService:
|
||||
def __init__(self):
|
||||
self.client = httpx.AsyncClient(
|
||||
timeout=30.0, headers={"User-Agent": "Reflector/1.0"}
|
||||
)
|
||||
|
||||
async def fetch_ics(self, url: str) -> str:
|
||||
response = await self.client.get(url)
|
||||
response.raise_for_status()
|
||||
|
||||
return response.text
|
||||
|
||||
def parse_ics(self, ics_content: str) -> Calendar:
|
||||
return Calendar.from_ical(ics_content)
|
||||
|
||||
def extract_room_events(
|
||||
self, calendar: Calendar, room_name: str, room_url: str
|
||||
) -> tuple[list[EventData], int]:
|
||||
events = []
|
||||
total_events = 0
|
||||
now = datetime.now(timezone.utc)
|
||||
window_start = now + EVENT_WINDOW_DELTA_START
|
||||
window_end = now + EVENT_WINDOW_DELTA_END
|
||||
|
||||
for component in calendar.walk():
|
||||
if component.name != "VEVENT":
|
||||
continue
|
||||
|
||||
status = component.get("STATUS", "").upper()
|
||||
if status == "CANCELLED":
|
||||
continue
|
||||
|
||||
# Count total non-cancelled events in the time window
|
||||
event_data = self._parse_event(component)
|
||||
if event_data and window_start <= event_data["start_time"] <= window_end:
|
||||
total_events += 1
|
||||
|
||||
# Check if event matches this room
|
||||
if self._event_matches_room(component, room_name, room_url):
|
||||
events.append(event_data)
|
||||
|
||||
return events, total_events
|
||||
|
||||
def _event_matches_room(self, event: Event, room_name: str, room_url: str) -> bool:
|
||||
location = str(event.get("LOCATION", ""))
|
||||
description = str(event.get("DESCRIPTION", ""))
|
||||
|
||||
# Only match full room URL
|
||||
# XXX left here as a pattern, to be extended later with tinyurl or similar
|
||||
patterns = [
|
||||
room_url,
|
||||
]
|
||||
|
||||
# Check location and description for patterns
|
||||
text_to_check = f"{location} {description}".lower()
|
||||
for pattern in patterns:
|
||||
if pattern.lower() in text_to_check:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _parse_event(self, event: Event) -> EventData | None:
|
||||
uid = str(event.get("UID", ""))
|
||||
summary = str(event.get("SUMMARY", ""))
|
||||
description = str(event.get("DESCRIPTION", ""))
|
||||
location = str(event.get("LOCATION", ""))
|
||||
dtstart = event.get("DTSTART")
|
||||
dtend = event.get("DTEND")
|
||||
|
||||
if not dtstart:
|
||||
return None
|
||||
|
||||
# Convert fields
|
||||
start_time = self._normalize_datetime(
|
||||
dtstart.dt if hasattr(dtstart, "dt") else dtstart
|
||||
)
|
||||
end_time = (
|
||||
self._normalize_datetime(dtend.dt if hasattr(dtend, "dt") else dtend)
|
||||
if dtend
|
||||
else start_time + timedelta(hours=1)
|
||||
)
|
||||
attendees = self._parse_attendees(event)
|
||||
|
||||
# Get raw event data for storage
|
||||
raw_data = event.to_ical().decode("utf-8")
|
||||
|
||||
return {
|
||||
"ics_uid": uid,
|
||||
"title": summary,
|
||||
"description": description,
|
||||
"location": location,
|
||||
"start_time": start_time,
|
||||
"end_time": end_time,
|
||||
"attendees": attendees,
|
||||
"ics_raw_data": raw_data,
|
||||
}
|
||||
|
||||
def _normalize_datetime(self, dt) -> datetime:
|
||||
# Ensure datetime is with timezone, if not, assume UTC
|
||||
if isinstance(dt, date) and not isinstance(dt, datetime):
|
||||
dt = datetime.combine(dt, datetime.min.time())
|
||||
dt = pytz.UTC.localize(dt)
|
||||
elif isinstance(dt, datetime):
|
||||
if dt.tzinfo is None:
|
||||
dt = pytz.UTC.localize(dt)
|
||||
else:
|
||||
dt = dt.astimezone(pytz.UTC)
|
||||
|
||||
return dt
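The normalization above makes every value timezone-aware UTC regardless of how the feed encoded it; a quick illustration with assumed inputs:

    # Illustration (values assumed): date, naive datetime, and aware datetime all end up in UTC.
    from datetime import date, datetime

    import pytz

    svc = ICSFetchService()
    svc._normalize_datetime(date(2024, 1, 15))             # -> 2024-01-15 00:00:00+00:00
    svc._normalize_datetime(datetime(2024, 1, 15, 14, 0))  # naive -> assumed UTC
    svc._normalize_datetime(
        pytz.timezone("US/Eastern").localize(datetime(2024, 1, 15, 9, 0))
    )                                                       # -> 2024-01-15 14:00:00+00:00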
|
||||
|
||||
def _parse_attendees(self, event: Event) -> list[AttendeeData]:
|
||||
# Extracts attendee information from both ATTENDEE and ORGANIZER properties.
|
||||
# Handles malformed comma-separated email addresses in single ATTENDEE fields
|
||||
# by splitting them into separate attendee entries. Returns a list of attendee
|
||||
# data including email, name, status, and role information.
|
||||
final_attendees = []
|
||||
|
||||
attendees = event.get("ATTENDEE", [])
|
||||
if not isinstance(attendees, list):
|
||||
attendees = [attendees]
|
||||
for att in attendees:
|
||||
email_str = str(att).replace("mailto:", "") if att else None
|
||||
|
||||
# Handle malformed comma-separated email addresses in a single ATTENDEE field
|
||||
if email_str and "," in email_str:
|
||||
# Split comma-separated emails and create separate attendee entries
|
||||
email_parts = [email.strip() for email in email_str.split(",")]
|
||||
for email in email_parts:
|
||||
if email and "@" in email:
|
||||
clean_email = email.replace("MAILTO:", "").replace(
|
||||
"mailto:", ""
|
||||
)
|
||||
att_data: AttendeeData = {
|
||||
"email": clean_email,
|
||||
"name": att.params.get("CN")
|
||||
if hasattr(att, "params") and email == email_parts[0]
|
||||
else None,
|
||||
"status": att.params.get("PARTSTAT")
|
||||
if hasattr(att, "params") and email == email_parts[0]
|
||||
else None,
|
||||
"role": att.params.get("ROLE")
|
||||
if hasattr(att, "params") and email == email_parts[0]
|
||||
else None,
|
||||
}
|
||||
final_attendees.append(att_data)
|
||||
else:
|
||||
# Normal single attendee
|
||||
att_data: AttendeeData = {
|
||||
"email": email_str,
|
||||
"name": att.params.get("CN") if hasattr(att, "params") else None,
|
||||
"status": att.params.get("PARTSTAT")
|
||||
if hasattr(att, "params")
|
||||
else None,
|
||||
"role": att.params.get("ROLE") if hasattr(att, "params") else None,
|
||||
}
|
||||
final_attendees.append(att_data)
|
||||
|
||||
# Add organizer
|
||||
organizer = event.get("ORGANIZER")
|
||||
if organizer:
|
||||
org_email = (
|
||||
str(organizer).replace("mailto:", "").replace("MAILTO:", "")
|
||||
if organizer
|
||||
else None
|
||||
)
|
||||
org_data: AttendeeData = {
|
||||
"email": org_email,
|
||||
"name": organizer.params.get("CN")
|
||||
if hasattr(organizer, "params")
|
||||
else None,
|
||||
"role": "ORGANIZER",
|
||||
}
|
||||
final_attendees.append(org_data)
|
||||
|
||||
return final_attendees
|
||||
|
||||
|
||||
class ICSSyncService:
|
||||
def __init__(self):
|
||||
self.fetch_service = ICSFetchService()
|
||||
|
||||
async def sync_room_calendar(self, session: AsyncSession, room: Room) -> SyncResult:
|
||||
async with RedisAsyncLock(
|
||||
f"ics_sync_room:{room.id}", skip_if_locked=True
|
||||
) as lock:
|
||||
if not lock.acquired:
|
||||
logger.warning("ICS sync already in progress for room", room_id=room.id)
|
||||
return {
|
||||
"status": SyncStatus.SKIPPED,
|
||||
"reason": "Sync already in progress",
|
||||
}
|
||||
|
||||
return await self._sync_room_calendar(session, room)
|
||||
|
||||
async def _sync_room_calendar(
|
||||
self, session: AsyncSession, room: Room
|
||||
) -> SyncResult:
|
||||
if not room.ics_enabled or not room.ics_url:
|
||||
return {"status": SyncStatus.SKIPPED, "reason": "ICS not configured"}
|
||||
|
||||
try:
|
||||
if not self._should_sync(room):
|
||||
return {"status": SyncStatus.SKIPPED, "reason": "Not time to sync yet"}
|
||||
|
||||
ics_content = await self.fetch_service.fetch_ics(room.ics_url)
|
||||
calendar = self.fetch_service.parse_ics(ics_content)
|
||||
|
||||
content_hash = hashlib.md5(ics_content.encode()).hexdigest()
|
||||
if room.ics_last_etag == content_hash:
|
||||
logger.info("No changes in ICS for room", room_id=room.id)
|
||||
room_url = f"{settings.UI_BASE_URL}/{room.name}"
|
||||
events, total_events = self.fetch_service.extract_room_events(
|
||||
calendar, room.name, room_url
|
||||
)
|
||||
return {
|
||||
"status": SyncStatus.UNCHANGED,
|
||||
"hash": content_hash,
|
||||
"events_found": len(events),
|
||||
"total_events": total_events,
|
||||
"events_created": 0,
|
||||
"events_updated": 0,
|
||||
"events_deleted": 0,
|
||||
}
|
||||
|
||||
# Extract matching events
|
||||
room_url = f"{settings.UI_BASE_URL}/{room.name}"
|
||||
events, total_events = self.fetch_service.extract_room_events(
|
||||
calendar, room.name, room_url
|
||||
)
|
||||
sync_result = await self._sync_events_to_database(session, room.id, events)
|
||||
|
||||
# Update room sync metadata
|
||||
await rooms_controller.update(
|
||||
session,
|
||||
room,
|
||||
{
|
||||
"ics_last_sync": datetime.now(timezone.utc),
|
||||
"ics_last_etag": content_hash,
|
||||
},
|
||||
mutate=False,
|
||||
)
|
||||
|
||||
return {
|
||||
"status": SyncStatus.SUCCESS,
|
||||
"hash": content_hash,
|
||||
"events_found": len(events),
|
||||
"total_events": total_events,
|
||||
**sync_result,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to sync ICS for room", room_id=room.id, error=str(e))
|
||||
return {"status": SyncStatus.ERROR, "error": str(e)}
|
||||
|
||||
def _should_sync(self, room: Room) -> bool:
|
||||
if not room.ics_last_sync:
|
||||
return True
|
||||
|
||||
time_since_sync = datetime.now(timezone.utc) - room.ics_last_sync
|
||||
return time_since_sync.total_seconds() >= room.ics_fetch_interval
|
||||
|
||||
async def _sync_events_to_database(
|
||||
self, session: AsyncSession, room_id: str, events: list[EventData]
|
||||
) -> SyncStats:
|
||||
created = 0
|
||||
updated = 0
|
||||
|
||||
current_ics_uids = []
|
||||
|
||||
for event_data in events:
|
||||
calendar_event = CalendarEvent(room_id=room_id, **event_data)
|
||||
existing = await calendar_events_controller.get_by_ics_uid(
|
||||
session, room_id, event_data["ics_uid"]
|
||||
)
|
||||
|
||||
if existing:
|
||||
updated += 1
|
||||
else:
|
||||
created += 1
|
||||
|
||||
await calendar_events_controller.upsert(session, calendar_event)
|
||||
current_ics_uids.append(event_data["ics_uid"])
|
||||
|
||||
# Soft delete events that are no longer in calendar
|
||||
deleted = await calendar_events_controller.soft_delete_missing(
|
||||
session, room_id, current_ics_uids
|
||||
)
|
||||
|
||||
return {
|
||||
"events_created": created,
|
||||
"events_updated": updated,
|
||||
"events_deleted": deleted,
|
||||
}
|
||||
|
||||
|
||||
ics_sync_service = ICSSyncService()
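A hedged sketch of how the module-level service above might be driven periodically; the surrounding task and the room query are illustrative, and only `sync_room_calendar(session, room)` comes from this file:

    # Illustrative periodic driver: sync every ICS-enabled room with a fresh session.
    from reflector.db import get_session_factory
    from reflector.db.rooms import rooms_controller

    async def sync_all_room_calendars():
        async with get_session_factory()() as session:
            rooms = await rooms_controller.get_all(session)  # assumed helper on the controller
            for room in rooms:
                if room.ics_enabled and room.ics_url:
                    await ics_sync_service.sync_room_calendar(session, room)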
|
||||
@@ -9,12 +9,12 @@ async def export_db(filename: str) -> None:
|
||||
filename = pathlib.Path(filename).resolve()
|
||||
settings.DATABASE_URL = f"sqlite:///{filename}"
|
||||
|
||||
from reflector.db import get_database, transcripts
|
||||
from reflector.db import get_session_factory
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
transcripts = await database.fetch_all(transcripts.select())
|
||||
await database.disconnect()
|
||||
session_factory = get_session_factory()
|
||||
async with session_factory() as session:
|
||||
transcripts = await transcripts_controller.get_all(session)
|
||||
|
||||
def export_transcript(transcript, output_dir):
|
||||
for topic in transcript.topics:
|
||||
|
||||
@@ -8,12 +8,12 @@ async def export_db(filename: str) -> None:
|
||||
filename = pathlib.Path(filename).resolve()
|
||||
settings.DATABASE_URL = f"sqlite:///{filename}"
|
||||
|
||||
from reflector.db import get_database, transcripts
|
||||
from reflector.db import get_session_factory
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
transcripts = await database.fetch_all(transcripts.select())
|
||||
await database.disconnect()
|
||||
session_factory = get_session_factory()
|
||||
async with session_factory() as session:
|
||||
transcripts = await transcripts_controller.get_all(session)
|
||||
|
||||
def export_transcript(transcript):
|
||||
tid = transcript.id
|
||||
|
||||
@@ -11,6 +11,9 @@ import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Literal
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db import get_session_factory
|
||||
from reflector.db.transcripts import SourceKind, TranscriptTopic, transcripts_controller
|
||||
from reflector.logger import logger
|
||||
from reflector.pipelines.main_file_pipeline import (
|
||||
@@ -50,6 +53,7 @@ TranscriptId = str
|
||||
# common interface for every flow: it needs an Entry in db with specific ceremony (file path + status + actual file in file system)
|
||||
# ideally we want to get rid of it at some point
|
||||
async def prepare_entry(
|
||||
session: AsyncSession,
|
||||
source_path: str,
|
||||
source_language: str,
|
||||
target_language: str,
|
||||
@@ -57,6 +61,7 @@ async def prepare_entry(
|
||||
file_path = Path(source_path)
|
||||
|
||||
transcript = await transcripts_controller.add(
|
||||
session,
|
||||
file_path.name,
|
||||
# note that the real file upload uses SourceKind.LIVE, which is an error
|
||||
source_kind=SourceKind.FILE,
|
||||
@@ -78,16 +83,20 @@ async def prepare_entry(
|
||||
logger.info(f"Copied {source_path} to {upload_path}")
|
||||
|
||||
# pipelines expect entity status "uploaded"
|
||||
await transcripts_controller.update(transcript, {"status": "uploaded"})
|
||||
await transcripts_controller.update(session, transcript, {"status": "uploaded"})
|
||||
|
||||
return transcript.id
|
||||
|
||||
|
||||
# same reason as prepare_entry
|
||||
async def extract_result_from_entry(
|
||||
transcript_id: TranscriptId, output_path: str
|
||||
session: AsyncSession,
|
||||
transcript_id: TranscriptId,
|
||||
output_path: str,
|
||||
) -> None:
|
||||
post_final_transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
post_final_transcript = await transcripts_controller.get_by_id(
|
||||
session, transcript_id
|
||||
)
|
||||
|
||||
# assert post_final_transcript.status == "ended"
|
||||
# File pipeline doesn't set status to "ended", only live pipeline does https://github.com/Monadical-SAS/reflector/issues/582
|
||||
@@ -115,6 +124,7 @@ async def extract_result_from_entry(
|
||||
|
||||
|
||||
async def process_live_pipeline(
|
||||
session: AsyncSession,
|
||||
transcript_id: TranscriptId,
|
||||
):
|
||||
"""Process transcript_id with transcription and diarization"""
|
||||
@@ -123,7 +133,9 @@ async def process_live_pipeline(
|
||||
await live_pipeline_process(transcript_id=transcript_id)
|
||||
print(f"Processing complete for transcript {transcript_id}", file=sys.stderr)
|
||||
|
||||
pre_final_transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
pre_final_transcript = await transcripts_controller.get_by_id(
|
||||
session, transcript_id
|
||||
)
|
||||
|
||||
# assert documented behaviour: after process, the pipeline isn't ended; this is why pipeline_post is called
|
||||
assert pre_final_transcript.status != "ended"
|
||||
@@ -160,21 +172,17 @@ async def process(
|
||||
pipeline: Literal["live", "file"],
|
||||
output_path: str = None,
|
||||
):
|
||||
from reflector.db import get_database
|
||||
|
||||
database = get_database()
|
||||
# db connect is a part of ceremony
|
||||
await database.connect()
|
||||
|
||||
try:
|
||||
session_factory = get_session_factory()
|
||||
async with session_factory() as session:
|
||||
transcript_id = await prepare_entry(
|
||||
session,
|
||||
source_path,
|
||||
source_language,
|
||||
target_language,
|
||||
)
|
||||
|
||||
pipeline_handlers = {
|
||||
"live": process_live_pipeline,
|
||||
"live": lambda tid: process_live_pipeline(session, tid),
|
||||
"file": process_file_pipeline,
|
||||
}
|
||||
|
||||
@@ -184,9 +192,7 @@ async def process(
|
||||
|
||||
await handler(transcript_id)
|
||||
|
||||
await extract_result_from_entry(transcript_id, output_path)
|
||||
finally:
|
||||
await database.disconnect()
|
||||
await extract_result_from_entry(session, transcript_id, output_path)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -10,6 +10,7 @@ from reflector.db.meetings import (
|
||||
meeting_consent_controller,
|
||||
meetings_controller,
|
||||
)
|
||||
from reflector.db.rooms import rooms_controller
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@@ -41,3 +42,34 @@ async def meeting_audio_consent(
|
||||
updated_consent = await meeting_consent_controller.upsert(consent)
|
||||
|
||||
return {"status": "success", "consent_id": updated_consent.id}
|
||||
|
||||
|
||||
@router.patch("/meetings/{meeting_id}/deactivate")
|
||||
async def meeting_deactivate(
|
||||
meeting_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user)],
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
if not user_id:
|
||||
raise HTTPException(status_code=401, detail="Authentication required")
|
||||
|
||||
meeting = await meetings_controller.get_by_id(meeting_id)
|
||||
if not meeting:
|
||||
raise HTTPException(status_code=404, detail="Meeting not found")
|
||||
|
||||
if not meeting.is_active:
|
||||
return {"status": "success", "meeting_id": meeting_id}
|
||||
|
||||
# Only room owner or meeting creator can deactivate
|
||||
room = await rooms_controller.get_by_id(meeting.room_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
if user_id != room.user_id and user_id != meeting.user_id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Only the room owner can deactivate meetings"
|
||||
)
|
||||
|
||||
await meetings_controller.update_meeting(meeting_id, is_active=False)
|
||||
|
||||
return {"status": "success", "meeting_id": meeting_id}
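
# Hypothetical client call for the deactivate endpoint above (host, path
# prefix and token are placeholders, not taken from the diff):
import httpx

resp = httpx.patch(
    "https://reflector.example.com/v1/meetings/<meeting-id>/deactivate",
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
assert resp.json() == {"status": "success", "meeting_id": "<meeting-id>"}
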
|
||||
|
||||
@@ -1,34 +1,28 @@
|
||||
import logging
|
||||
import sqlite3
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Annotated, Literal, Optional
|
||||
from enum import Enum
|
||||
from typing import Annotated, Any, Literal, Optional
|
||||
|
||||
import asyncpg.exceptions
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi_pagination import Page
|
||||
from fastapi_pagination.ext.databases import apaginate
|
||||
from fastapi_pagination.ext.sqlalchemy import paginate
|
||||
from pydantic import BaseModel
|
||||
from redis.exceptions import LockError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_database
|
||||
from reflector.db import get_session
|
||||
from reflector.db.calendar_events import calendar_events_controller
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.redis_cache import RedisAsyncLock
|
||||
from reflector.services.ics_sync import ics_sync_service
|
||||
from reflector.settings import settings
|
||||
from reflector.whereby import create_meeting, upload_logo
|
||||
from reflector.worker.webhook import test_webhook
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def parse_datetime_with_timezone(iso_string: str) -> datetime:
|
||||
"""Parse ISO datetime string and ensure timezone awareness (defaults to UTC if naive)."""
|
||||
dt = datetime.fromisoformat(iso_string)
|
||||
if dt.tzinfo is None:
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt
|
||||
|
||||
|
||||
class Room(BaseModel):
|
||||
id: str
|
||||
@@ -43,6 +37,11 @@ class Room(BaseModel):
|
||||
recording_type: str
|
||||
recording_trigger: str
|
||||
is_shared: bool
|
||||
ics_url: Optional[str] = None
|
||||
ics_fetch_interval: int = 300
|
||||
ics_enabled: bool = False
|
||||
ics_last_sync: Optional[datetime] = None
|
||||
ics_last_etag: Optional[str] = None
|
||||
|
||||
|
||||
class RoomDetails(Room):
|
||||
@@ -54,10 +53,22 @@ class Meeting(BaseModel):
|
||||
id: str
|
||||
room_name: str
|
||||
room_url: str
|
||||
# TODO: it's not always present; the type should be str | None
|
||||
host_room_url: str
|
||||
start_date: datetime
|
||||
end_date: datetime
|
||||
user_id: str | None = None
|
||||
room_id: str | None = None
|
||||
is_locked: bool = False
|
||||
room_mode: Literal["normal", "group"] = "normal"
|
||||
recording_type: Literal["none", "local", "cloud"] = "cloud"
|
||||
recording_trigger: Literal[
|
||||
"none", "prompt", "automatic", "automatic-2nd-participant"
|
||||
] = "automatic-2nd-participant"
|
||||
num_clients: int = 0
|
||||
is_active: bool = True
|
||||
calendar_event_id: str | None = None
|
||||
calendar_metadata: dict[str, Any] | None = None
|
||||
|
||||
|
||||
class CreateRoom(BaseModel):
|
||||
@@ -72,20 +83,30 @@ class CreateRoom(BaseModel):
|
||||
is_shared: bool
|
||||
webhook_url: str
|
||||
webhook_secret: str
|
||||
ics_url: Optional[str] = None
|
||||
ics_fetch_interval: int = 300
|
||||
ics_enabled: bool = False
|
||||
|
||||
|
||||
class UpdateRoom(BaseModel):
|
||||
name: str
|
||||
zulip_auto_post: bool
|
||||
zulip_stream: str
|
||||
zulip_topic: str
|
||||
is_locked: bool
|
||||
room_mode: str
|
||||
recording_type: str
|
||||
recording_trigger: str
|
||||
is_shared: bool
|
||||
webhook_url: str
|
||||
webhook_secret: str
|
||||
name: Optional[str] = None
|
||||
zulip_auto_post: Optional[bool] = None
|
||||
zulip_stream: Optional[str] = None
|
||||
zulip_topic: Optional[str] = None
|
||||
is_locked: Optional[bool] = None
|
||||
room_mode: Optional[str] = None
|
||||
recording_type: Optional[str] = None
|
||||
recording_trigger: Optional[str] = None
|
||||
is_shared: Optional[bool] = None
|
||||
webhook_url: Optional[str] = None
|
||||
webhook_secret: Optional[str] = None
|
||||
ics_url: Optional[str] = None
|
||||
ics_fetch_interval: Optional[int] = None
|
||||
ics_enabled: Optional[bool] = None
|
||||
|
||||
|
||||
class CreateRoomMeeting(BaseModel):
|
||||
allow_duplicated: Optional[bool] = False
|
||||
|
||||
|
||||
class DeletionStatus(BaseModel):
|
||||
@@ -100,43 +121,123 @@ class WebhookTestResult(BaseModel):
|
||||
response_preview: str | None = None
|
||||
|
||||
|
||||
class ICSStatus(BaseModel):
|
||||
status: Literal["enabled", "disabled"]
|
||||
last_sync: Optional[datetime] = None
|
||||
next_sync: Optional[datetime] = None
|
||||
last_etag: Optional[str] = None
|
||||
events_count: int = 0
|
||||
|
||||
|
||||
class SyncStatus(str, Enum):
|
||||
success = "success"
|
||||
unchanged = "unchanged"
|
||||
error = "error"
|
||||
skipped = "skipped"
|
||||
|
||||
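
# Quick illustration (not part of the diff): because SyncStatus subclasses str,
# members compare equal to the raw status strings the sync service reports, so
# both string checks and enum checks work.
assert SyncStatus.success == "success"
assert SyncStatus("unchanged") is SyncStatus.unchanged
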
|
||||
class ICSSyncResult(BaseModel):
|
||||
status: SyncStatus
|
||||
hash: Optional[str] = None
|
||||
events_found: int = 0
|
||||
total_events: int = 0
|
||||
events_created: int = 0
|
||||
events_updated: int = 0
|
||||
events_deleted: int = 0
|
||||
error: Optional[str] = None
|
||||
reason: Optional[str] = None
|
||||
|
||||
|
||||
class CalendarEventResponse(BaseModel):
|
||||
id: str
|
||||
room_id: str
|
||||
ics_uid: str
|
||||
title: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
start_time: datetime
|
||||
end_time: datetime
|
||||
attendees: Optional[list[dict]] = None
|
||||
location: Optional[str] = None
|
||||
last_synced: datetime
|
||||
created_at: datetime
|
||||
updated_at: datetime
|
||||
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def parse_datetime_with_timezone(iso_string: str) -> datetime:
|
||||
"""Parse ISO datetime string and ensure timezone awareness (defaults to UTC if naive)."""
|
||||
dt = datetime.fromisoformat(iso_string)
|
||||
if dt.tzinfo is None:
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt
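
# Quick illustration (not in the diff): naive ISO strings are coerced to UTC,
# while strings that already carry an offset keep it.
_naive = parse_datetime_with_timezone("2025-09-17T10:00:00")
_aware = parse_datetime_with_timezone("2025-09-17T10:00:00+02:00")
assert _naive.tzinfo == timezone.utc
assert _aware.utcoffset() == timedelta(hours=2)
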
|
||||
|
||||
|
||||
@router.get("/rooms", response_model=Page[RoomDetails])
|
||||
async def rooms_list(
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> list[RoomDetails]:
|
||||
if not user and not settings.PUBLIC_MODE:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
return await apaginate(
|
||||
get_database(),
|
||||
await rooms_controller.get_all(
|
||||
user_id=user_id, order_by="-created_at", return_query=True
|
||||
),
|
||||
query = await rooms_controller.get_all(
|
||||
session, user_id=user_id, order_by="-created_at", return_query=True
|
||||
)
|
||||
return await paginate(session, query)
|
||||
|
||||
|
||||
@router.get("/rooms/{room_id}", response_model=RoomDetails)
|
||||
async def rooms_get(
|
||||
room_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_id_for_http(room_id, user_id=user_id)
|
||||
room = await rooms_controller.get_by_id_for_http(session, room_id, user_id=user_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
return room
|
||||
|
||||
|
||||
@router.get("/rooms/name/{room_name}", response_model=RoomDetails)
|
||||
async def rooms_get_by_name(
|
||||
room_name: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
# Convert to RoomDetails format (add webhook fields if user is owner)
|
||||
room_dict = room.__dict__.copy()
|
||||
if user_id == room.user_id:
|
||||
# User is owner, include webhook details if available
|
||||
room_dict["webhook_url"] = getattr(room, "webhook_url", None)
|
||||
room_dict["webhook_secret"] = getattr(room, "webhook_secret", None)
|
||||
else:
|
||||
# Non-owner, hide webhook details
|
||||
room_dict["webhook_url"] = None
|
||||
room_dict["webhook_secret"] = None
|
||||
|
||||
return RoomDetails(**room_dict)
|
||||
|
||||
|
||||
@router.post("/rooms", response_model=Room)
|
||||
async def rooms_create(
|
||||
room: CreateRoom,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
return await rooms_controller.add(
|
||||
session,
|
||||
name=room.name,
|
||||
user_id=user_id,
|
||||
zulip_auto_post=room.zulip_auto_post,
|
||||
@@ -149,6 +250,9 @@ async def rooms_create(
|
||||
is_shared=room.is_shared,
|
||||
webhook_url=room.webhook_url,
|
||||
webhook_secret=room.webhook_secret,
|
||||
ics_url=room.ics_url,
|
||||
ics_fetch_interval=room.ics_fetch_interval,
|
||||
ics_enabled=room.ics_enabled,
|
||||
)
|
||||
|
||||
|
||||
@@ -157,13 +261,14 @@ async def rooms_update(
|
||||
room_id: str,
|
||||
info: UpdateRoom,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_id_for_http(room_id, user_id=user_id)
|
||||
room = await rooms_controller.get_by_id_for_http(session, room_id, user_id=user_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
values = info.dict(exclude_unset=True)
|
||||
await rooms_controller.update(room, values)
|
||||
await rooms_controller.update(session, room, values)
|
||||
return room
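
# Sketch of the partial-update semantics relied on above (values are made up):
# with every UpdateRoom field optional, dict(exclude_unset=True) contains only
# the fields the client actually sent, so a PATCH cannot blank out the rest.
_patch = UpdateRoom(is_locked=True)
assert _patch.dict(exclude_unset=True) == {"is_locked": True}
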
|
||||
|
||||
|
||||
@@ -171,69 +276,67 @@ async def rooms_update(
|
||||
async def rooms_delete(
|
||||
room_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_id(room_id, user_id=user_id)
|
||||
room = await rooms_controller.get_by_id(session, room_id, user_id=user_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
await rooms_controller.remove_by_id(room.id, user_id=user_id)
|
||||
await rooms_controller.remove_by_id(session, room.id, user_id=user_id)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
|
||||
@router.post("/rooms/{room_name}/meeting", response_model=Meeting)
|
||||
async def rooms_create_meeting(
|
||||
room_name: str,
|
||||
info: CreateRoomMeeting,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(room_name)
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
meeting = await meetings_controller.get_active(room=room, current_time=current_time)
|
||||
try:
|
||||
async with RedisAsyncLock(
|
||||
f"create_meeting:{room_name}",
|
||||
timeout=30,
|
||||
extend_interval=10,
|
||||
blocking_timeout=5.0,
|
||||
) as lock:
|
||||
current_time = datetime.now(timezone.utc)
|
||||
|
||||
if meeting is None:
|
||||
end_date = current_time + timedelta(hours=8)
|
||||
meeting = None
|
||||
if not info.allow_duplicated:
|
||||
meeting = await meetings_controller.get_active(
|
||||
session, room=room, current_time=current_time
|
||||
)
|
||||
|
||||
whereby_meeting = await create_meeting("", end_date=end_date, room=room)
|
||||
|
||||
await upload_logo(whereby_meeting["roomName"], "./images/logo.png")
|
||||
|
||||
# Now try to save to database
|
||||
try:
|
||||
meeting = await meetings_controller.create(
|
||||
id=whereby_meeting["meetingId"],
|
||||
room_name=whereby_meeting["roomName"],
|
||||
room_url=whereby_meeting["roomUrl"],
|
||||
host_room_url=whereby_meeting["hostRoomUrl"],
|
||||
start_date=parse_datetime_with_timezone(whereby_meeting["startDate"]),
|
||||
end_date=parse_datetime_with_timezone(whereby_meeting["endDate"]),
|
||||
room=room,
|
||||
)
|
||||
except (asyncpg.exceptions.UniqueViolationError, sqlite3.IntegrityError):
|
||||
# Another request already created a meeting for this room
|
||||
# Log this race condition occurrence
|
||||
logger.warning(
|
||||
"Race condition detected for room %s and meeting %s - fetching existing meeting",
|
||||
room.name,
|
||||
whereby_meeting["meetingId"],
|
||||
)
|
||||
|
||||
# Fetch the meeting that was created by the other request
|
||||
meeting = await meetings_controller.get_active(
|
||||
room=room, current_time=current_time
|
||||
)
|
||||
if meeting is None:
|
||||
# Edge case: meeting was created but expired/deleted between checks
|
||||
logger.error(
|
||||
"Meeting disappeared after race condition for room %s",
|
||||
room.name,
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=503, detail="Unable to join meeting - please try again"
|
||||
end_date = current_time + timedelta(hours=8)
|
||||
|
||||
whereby_meeting = await create_meeting("", end_date=end_date, room=room)
|
||||
|
||||
await upload_logo(whereby_meeting["roomName"], "./images/logo.png")
|
||||
|
||||
meeting = await meetings_controller.create(
|
||||
session,
|
||||
id=whereby_meeting["meetingId"],
|
||||
room_name=whereby_meeting["roomName"],
|
||||
room_url=whereby_meeting["roomUrl"],
|
||||
host_room_url=whereby_meeting["hostRoomUrl"],
|
||||
start_date=parse_datetime_with_timezone(
|
||||
whereby_meeting["startDate"]
|
||||
),
|
||||
end_date=parse_datetime_with_timezone(whereby_meeting["endDate"]),
|
||||
room=room,
|
||||
)
|
||||
except LockError:
|
||||
logger.warning("Failed to acquire lock for room %s within timeout", room_name)
|
||||
raise HTTPException(
|
||||
status_code=503, detail="Meeting creation in progress, please try again"
|
||||
)
|
||||
|
||||
if user_id != room.user_id:
|
||||
meeting.host_room_url = ""
|
||||
@@ -245,11 +348,12 @@ async def rooms_create_meeting(
|
||||
async def rooms_test_webhook(
|
||||
room_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""Test webhook configuration by sending a sample payload."""
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
room = await rooms_controller.get_by_id(room_id)
|
||||
room = await rooms_controller.get_by_id(session, room_id)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
@@ -260,3 +364,209 @@ async def rooms_test_webhook(
|
||||
|
||||
result = await test_webhook(room_id)
|
||||
return WebhookTestResult(**result)
|
||||
|
||||
|
||||
@router.post("/rooms/{room_name}/ics/sync", response_model=ICSSyncResult)
|
||||
async def rooms_sync_ics(
|
||||
room_name: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
if user_id != room.user_id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Only room owner can trigger ICS sync"
|
||||
)
|
||||
|
||||
if not room.ics_enabled or not room.ics_url:
|
||||
raise HTTPException(status_code=400, detail="ICS not configured for this room")
|
||||
|
||||
result = await ics_sync_service.sync_room_calendar(session, room)
|
||||
|
||||
if result["status"] == "error":
|
||||
raise HTTPException(
|
||||
status_code=500, detail=result.get("error", "Unknown error")
|
||||
)
|
||||
|
||||
return ICSSyncResult(**result)
|
||||
|
||||
|
||||
@router.get("/rooms/{room_name}/ics/status", response_model=ICSStatus)
|
||||
async def rooms_ics_status(
|
||||
room_name: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
if user_id != room.user_id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Only room owner can view ICS status"
|
||||
)
|
||||
|
||||
next_sync = None
|
||||
if room.ics_enabled and room.ics_last_sync:
|
||||
next_sync = room.ics_last_sync + timedelta(seconds=room.ics_fetch_interval)
|
||||
|
||||
events = await calendar_events_controller.get_by_room(
|
||||
session, room.id, include_deleted=False
|
||||
)
|
||||
|
||||
return ICSStatus(
|
||||
status="enabled" if room.ics_enabled else "disabled",
|
||||
last_sync=room.ics_last_sync,
|
||||
next_sync=next_sync,
|
||||
last_etag=room.ics_last_etag,
|
||||
events_count=len(events),
|
||||
)
|
||||
|
||||
|
||||
@router.get("/rooms/{room_name}/meetings", response_model=list[CalendarEventResponse])
|
||||
async def rooms_list_meetings(
|
||||
room_name: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
events = await calendar_events_controller.get_by_room(
|
||||
session, room.id, include_deleted=False
|
||||
)
|
||||
|
||||
if user_id != room.user_id:
|
||||
for event in events:
|
||||
event.description = None
|
||||
event.attendees = None
|
||||
|
||||
return events
|
||||
|
||||
|
||||
@router.get(
|
||||
"/rooms/{room_name}/meetings/upcoming", response_model=list[CalendarEventResponse]
|
||||
)
|
||||
async def rooms_list_upcoming_meetings(
|
||||
room_name: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
minutes_ahead: int = 120,
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
events = await calendar_events_controller.get_upcoming(
|
||||
session, room.id, minutes_ahead=minutes_ahead
|
||||
)
|
||||
|
||||
if user_id != room.user_id:
|
||||
for event in events:
|
||||
event.description = None
|
||||
event.attendees = None
|
||||
|
||||
return events
|
||||
|
||||
|
||||
@router.get("/rooms/{room_name}/meetings/active", response_model=list[Meeting])
|
||||
async def rooms_list_active_meetings(
|
||||
room_name: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
meetings = await meetings_controller.get_all_active_for_room(
|
||||
session, room=room, current_time=current_time
|
||||
)
|
||||
|
||||
# Hide host URLs from non-owners
|
||||
if user_id != room.user_id:
|
||||
for meeting in meetings:
|
||||
meeting.host_room_url = ""
|
||||
|
||||
return meetings
|
||||
|
||||
|
||||
@router.get("/rooms/{room_name}/meetings/{meeting_id}", response_model=Meeting)
|
||||
async def rooms_get_meeting(
|
||||
room_name: str,
|
||||
meeting_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""Get a single meeting by ID within a specific room."""
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
meeting = await meetings_controller.get_by_id(session, meeting_id)
|
||||
if not meeting:
|
||||
raise HTTPException(status_code=404, detail="Meeting not found")
|
||||
|
||||
if meeting.room_id != room.id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Meeting does not belong to this room"
|
||||
)
|
||||
|
||||
if user_id != room.user_id and not room.is_shared:
|
||||
meeting.host_room_url = ""
|
||||
|
||||
return meeting
|
||||
|
||||
|
||||
@router.post("/rooms/{room_name}/meetings/{meeting_id}/join", response_model=Meeting)
|
||||
async def rooms_join_meeting(
|
||||
room_name: str,
|
||||
meeting_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
room = await rooms_controller.get_by_name(session, room_name)
|
||||
|
||||
if not room:
|
||||
raise HTTPException(status_code=404, detail="Room not found")
|
||||
|
||||
meeting = await meetings_controller.get_by_id(session, meeting_id)
|
||||
|
||||
if not meeting:
|
||||
raise HTTPException(status_code=404, detail="Meeting not found")
|
||||
|
||||
if meeting.room_id != room.id:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Meeting does not belong to this room"
|
||||
)
|
||||
|
||||
if not meeting.is_active:
|
||||
raise HTTPException(status_code=400, detail="Meeting is not active")
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
if meeting.end_date <= current_time:
|
||||
raise HTTPException(status_code=400, detail="Meeting has ended")
|
||||
|
||||
# Hide host URL from non-owners
|
||||
if user_id != room.user_id:
|
||||
meeting.host_room_url = ""
|
||||
|
||||
return meeting
|
||||
|
||||
@@ -3,12 +3,13 @@ from typing import Annotated, Literal, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi_pagination import Page
|
||||
from fastapi_pagination.ext.databases import apaginate
|
||||
from fastapi_pagination.ext.sqlalchemy import paginate
|
||||
from jose import jwt
|
||||
from pydantic import BaseModel, Field, constr, field_serializer
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_database
|
||||
from reflector.db import get_session
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.search import (
|
||||
@@ -149,24 +150,25 @@ async def transcripts_list(
|
||||
source_kind: SourceKind | None = None,
|
||||
room_id: str | None = None,
|
||||
search_term: str | None = None,
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
if not user and not settings.PUBLIC_MODE:
|
||||
raise HTTPException(status_code=401, detail="Not authenticated")
|
||||
|
||||
user_id = user["sub"] if user else None
|
||||
|
||||
return await apaginate(
|
||||
get_database(),
|
||||
await transcripts_controller.get_all(
|
||||
user_id=user_id,
|
||||
source_kind=SourceKind(source_kind) if source_kind else None,
|
||||
room_id=room_id,
|
||||
search_term=search_term,
|
||||
order_by="-created_at",
|
||||
return_query=True,
|
||||
),
|
||||
query = await transcripts_controller.get_all(
|
||||
session,
|
||||
user_id=user_id,
|
||||
source_kind=SourceKind(source_kind) if source_kind else None,
|
||||
room_id=room_id,
|
||||
search_term=search_term,
|
||||
order_by="-created_at",
|
||||
return_query=True,
|
||||
)
|
||||
|
||||
return await paginate(session, query)
|
||||
|
||||
|
||||
@router.get("/transcripts/search", response_model=SearchResponse)
|
||||
async def transcripts_search(
|
||||
@@ -178,6 +180,7 @@ async def transcripts_search(
|
||||
user: Annotated[
|
||||
Optional[auth.UserInfo], Depends(auth.current_user_optional)
|
||||
] = None,
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
"""
|
||||
Full-text search across transcript titles and content.
|
||||
@@ -196,7 +199,7 @@ async def transcripts_search(
|
||||
source_kind=source_kind,
|
||||
)
|
||||
|
||||
results, total = await search_controller.search_transcripts(search_params)
|
||||
results, total = await search_controller.search_transcripts(session, search_params)
|
||||
|
||||
return SearchResponse(
|
||||
results=results,
|
||||
@@ -211,9 +214,11 @@ async def transcripts_search(
|
||||
async def transcripts_create(
|
||||
info: CreateTranscript,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
return await transcripts_controller.add(
|
||||
session,
|
||||
info.name,
|
||||
source_kind=info.source_kind or SourceKind.LIVE,
|
||||
source_language=info.source_language,
|
||||
@@ -333,10 +338,11 @@ class GetTranscriptTopicWithWordsPerSpeaker(GetTranscriptTopic):
|
||||
async def transcript_get(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
return await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
|
||||
@@ -345,13 +351,16 @@ async def transcript_update(
|
||||
transcript_id: str,
|
||||
info: UpdateTranscript,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
values = info.dict(exclude_unset=True)
|
||||
updated_transcript = await transcripts_controller.update(transcript, values)
|
||||
updated_transcript = await transcripts_controller.update(
|
||||
session, transcript, values
|
||||
)
|
||||
return updated_transcript
|
||||
|
||||
|
||||
@@ -359,19 +368,20 @@ async def transcript_update(
|
||||
async def transcript_delete(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
transcript = await transcripts_controller.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
if transcript.meeting_id:
|
||||
meeting = await meetings_controller.get_by_id(transcript.meeting_id)
|
||||
room = await rooms_controller.get_by_id(meeting.room_id)
|
||||
meeting = await meetings_controller.get_by_id(session, transcript.meeting_id)
|
||||
room = await rooms_controller.get_by_id(session, meeting.room_id)
|
||||
if room.is_shared:
|
||||
user_id = None
|
||||
|
||||
await transcripts_controller.remove_by_id(transcript.id, user_id=user_id)
|
||||
await transcripts_controller.remove_by_id(session, transcript.id, user_id=user_id)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
|
||||
@@ -382,10 +392,11 @@ async def transcript_delete(
|
||||
async def transcript_get_topics(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
# convert to GetTranscriptTopic
|
||||
@@ -401,10 +412,11 @@ async def transcript_get_topics(
|
||||
async def transcript_get_topics_with_words(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
# convert to GetTranscriptTopicWithWords
|
||||
@@ -422,10 +434,11 @@ async def transcript_get_topics_with_words_per_speaker(
|
||||
transcript_id: str,
|
||||
topic_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
# get the topic from the transcript
|
||||
@@ -444,10 +457,11 @@ async def transcript_post_to_zulip(
|
||||
topic: str,
|
||||
include_topics: bool,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
@@ -467,5 +481,5 @@ async def transcript_post_to_zulip(
|
||||
if not message_updated:
|
||||
response = await send_message_to_zulip(stream, topic, content)
|
||||
await transcripts_controller.update(
|
||||
transcript, {"zulip_message_id": response["id"]}
|
||||
session, transcript, {"zulip_message_id": response["id"]}
|
||||
)
|
||||
|
||||
@@ -9,8 +9,10 @@ from typing import Annotated, Optional
|
||||
import httpx
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
|
||||
from jose import jwt
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_session
|
||||
from reflector.db.transcripts import AudioWaveform, transcripts_controller
|
||||
from reflector.settings import settings
|
||||
from reflector.views.transcripts import ALGORITHM
|
||||
@@ -32,6 +34,7 @@ async def transcript_get_audio_mp3(
|
||||
request: Request,
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
token: str | None = None,
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
@@ -48,7 +51,7 @@ async def transcript_get_audio_mp3(
|
||||
raise unauthorized_exception
|
||||
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if transcript.audio_location == "storage":
|
||||
@@ -86,7 +89,7 @@ async def transcript_get_audio_mp3(
|
||||
|
||||
return range_requests_response(
|
||||
request,
|
||||
transcript.audio_mp3_filename,
|
||||
transcript.audio_mp3_filename.as_posix(),
|
||||
content_type="audio/mpeg",
|
||||
content_disposition=f"attachment; filename={filename}",
|
||||
)
|
||||
@@ -96,13 +99,18 @@ async def transcript_get_audio_mp3(
|
||||
async def transcript_get_audio_waveform(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> AudioWaveform:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if not transcript.audio_waveform_filename.exists():
|
||||
raise HTTPException(status_code=404, detail="Audio not found")
|
||||
|
||||
return transcript.audio_waveform
|
||||
audio_waveform = transcript.audio_waveform
|
||||
if not audio_waveform:
|
||||
raise HTTPException(status_code=404, detail="Audio waveform not found")
|
||||
|
||||
return audio_waveform
|
||||
|
||||
@@ -8,8 +8,10 @@ from typing import Annotated, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_session
|
||||
from reflector.db.transcripts import TranscriptParticipant, transcripts_controller
|
||||
from reflector.views.types import DeletionStatus
|
||||
|
||||
@@ -37,10 +39,11 @@ class UpdateParticipant(BaseModel):
|
||||
async def transcript_get_participants(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> list[Participant]:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if transcript.participants is None:
|
||||
@@ -57,10 +60,11 @@ async def transcript_add_participant(
|
||||
transcript_id: str,
|
||||
participant: CreateParticipant,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> Participant:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
# ensure the speaker is unique
|
||||
@@ -73,7 +77,7 @@ async def transcript_add_participant(
|
||||
)
|
||||
|
||||
obj = await transcripts_controller.upsert_participant(
|
||||
transcript, TranscriptParticipant(**participant.dict())
|
||||
session, transcript, TranscriptParticipant(**participant.dict())
|
||||
)
|
||||
return Participant.model_validate(obj)
|
||||
|
||||
@@ -83,10 +87,11 @@ async def transcript_get_participant(
|
||||
transcript_id: str,
|
||||
participant_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> Participant:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
for p in transcript.participants:
|
||||
@@ -102,10 +107,11 @@ async def transcript_update_participant(
|
||||
participant_id: str,
|
||||
participant: UpdateParticipant,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> Participant:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
# ensure the speaker is unique
|
||||
@@ -130,7 +136,7 @@ async def transcript_update_participant(
|
||||
fields = participant.dict(exclude_unset=True)
|
||||
obj = obj.copy(update=fields)
|
||||
|
||||
await transcripts_controller.upsert_participant(transcript, obj)
|
||||
await transcripts_controller.upsert_participant(session, transcript, obj)
|
||||
return Participant.model_validate(obj)
|
||||
|
||||
|
||||
@@ -139,10 +145,11 @@ async def transcript_delete_participant(
|
||||
transcript_id: str,
|
||||
participant_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> DeletionStatus:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
await transcripts_controller.delete_participant(transcript, participant_id)
|
||||
await transcripts_controller.delete_participant(session, transcript, participant_id)
|
||||
return DeletionStatus(status="ok")
|
||||
|
||||
@@ -3,8 +3,10 @@ from typing import Annotated, Optional
|
||||
import celery
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_session
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
|
||||
|
||||
@@ -19,10 +21,11 @@ class ProcessStatus(BaseModel):
|
||||
async def transcript_process(
|
||||
transcript_id: str,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if transcript.locked:
|
||||
|
||||
@@ -8,8 +8,10 @@ from typing import Annotated, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_session
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
|
||||
router = APIRouter()
|
||||
@@ -36,10 +38,11 @@ async def transcript_assign_speaker(
|
||||
transcript_id: str,
|
||||
assignment: SpeakerAssignment,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> SpeakerAssignmentStatus:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if not transcript:
|
||||
@@ -79,7 +82,9 @@ async def transcript_assign_speaker(
|
||||
# if the participant does not have a speaker, create one
|
||||
if participant.speaker is None:
|
||||
participant.speaker = transcript.find_empty_speaker()
|
||||
await transcripts_controller.upsert_participant(transcript, participant)
|
||||
await transcripts_controller.upsert_participant(
|
||||
session, transcript, participant
|
||||
)
|
||||
|
||||
speaker = participant.speaker
|
||||
|
||||
@@ -100,6 +105,7 @@ async def transcript_assign_speaker(
|
||||
for topic in changed_topics:
|
||||
transcript.upsert_topic(topic)
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"topics": transcript.topics_dump(),
|
||||
@@ -114,10 +120,11 @@ async def transcript_merge_speaker(
|
||||
transcript_id: str,
|
||||
merge: SpeakerMerge,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
) -> SpeakerAssignmentStatus:
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if not transcript:
|
||||
@@ -163,6 +170,7 @@ async def transcript_merge_speaker(
|
||||
for topic in changed_topics:
|
||||
transcript.upsert_topic(topic)
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"topics": transcript.topics_dump(),
|
||||
|
||||
@@ -3,8 +3,10 @@ from typing import Annotated, Optional
|
||||
import av
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_session
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
|
||||
|
||||
@@ -22,10 +24,11 @@ async def transcript_record_upload(
|
||||
total_chunks: int,
|
||||
chunk: UploadFile,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if transcript.locked:
|
||||
@@ -89,7 +92,7 @@ async def transcript_record_upload(
|
||||
container.close()
|
||||
|
||||
# set the status to "uploaded"
|
||||
await transcripts_controller.update(transcript, {"status": "uploaded"})
|
||||
await transcripts_controller.update(session, transcript, {"status": "uploaded"})
|
||||
|
||||
# launch a background task to process the file
|
||||
task_pipeline_file_process.delay(transcript_id=transcript_id)
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
from typing import Annotated, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
import reflector.auth as auth
|
||||
from reflector.db import get_session
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
|
||||
from .rtc_offer import RtcOffer, rtc_offer_base
|
||||
@@ -16,10 +18,11 @@ async def transcript_record_webrtc(
|
||||
params: RtcOffer,
|
||||
request: Request,
|
||||
user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
session: AsyncSession = Depends(get_session),
|
||||
):
|
||||
user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id_for_http(
|
||||
transcript_id, user_id=user_id
|
||||
session, transcript_id, user_id=user_id
|
||||
)
|
||||
|
||||
if transcript.locked:
|
||||
|
||||
@@ -24,7 +24,7 @@ async def transcript_events_websocket(
|
||||
# user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
|
||||
):
|
||||
# user_id = user["sub"] if user else None
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
transcript = await transcripts_controller.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
raise HTTPException(status_code=404, detail="Transcript not found")
|
||||
|
||||
|
||||
@@ -68,8 +68,7 @@ async def whereby_webhook(event: WherebyWebhookEvent, request: Request):
|
||||
raise HTTPException(status_code=404, detail="Meeting not found")
|
||||
|
||||
if event.type in ["room.client.joined", "room.client.left"]:
|
||||
await meetings_controller.update_meeting(
|
||||
meeting.id, num_clients=event.data["numClients"]
|
||||
)
|
||||
update_data = {"num_clients": event.data["numClients"]}
|
||||
await meetings_controller.update_meeting(meeting.id, **update_data)
|
||||
|
||||
return {"status": "ok"}
|
||||
|
||||
@@ -20,6 +20,7 @@ else:
|
||||
"reflector.worker.healthcheck",
|
||||
"reflector.worker.process",
|
||||
"reflector.worker.cleanup",
|
||||
"reflector.worker.ics_sync",
|
||||
]
|
||||
)
|
||||
|
||||
@@ -37,6 +38,14 @@ else:
|
||||
"task": "reflector.worker.process.reprocess_failed_recordings",
|
||||
"schedule": crontab(hour=5, minute=0), # Midnight EST
|
||||
},
|
||||
"sync_all_ics_calendars": {
|
||||
"task": "reflector.worker.ics_sync.sync_all_ics_calendars",
|
||||
"schedule": 60.0, # Run every minute to check which rooms need sync
|
||||
},
|
||||
"create_upcoming_meetings": {
|
||||
"task": "reflector.worker.ics_sync.create_upcoming_meetings",
|
||||
"schedule": 30.0, # Run every 30 seconds to create upcoming meetings
|
||||
},
|
||||
}
|
||||
|
||||
if settings.PUBLIC_MODE:
|
||||
|
||||
@@ -5,22 +5,21 @@ Deletes old anonymous transcripts and their associated meetings/recordings.
|
||||
Transcripts are the main entry point - any associated data is also removed.
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import TypedDict
|
||||
|
||||
import structlog
|
||||
from celery import shared_task
|
||||
from databases import Database
|
||||
from pydantic.types import PositiveInt
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.asynctask import asynctask
|
||||
from reflector.db import get_database
|
||||
from reflector.db.meetings import meetings
|
||||
from reflector.db.recordings import recordings
|
||||
from reflector.db.transcripts import transcripts, transcripts_controller
|
||||
from reflector.db.base import MeetingModel, RecordingModel, TranscriptModel
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.settings import settings
|
||||
from reflector.storage import get_recordings_storage
|
||||
from reflector.worker.session_decorator import with_session
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
@@ -35,51 +34,49 @@ class CleanupStats(TypedDict):
|
||||
|
||||
|
||||
async def delete_single_transcript(
|
||||
db: Database, transcript_data: dict, stats: CleanupStats
|
||||
session: AsyncSession, transcript_data: dict, stats: CleanupStats
|
||||
):
|
||||
transcript_id = transcript_data["id"]
|
||||
meeting_id = transcript_data["meeting_id"]
|
||||
recording_id = transcript_data["recording_id"]
|
||||
|
||||
try:
|
||||
async with db.transaction(isolation="serializable"):
|
||||
if meeting_id:
|
||||
await db.execute(meetings.delete().where(meetings.c.id == meeting_id))
|
||||
stats["meetings_deleted"] += 1
|
||||
logger.info("Deleted associated meeting", meeting_id=meeting_id)
|
||||
|
||||
if recording_id:
|
||||
recording = await db.fetch_one(
|
||||
recordings.select().where(recordings.c.id == recording_id)
|
||||
)
|
||||
if recording:
|
||||
try:
|
||||
await get_recordings_storage().delete_file(
|
||||
recording["object_key"]
|
||||
)
|
||||
except Exception as storage_error:
|
||||
logger.warning(
|
||||
"Failed to delete recording from storage",
|
||||
recording_id=recording_id,
|
||||
object_key=recording["object_key"],
|
||||
error=str(storage_error),
|
||||
)
|
||||
|
||||
await db.execute(
|
||||
recordings.delete().where(recordings.c.id == recording_id)
|
||||
)
|
||||
stats["recordings_deleted"] += 1
|
||||
logger.info(
|
||||
"Deleted associated recording", recording_id=recording_id
|
||||
)
|
||||
|
||||
await transcripts_controller.remove_by_id(transcript_id)
|
||||
stats["transcripts_deleted"] += 1
|
||||
logger.info(
|
||||
"Deleted transcript",
|
||||
transcript_id=transcript_id,
|
||||
created_at=transcript_data["created_at"].isoformat(),
|
||||
if meeting_id:
|
||||
await session.execute(
|
||||
delete(MeetingModel).where(MeetingModel.id == meeting_id)
|
||||
)
|
||||
stats["meetings_deleted"] += 1
|
||||
logger.info("Deleted associated meeting", meeting_id=meeting_id)
|
||||
|
||||
if recording_id:
|
||||
result = await session.execute(
|
||||
select(RecordingModel).where(RecordingModel.id == recording_id)
|
||||
)
|
||||
recording = result.mappings().first()
|
||||
if recording:
|
||||
try:
|
||||
await get_recordings_storage().delete_file(recording["object_key"])
|
||||
except Exception as storage_error:
|
||||
logger.warning(
|
||||
"Failed to delete recording from storage",
|
||||
recording_id=recording_id,
|
||||
object_key=recording["object_key"],
|
||||
error=str(storage_error),
|
||||
)
|
||||
|
||||
await session.execute(
|
||||
delete(RecordingModel).where(RecordingModel.id == recording_id)
|
||||
)
|
||||
stats["recordings_deleted"] += 1
|
||||
logger.info("Deleted associated recording", recording_id=recording_id)
|
||||
|
||||
await transcripts_controller.remove_by_id(session, transcript_id)
|
||||
stats["transcripts_deleted"] += 1
|
||||
logger.info(
|
||||
"Deleted transcript",
|
||||
transcript_id=transcript_id,
|
||||
created_at=transcript_data["created_at"].isoformat(),
|
||||
)
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to delete transcript {transcript_id}: {str(e)}"
|
||||
logger.error(error_msg, exc_info=e)
|
||||
@@ -87,18 +84,30 @@ async def delete_single_transcript(
|
||||
|
||||
|
||||
async def cleanup_old_transcripts(
|
||||
db: Database, cutoff_date: datetime, stats: CleanupStats
|
||||
session: AsyncSession, cutoff_date: datetime, stats: CleanupStats
|
||||
):
|
||||
"""Delete old anonymous transcripts and their associated recordings/meetings."""
|
||||
query = transcripts.select().where(
|
||||
(transcripts.c.created_at < cutoff_date) & (transcripts.c.user_id.is_(None))
|
||||
query = select(
|
||||
TranscriptModel.id,
|
||||
TranscriptModel.meeting_id,
|
||||
TranscriptModel.recording_id,
|
||||
TranscriptModel.created_at,
|
||||
).where(
|
||||
(TranscriptModel.created_at < cutoff_date) & (TranscriptModel.user_id.is_(None))
|
||||
)
|
||||
old_transcripts = await db.fetch_all(query)
|
||||
|
||||
result = await session.execute(query)
|
||||
old_transcripts = result.mappings().all()
|
||||
|
||||
logger.info(f"Found {len(old_transcripts)} old transcripts to delete")
|
||||
|
||||
for transcript_data in old_transcripts:
|
||||
await delete_single_transcript(db, transcript_data, stats)
|
||||
try:
|
||||
await delete_single_transcript(session, transcript_data, stats)
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to delete transcript {transcript_data['id']}: {str(e)}"
|
||||
logger.error(error_msg, exc_info=e)
|
||||
stats["errors"].append(error_msg)
|
||||
|
||||
|
||||
def log_cleanup_results(stats: CleanupStats):
|
||||
@@ -118,6 +127,7 @@ def log_cleanup_results(stats: CleanupStats):
|
||||
|
||||
|
||||
async def cleanup_old_public_data(
|
||||
session: AsyncSession,
|
||||
days: PositiveInt | None = None,
|
||||
) -> CleanupStats | None:
|
||||
if days is None:
|
||||
@@ -140,8 +150,7 @@ async def cleanup_old_public_data(
|
||||
"errors": [],
|
||||
}
|
||||
|
||||
db = get_database()
|
||||
await cleanup_old_transcripts(db, cutoff_date, stats)
|
||||
await cleanup_old_transcripts(session, cutoff_date, stats)
|
||||
|
||||
log_cleanup_results(stats)
|
||||
return stats
|
||||
@@ -152,5 +161,6 @@ async def cleanup_old_public_data(
|
||||
retry_kwargs={"max_retries": 3, "countdown": 300},
|
||||
)
|
||||
@asynctask
|
||||
def cleanup_old_public_data_task(days: int | None = None):
|
||||
asyncio.run(cleanup_old_public_data(days=days))
|
||||
@with_session
|
||||
async def cleanup_old_public_data_task(session: AsyncSession, days: int | None = None):
|
||||
await cleanup_old_public_data(session, days=days)
|
||||
|
||||
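
# Sketch of what the with_session decorator used above is assumed to do (its
# implementation is not shown in this diff): open one AsyncSession per task
# invocation, pass it as the first argument, and commit or roll back on exit.
from functools import wraps

from reflector.db import get_session_factory


def with_session_sketch(fn):
    @wraps(fn)
    async def wrapper(*args, **kwargs):
        session_factory = get_session_factory()
        async with session_factory() as session:
            try:
                result = await fn(session, *args, **kwargs)
                await session.commit()
                return result
            except Exception:
                await session.rollback()
                raise

    return wrapper
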
186
server/reflector/worker/ics_sync.py
Normal file
@@ -0,0 +1,186 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import structlog
|
||||
from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.asynctask import asynctask
|
||||
from reflector.db.calendar_events import calendar_events_controller
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.redis_cache import RedisAsyncLock
|
||||
from reflector.services.ics_sync import SyncStatus, ics_sync_service
|
||||
from reflector.whereby import create_meeting, upload_logo
|
||||
from reflector.worker.session_decorator import with_session
|
||||
|
||||
logger = structlog.wrap_logger(get_task_logger(__name__))
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
@with_session
|
||||
async def sync_room_ics(session: AsyncSession, room_id: str):
|
||||
try:
|
||||
room = await rooms_controller.get_by_id(session, room_id)
|
||||
if not room:
|
||||
logger.warning("Room not found for ICS sync", room_id=room_id)
|
||||
return
|
||||
|
||||
if not room.ics_enabled or not room.ics_url:
|
||||
logger.debug("ICS not enabled for room", room_id=room_id)
|
||||
return
|
||||
|
||||
logger.info("Starting ICS sync for room", room_id=room_id, room_name=room.name)
|
||||
result = await ics_sync_service.sync_room_calendar(session, room)
|
||||
|
||||
if result["status"] == SyncStatus.SUCCESS:
|
||||
logger.info(
|
||||
"ICS sync completed successfully",
|
||||
room_id=room_id,
|
||||
events_found=result.get("events_found", 0),
|
||||
events_created=result.get("events_created", 0),
|
||||
events_updated=result.get("events_updated", 0),
|
||||
events_deleted=result.get("events_deleted", 0),
|
||||
)
|
||||
elif result["status"] == SyncStatus.UNCHANGED:
|
||||
logger.debug("ICS content unchanged", room_id=room_id)
|
||||
elif result["status"] == SyncStatus.ERROR:
|
||||
logger.error("ICS sync failed", room_id=room_id, error=result.get("error"))
|
||||
else:
|
||||
logger.debug(
|
||||
"ICS sync skipped", room_id=room_id, reason=result.get("reason")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Unexpected error during ICS sync", room_id=room_id, error=str(e))
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
@with_session
|
||||
async def sync_all_ics_calendars(session: AsyncSession):
|
||||
try:
|
||||
logger.info("Starting sync for all ICS-enabled rooms")
|
||||
|
||||
ics_enabled_rooms = await rooms_controller.get_ics_enabled(session)
|
||||
logger.info(f"Found {len(ics_enabled_rooms)} rooms with ICS enabled")
|
||||
|
||||
for room in ics_enabled_rooms:
|
||||
if not _should_sync(room):
|
||||
logger.debug("Skipping room, not time to sync yet", room_id=room.id)
|
||||
continue
|
||||
|
||||
sync_room_ics.delay(room.id)
|
||||
|
||||
logger.info("Queued sync tasks for all eligible rooms")
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error in sync_all_ics_calendars", error=str(e))
|
||||
|
||||
|
||||
def _should_sync(room) -> bool:
|
||||
if not room.ics_last_sync:
|
||||
return True
|
||||
|
||||
time_since_sync = datetime.now(timezone.utc) - room.ics_last_sync
|
||||
return time_since_sync.total_seconds() >= room.ics_fetch_interval
|
||||
|
||||
|
||||
MEETING_DEFAULT_DURATION = timedelta(hours=1)
|
||||
|
||||
|
||||
async def create_upcoming_meetings_for_event(
|
||||
session: AsyncSession, event, create_window, room_id, room
|
||||
):
|
||||
if event.start_time <= create_window:
|
||||
return
|
||||
existing_meeting = await meetings_controller.get_by_calendar_event(
|
||||
session, event.id
|
||||
)
|
||||
|
||||
if existing_meeting:
|
||||
return
|
||||
|
||||
logger.info(
|
||||
"Pre-creating meeting for calendar event",
|
||||
room_id=room_id,
|
||||
event_id=event.id,
|
||||
event_title=event.title,
|
||||
)
|
||||
|
||||
try:
|
||||
end_date = event.end_time or (event.start_time + MEETING_DEFAULT_DURATION)
|
||||
|
||||
whereby_meeting = await create_meeting(
|
||||
"",
|
||||
end_date=end_date,
|
||||
room=room,
|
||||
)
|
||||
await upload_logo(whereby_meeting["roomName"], "./images/logo.png")
|
||||
|
||||
meeting = await meetings_controller.create(
|
||||
session,
|
||||
id=whereby_meeting["meetingId"],
|
||||
room_name=whereby_meeting["roomName"],
|
||||
room_url=whereby_meeting["roomUrl"],
|
||||
host_room_url=whereby_meeting["hostRoomUrl"],
|
||||
start_date=datetime.fromisoformat(whereby_meeting["startDate"]),
|
||||
end_date=datetime.fromisoformat(whereby_meeting["endDate"]),
|
||||
room=room,
|
||||
calendar_event_id=event.id,
|
||||
calendar_metadata={
|
||||
"title": event.title,
|
||||
"description": event.description,
|
||||
"attendees": event.attendees,
|
||||
},
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Meeting pre-created successfully",
|
||||
meeting_id=meeting.id,
|
||||
event_id=event.id,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to pre-create meeting",
|
||||
room_id=room_id,
|
||||
event_id=event.id,
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
@with_session
|
||||
async def create_upcoming_meetings(session: AsyncSession):
|
||||
async with RedisAsyncLock("create_upcoming_meetings", skip_if_locked=True) as lock:
|
||||
if not lock.acquired:
|
||||
logger.warning(
|
||||
"Another worker is already creating upcoming meetings, skipping"
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
logger.info("Starting creation of upcoming meetings")
|
||||
|
||||
ics_enabled_rooms = await rooms_controller.get_ics_enabled(session)
|
||||
now = datetime.now(timezone.utc)
|
||||
create_window = now - timedelta(minutes=6)
|
||||
|
||||
for room in ics_enabled_rooms:
|
||||
events = await calendar_events_controller.get_upcoming(
|
||||
session,
|
||||
room.id,
|
||||
minutes_ahead=7,
|
||||
)
|
||||
|
||||
for event in events:
|
||||
await create_upcoming_meetings_for_event(
|
||||
session, event, create_window, room.id, room
|
||||
)
|
||||
logger.info("Completed pre-creation check for upcoming meetings")
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Error in create_upcoming_meetings", error=str(e))
|
||||
@@ -9,6 +9,8 @@ import structlog
|
||||
from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
from pydantic import ValidationError
|
||||
from redis.exceptions import LockError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.recordings import Recording, recordings_controller
|
||||
@@ -16,8 +18,10 @@ from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.transcripts import SourceKind, transcripts_controller
|
||||
from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
|
||||
from reflector.pipelines.main_live_pipeline import asynctask
|
||||
from reflector.redis_cache import get_redis_client
|
||||
from reflector.settings import settings
|
||||
from reflector.whereby import get_room_sessions
|
||||
from reflector.worker.session_decorator import with_session
|
||||
|
||||
logger = structlog.wrap_logger(get_task_logger(__name__))
|
||||
|
||||
@@ -73,30 +77,39 @@ def process_messages():
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def process_recording(bucket_name: str, object_key: str):
|
||||
@with_session
|
||||
async def process_recording(session: AsyncSession, bucket_name: str, object_key: str):
|
||||
logger.info("Processing recording: %s/%s", bucket_name, object_key)
|
||||
|
||||
# extract a guid and a datetime from the object key
|
||||
room_name = f"/{object_key[:36]}"
|
||||
recorded_at = parse_datetime_with_timezone(object_key[37:57])
|
||||
|
||||
meeting = await meetings_controller.get_by_room_name(room_name)
|
||||
room = await rooms_controller.get_by_id(meeting.room_id)
|
||||
meeting = await meetings_controller.get_by_room_name(session, room_name)
|
||||
if not meeting:
|
||||
logger.warning("Room not found, may be deleted ?", room_name=room_name)
|
||||
return
|
||||
|
||||
recording = await recordings_controller.get_by_object_key(bucket_name, object_key)
|
||||
room = await rooms_controller.get_by_id(session, meeting.room_id)
|
||||
|
||||
recording = await recordings_controller.get_by_object_key(
|
||||
session, bucket_name, object_key
|
||||
)
|
||||
if not recording:
|
||||
recording = await recordings_controller.create(
|
||||
session,
|
||||
Recording(
|
||||
bucket_name=bucket_name,
|
||||
object_key=object_key,
|
||||
recorded_at=recorded_at,
|
||||
meeting_id=meeting.id,
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
transcript = await transcripts_controller.get_by_recording_id(recording.id)
|
||||
transcript = await transcripts_controller.get_by_recording_id(session, recording.id)
|
||||
if transcript:
|
||||
await transcripts_controller.update(
|
||||
session,
|
||||
transcript,
|
||||
{
|
||||
"topics": [],
|
||||
@@ -104,6 +117,7 @@ async def process_recording(bucket_name: str, object_key: str):
|
||||
)
|
||||
else:
|
||||
transcript = await transcripts_controller.add(
|
||||
session,
|
||||
"",
|
||||
source_kind=SourceKind.ROOM,
|
||||
source_language="en",
|
||||
@@ -139,37 +153,110 @@ async def process_recording(bucket_name: str, object_key: str):
|
||||
finally:
|
||||
container.close()
|
||||
|
||||
await transcripts_controller.update(transcript, {"status": "uploaded"})
|
||||
await transcripts_controller.update(session, transcript, {"status": "uploaded"})
|
||||
|
||||
task_pipeline_file_process.delay(transcript_id=transcript.id)
|
||||
|
||||
|
||||
@shared_task
|
||||
@asynctask
|
||||
async def process_meetings():
|
||||
@with_session
|
||||
async def process_meetings(session: AsyncSession):
|
||||
"""
|
||||
Checks which meetings are still active and deactivates those that have ended.
|
||||
|
||||
Deactivation logic:
|
||||
- Active sessions: Keep meeting active regardless of scheduled time
|
||||
- No active sessions:
|
||||
* Calendar meetings:
|
||||
- If previously used (had sessions): Deactivate immediately
|
||||
- If never used: Keep active until scheduled end time, then deactivate
|
||||
* On-the-fly meetings: Deactivate immediately (created when someone joins,
|
||||
so no sessions means everyone left)
|
||||
|
||||
Uses distributed locking to prevent race conditions when multiple workers
|
||||
process the same meeting simultaneously.
|
||||
"""
|
||||
logger.info("Processing meetings")
|
||||
meetings = await meetings_controller.get_all_active()
|
||||
meetings = await meetings_controller.get_all_active(session)
|
||||
current_time = datetime.now(timezone.utc)
|
||||
redis_client = get_redis_client()
|
||||
processed_count = 0
|
||||
skipped_count = 0
|
||||
|
||||
for meeting in meetings:
|
||||
is_active = False
|
||||
end_date = meeting.end_date
|
||||
if end_date.tzinfo is None:
|
||||
end_date = end_date.replace(tzinfo=timezone.utc)
|
||||
if end_date > datetime.now(timezone.utc):
|
||||
logger_ = logger.bind(meeting_id=meeting.id, room_name=meeting.room_name)
|
||||
lock_key = f"meeting_process_lock:{meeting.id}"
|
||||
lock = redis_client.lock(lock_key, timeout=120)
|
||||
|
||||
try:
|
||||
if not lock.acquire(blocking=False):
|
||||
logger_.debug("Meeting is being processed by another worker, skipping")
|
||||
skipped_count += 1
|
||||
continue
|
||||
|
||||
# Process the meeting
|
||||
should_deactivate = False
|
||||
end_date = meeting.end_date
|
||||
if end_date.tzinfo is None:
|
||||
end_date = end_date.replace(tzinfo=timezone.utc)
|
||||
|
||||
# This API call could be slow, extend lock if needed
|
||||
response = await get_room_sessions(meeting.room_name)
|
||||
|
||||
try:
|
||||
# Extend lock after slow operation to ensure we still hold it
|
||||
lock.extend(120, replace_ttl=True)
|
||||
except LockError:
|
||||
logger_.warning("Lost lock for meeting, skipping")
|
||||
continue
|
||||
|
||||
room_sessions = response.get("results", [])
|
||||
is_active = not room_sessions or any(
|
||||
has_active_sessions = room_sessions and any(
|
||||
rs["endedAt"] is None for rs in room_sessions
|
||||
)
|
||||
if not is_active:
|
||||
await meetings_controller.update_meeting(meeting.id, is_active=False)
|
||||
logger.info("Meeting %s is deactivated", meeting.id)
|
||||
has_had_sessions = bool(room_sessions)
|
||||
|
||||
logger.info("Processed meetings")
|
||||
if has_active_sessions:
|
||||
logger_.debug("Meeting still has active sessions, keep it")
|
||||
elif has_had_sessions:
|
||||
should_deactivate = True
|
||||
logger_.info("Meeting ended - all participants left")
|
||||
elif current_time > end_date:
|
||||
should_deactivate = True
|
||||
logger_.info(
|
||||
"Meeting deactivated - scheduled time ended with no participants",
|
||||
)
|
||||
else:
|
||||
logger_.debug("Meeting not yet started, keep it")
|
||||
|
||||
if should_deactivate:
|
||||
await meetings_controller.update_meeting(
|
||||
session, meeting.id, is_active=False
|
||||
)
|
||||
logger_.info("Meeting is deactivated")
|
||||
|
||||
processed_count += 1
|
||||
|
||||
except Exception:
|
||||
logger_.error("Error processing meeting", exc_info=True)
|
||||
finally:
|
||||
try:
|
||||
lock.release()
|
||||
except LockError:
|
||||
pass # Lock already released or expired
|
||||
|
||||
logger.info(
|
||||
"Processed meetings finished",
|
||||
processed_count=processed_count,
|
||||
skipped_count=skipped_count,
|
||||
)
|
||||
|
||||
|
||||
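The deactivation rules spelled out in the process_meetings docstring above boil down to one small decision: keep the meeting while sessions are active, end it once everyone has left, and otherwise let never-used meetings live until their scheduled end. A condensed sketch of that decision only (the task itself also handles locking, lock extension, and logging):

from datetime import datetime


def should_deactivate(
    has_active_sessions: bool,
    has_had_sessions: bool,
    now: datetime,
    end_date: datetime,
) -> bool:
    if has_active_sessions:
        # Someone is still connected: keep the meeting active.
        return False
    if has_had_sessions:
        # Sessions existed but all ended: everyone left, deactivate now.
        return True
    # Never used: only deactivate once the scheduled end time has passed.
    return now > end_date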
@shared_task
|
||||
@asynctask
|
||||
async def reprocess_failed_recordings():
|
||||
@with_session
|
||||
async def reprocess_failed_recordings(session: AsyncSession):
|
||||
"""
|
||||
Find recordings in the S3 bucket and check if they have proper transcriptions.
|
||||
If not, requeue them for processing.
|
||||
@@ -200,7 +287,7 @@ async def reprocess_failed_recordings():
|
||||
continue
|
||||
|
||||
recording = await recordings_controller.get_by_object_key(
|
||||
bucket_name, object_key
|
||||
session, bucket_name, object_key
|
||||
)
|
||||
if not recording:
|
||||
logger.info(f"Queueing recording for processing: {object_key}")
|
||||
@@ -211,10 +298,12 @@ async def reprocess_failed_recordings():
|
||||
transcript = None
|
||||
try:
|
||||
transcript = await transcripts_controller.get_by_recording_id(
|
||||
recording.id
|
||||
session, recording.id
|
||||
)
|
||||
except ValidationError:
|
||||
await transcripts_controller.remove_by_recording_id(recording.id)
|
||||
await transcripts_controller.remove_by_recording_id(
|
||||
session, recording.id
|
||||
)
|
||||
logger.warning(
|
||||
f"Removed invalid transcript for recording: {recording.id}"
|
||||
)
|
||||
|
||||
server/reflector/worker/session_decorator.py (new file, 109 additions)
@@ -0,0 +1,109 @@
|
||||
"""
|
||||
Session management decorator for async worker tasks.
|
||||
|
||||
This decorator ensures that all worker tasks have a properly managed database session
|
||||
that stays open for the entire duration of the task execution.
|
||||
"""
|
||||
|
||||
import functools
|
||||
from typing import Any, Callable, TypeVar
|
||||
|
||||
from celery import current_task
|
||||
|
||||
from reflector.db import get_session_factory
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.logger import logger
|
||||
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
def with_session(func: F) -> F:
|
||||
"""
|
||||
Decorator that provides an AsyncSession as the first argument to the decorated function.
|
||||
|
||||
This should be used AFTER the @asynctask decorator on Celery tasks to ensure
|
||||
proper session management throughout the task execution.
|
||||
|
||||
Example:
|
||||
@shared_task
|
||||
@asynctask
|
||||
@with_session
|
||||
async def my_task(session: AsyncSession, arg1: str, arg2: int):
|
||||
# session is automatically provided and managed
|
||||
result = await some_controller.get_by_id(session, arg1)
|
||||
...
|
||||
"""
|
||||
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
session_factory = get_session_factory()
|
||||
async with session_factory() as session:
|
||||
async with session.begin():
|
||||
# Pass session as first argument to the decorated function
|
||||
return await func(session, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def with_session_and_transcript(func: F) -> F:
|
||||
"""
|
||||
Decorator that provides both an AsyncSession and a Transcript to the decorated function.
|
||||
|
||||
This decorator:
|
||||
1. Extracts transcript_id from kwargs
|
||||
2. Creates and manages a database session
|
||||
3. Fetches the transcript using the session
|
||||
4. Creates an enhanced logger with Celery task context
|
||||
5. Passes session, transcript, and logger to the decorated function
|
||||
|
||||
This should be used AFTER the @asynctask decorator on Celery tasks.
|
||||
|
||||
Example:
|
||||
@shared_task
|
||||
@asynctask
|
||||
@with_session_and_transcript
|
||||
async def my_task(session: AsyncSession, transcript: Transcript, logger: Logger, arg1: str):
|
||||
# session, transcript, and logger are automatically provided
|
||||
room = await rooms_controller.get_by_id(session, transcript.room_id)
|
||||
...
|
||||
"""
|
||||
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args, **kwargs):
|
||||
transcript_id = kwargs.pop("transcript_id", None)
|
||||
if not transcript_id:
|
||||
raise ValueError(
|
||||
"transcript_id is required for @with_session_and_transcript"
|
||||
)
|
||||
|
||||
session_factory = get_session_factory()
|
||||
async with session_factory() as session:
|
||||
async with session.begin():
|
||||
# Fetch the transcript
|
||||
transcript = await transcripts_controller.get_by_id(
|
||||
session, transcript_id
|
||||
)
|
||||
if not transcript:
|
||||
raise Exception(f"Transcript {transcript_id} not found")
|
||||
|
||||
# Create enhanced logger with Celery task context
|
||||
tlogger = logger.bind(transcript_id=transcript.id)
|
||||
if current_task:
|
||||
tlogger = tlogger.bind(
|
||||
task_id=current_task.request.id,
|
||||
task_name=current_task.name,
|
||||
worker_hostname=current_task.request.hostname,
|
||||
task_retries=current_task.request.retries,
|
||||
transcript_id=transcript_id,
|
||||
)
|
||||
|
||||
try:
|
||||
# Pass session, transcript, and logger to the decorated function
|
||||
return await func(
|
||||
session, transcript=transcript, logger=tlogger, *args, **kwargs
|
||||
)
|
||||
except Exception:
|
||||
tlogger.exception("Error in task execution")
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
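One consequence of the wrapper popping transcript_id from kwargs: any task decorated with @with_session_and_transcript must be enqueued with transcript_id as a keyword argument. A sketch of the calling convention (the task name here is illustrative, not from the diff):

# Keyword form: the decorator finds transcript_id in kwargs.
my_task.delay(transcript_id=transcript.id)

# A positional call would leave kwargs empty, and the wrapper raises
# ValueError("transcript_id is required ...") before the task body runs.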
@@ -10,12 +10,14 @@ import httpx
|
||||
import structlog
|
||||
from celery import shared_task
|
||||
from celery.utils.log import get_task_logger
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.db.transcripts import transcripts_controller
|
||||
from reflector.pipelines.main_live_pipeline import asynctask
|
||||
from reflector.settings import settings
|
||||
from reflector.utils.webvtt import topics_to_webvtt
|
||||
from reflector.worker.session_decorator import with_session
|
||||
|
||||
logger = structlog.wrap_logger(get_task_logger(__name__))
|
||||
|
||||
@@ -39,11 +41,13 @@ def generate_webhook_signature(payload: bytes, secret: str, timestamp: str) -> s
|
||||
retry_backoff_max=3600, # Max 1 hour between retries
|
||||
)
|
||||
@asynctask
|
||||
@with_session
|
||||
async def send_transcript_webhook(
|
||||
self,
|
||||
transcript_id: str,
|
||||
room_id: str,
|
||||
event_id: str,
|
||||
session: AsyncSession,
|
||||
):
|
||||
log = logger.bind(
|
||||
transcript_id=transcript_id,
|
||||
@@ -53,12 +57,12 @@ async def send_transcript_webhook(
|
||||
|
||||
try:
|
||||
# Fetch transcript and room
|
||||
transcript = await transcripts_controller.get_by_id(transcript_id)
|
||||
transcript = await transcripts_controller.get_by_id(session, transcript_id)
|
||||
if not transcript:
|
||||
log.error("Transcript not found, skipping webhook")
|
||||
return
|
||||
|
||||
room = await rooms_controller.get_by_id(room_id)
|
||||
room = await rooms_controller.get_by_id(session, room_id)
|
||||
if not room:
|
||||
log.error("Room not found, skipping webhook")
|
||||
return
|
||||
|
||||
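The retrying webhook task above signs its payload via generate_webhook_signature(payload, secret, timestamp). The exact message layout is not shown in this diff; a common timestamped HMAC-SHA256 scheme looks like the following sketch, where only the parameter names come from the hunk header and the layout is an assumption:

import hashlib
import hmac


def sign_webhook(payload: bytes, secret: str, timestamp: str) -> str:
    # Bind the timestamp to the body so a replayed payload cannot reuse an old signature.
    message = timestamp.encode() + b"." + payload
    return hmac.new(secret.encode(), message, hashlib.sha256).hexdigest()

The receiver would recompute the same HMAC over the received timestamp and body and compare it with hmac.compare_digest.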
server/test.ics (new file, 29 additions)
@@ -0,0 +1,29 @@
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
CALSCALE:GREGORIAN
|
||||
METHOD:PUBLISH
|
||||
PRODID:-//Fastmail/2020.5/EN
|
||||
X-APPLE-CALENDAR-COLOR:#0F6A0F
|
||||
X-WR-CALNAME:Test reflector
|
||||
X-WR-TIMEZONE:America/Costa_Rica
|
||||
BEGIN:VTIMEZONE
|
||||
TZID:America/Costa_Rica
|
||||
BEGIN:STANDARD
|
||||
DTSTART:19700101T000000
|
||||
TZOFFSETFROM:-0600
|
||||
TZOFFSETTO:-0600
|
||||
END:STANDARD
|
||||
END:VTIMEZONE
|
||||
BEGIN:VEVENT
|
||||
ATTENDEE;CN=Mathieu Virbel;PARTSTAT=ACCEPTED:MAILTO:mathieu@monadical.com
|
||||
DTEND;TZID=America/Costa_Rica:20250819T143000
|
||||
DTSTAMP:20250819T155951Z
|
||||
DTSTART;TZID=America/Costa_Rica:20250819T140000
|
||||
LOCATION:http://localhost:1250/mathieu
|
||||
ORGANIZER;CN=Mathieu Virbel:MAILTO:mathieu@monadical.com
|
||||
SEQUENCE:1
|
||||
SUMMARY:Checkin
|
||||
TRANSP:OPAQUE
|
||||
UID:867df50d-8105-4c58-9280-2b5d26cc9cd3
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
@@ -1,10 +1,22 @@
|
||||
import asyncio
|
||||
import os
|
||||
import sys
|
||||
from tempfile import NamedTemporaryFile
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def event_loop():
|
||||
if sys.platform.startswith("win") and sys.version_info[:2] >= (3, 8):
|
||||
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
yield loop
|
||||
loop.close()
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
def settings_configuration():
|
||||
# these settings are linked to Monadical for pytest-recording
|
||||
@@ -35,7 +47,6 @@ def docker_compose_file(pytestconfig):
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def postgres_service(docker_ip, docker_services):
|
||||
"""Ensure that PostgreSQL service is up and responsive."""
|
||||
port = docker_services.port_for("postgres_test", 5432)
|
||||
|
||||
def is_responsive():
|
||||
@@ -56,7 +67,6 @@ def postgres_service(docker_ip, docker_services):
|
||||
|
||||
docker_services.wait_until_responsive(timeout=30.0, pause=0.1, check=is_responsive)
|
||||
|
||||
# Return connection parameters
|
||||
return {
|
||||
"host": docker_ip,
|
||||
"port": port,
|
||||
@@ -66,20 +76,27 @@ def postgres_service(docker_ip, docker_services):
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
@pytest.mark.asyncio
|
||||
async def setup_database(postgres_service):
|
||||
from reflector.db import engine, metadata, get_database # noqa
|
||||
@pytest.fixture(scope="session")
|
||||
def _database_url(postgres_service):
|
||||
db_config = postgres_service
|
||||
DATABASE_URL = (
|
||||
f"postgresql+asyncpg://{db_config['user']}:{db_config['password']}"
|
||||
f"@{db_config['host']}:{db_config['port']}/{db_config['dbname']}"
|
||||
)
|
||||
|
||||
metadata.drop_all(bind=engine)
|
||||
metadata.create_all(bind=engine)
|
||||
database = get_database()
|
||||
# Override settings
|
||||
from reflector.settings import settings
|
||||
|
||||
try:
|
||||
await database.connect()
|
||||
yield
|
||||
finally:
|
||||
await database.disconnect()
|
||||
settings.DATABASE_URL = DATABASE_URL
|
||||
|
||||
return DATABASE_URL
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def init_database():
|
||||
from reflector.db import Base
|
||||
|
||||
return Base.metadata.create_all
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -327,8 +344,17 @@ def celery_includes():
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
async def ensure_db_session_in_app(db_session):
|
||||
async def mock_get_session():
|
||||
yield db_session
|
||||
|
||||
with patch("reflector.db._get_session", side_effect=mock_get_session):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def client():
|
||||
async def client(db_session):
|
||||
from httpx import AsyncClient
|
||||
|
||||
from reflector.app import app
|
||||
@@ -347,7 +373,7 @@ def fake_mp3_upload():
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def fake_transcript_with_topics(tmpdir, client):
|
||||
async def fake_transcript_with_topics(tmpdir, client, db_session):
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
@@ -363,10 +389,10 @@ async def fake_transcript_with_topics(tmpdir, client):
|
||||
assert response.status_code == 200
|
||||
tid = response.json()["id"]
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(tid)
|
||||
transcript = await transcripts_controller.get_by_id(db_session, tid)
|
||||
assert transcript is not None
|
||||
|
||||
await transcripts_controller.update(transcript, {"status": "ended"})
|
||||
await transcripts_controller.update(db_session, transcript, {"status": "ended"})
|
||||
|
||||
# manually copy a file at the expected location
|
||||
audio_filename = transcript.audio_mp3_filename
|
||||
@@ -376,6 +402,7 @@ async def fake_transcript_with_topics(tmpdir, client):
|
||||
|
||||
# create some topics
|
||||
await transcripts_controller.upsert_topic(
|
||||
db_session,
|
||||
transcript,
|
||||
TranscriptTopic(
|
||||
title="Topic 1",
|
||||
@@ -389,6 +416,7 @@ async def fake_transcript_with_topics(tmpdir, client):
|
||||
),
|
||||
)
|
||||
await transcripts_controller.upsert_topic(
|
||||
db_session,
|
||||
transcript,
|
||||
TranscriptTopic(
|
||||
title="Topic 2",
|
||||
|
||||
server/tests/test_attendee_parsing_bug.ics (new file, 18 additions)
@@ -0,0 +1,18 @@
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
CALSCALE:GREGORIAN
|
||||
METHOD:PUBLISH
|
||||
PRODID:-//Test/1.0/EN
|
||||
X-WR-CALNAME:Test Attendee Bug
|
||||
BEGIN:VEVENT
|
||||
ATTENDEE:MAILTO:alice@example.com,bob@example.com,charlie@example.com,diana@example.com,eve@example.com,frank@example.com,george@example.com,helen@example.com,ivan@example.com,jane@example.com,kevin@example.com,laura@example.com,mike@example.com,nina@example.com,oscar@example.com,paul@example.com,queen@example.com,robert@example.com,sarah@example.com,tom@example.com,ursula@example.com,victor@example.com,wendy@example.com,xavier@example.com,yvonne@example.com,zack@example.com,amy@example.com,bill@example.com,carol@example.com
|
||||
DTEND:20250910T190000Z
|
||||
DTSTAMP:20250910T174000Z
|
||||
DTSTART:20250910T180000Z
|
||||
LOCATION:http://localhost:3000/test-room
|
||||
ORGANIZER;CN=Test Organizer:MAILTO:organizer@example.com
|
||||
SEQUENCE:1
|
||||
SUMMARY:Test Meeting with Many Attendees
|
||||
UID:test-attendee-bug-event
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
server/tests/test_attendee_parsing_bug.py (new file, 98 additions)
@@ -0,0 +1,98 @@
|
||||
import os
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.services.ics_sync import ICSSyncService
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_attendee_parsing_bug(db_session):
|
||||
"""
|
||||
Test that reproduces the attendee parsing bug where a string with comma-separated
|
||||
emails gets parsed as individual characters instead of separate email addresses.
|
||||
|
||||
The bug manifests as getting 29 attendees with emails like "M", "A", "I", etc.
|
||||
instead of properly parsed email addresses.
|
||||
"""
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="http://test.com/test.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
await db_session.flush()
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
test_ics_path = os.path.join(
|
||||
os.path.dirname(__file__), "test_attendee_parsing_bug.ics"
|
||||
)
|
||||
with open(test_ics_path, "r") as f:
|
||||
ics_content = f.read()
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
future_time = now + timedelta(hours=1)
|
||||
end_time = future_time + timedelta(hours=1)
|
||||
|
||||
dtstart = future_time.strftime("%Y%m%dT%H%M%SZ")
|
||||
dtend = end_time.strftime("%Y%m%dT%H%M%SZ")
|
||||
dtstamp = now.strftime("%Y%m%dT%H%M%SZ")
|
||||
|
||||
ics_content = ics_content.replace("20250910T180000Z", dtstart)
|
||||
ics_content = ics_content.replace("20250910T190000Z", dtend)
|
||||
ics_content = ics_content.replace("20250910T174000Z", dtstamp)
|
||||
|
||||
sync_service = ICSSyncService()
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
with patch.object(
|
||||
sync_service.fetch_service, "fetch_ics", new_callable=AsyncMock
|
||||
) as mock_fetch:
|
||||
mock_fetch.return_value = ics_content
|
||||
|
||||
calendar = sync_service.fetch_service.parse_ics(ics_content)
|
||||
from reflector.settings import settings
|
||||
|
||||
room_url = f"{settings.UI_BASE_URL}/{room.name}"
|
||||
|
||||
print(f"Room URL being used for matching: {room_url}")
|
||||
print(f"ICS content:\n{ics_content}")
|
||||
|
||||
events, total_events = sync_service.fetch_service.extract_room_events(
|
||||
calendar, room.name, room_url
|
||||
)
|
||||
|
||||
print(f"Total events in calendar: {total_events}")
|
||||
print(f"Events matching room: {len(events)}")
|
||||
|
||||
result = await sync_service.sync_room_calendar(db_session, room)
|
||||
|
||||
assert result.get("status") == "success"
|
||||
assert result.get("events_found", 0) >= 0
|
||||
|
||||
assert len(events) == 1
|
||||
event = events[0]
|
||||
|
||||
attendees = event["attendees"]
|
||||
|
||||
print(f"Number of attendees: {len(attendees)}")
|
||||
for i, attendee in enumerate(attendees):
|
||||
print(f"Attendee {i}: {attendee}")
|
||||
|
||||
assert len(attendees) == 30, f"Expected 30 attendees, got {len(attendees)}"
|
||||
|
||||
assert attendees[0]["email"] == "alice@example.com"
|
||||
assert attendees[1]["email"] == "bob@example.com"
|
||||
assert attendees[2]["email"] == "charlie@example.com"
|
||||
assert any(att["email"] == "organizer@example.com" for att in attendees)
|
||||
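The fix this test pins down is essentially a normalization step before iterating attendees: an ICS ATTENDEE property can come back as a single value or a list, and iterating a bare string yields single characters ("M", "A", "I", ...). A sketch of that normalization, assuming nothing about the real parser in reflector.services.ics_sync beyond the behaviour the test asserts:

def normalize_attendee_emails(attendee_prop) -> list[str]:
    # Wrap a lone value in a list so we never iterate over a string's characters.
    values = attendee_prop if isinstance(attendee_prop, list) else [attendee_prop]
    emails = []
    for value in values:
        for part in str(value).split(","):
            part = part.strip()
            if part.lower().startswith("mailto:"):
                part = part[len("mailto:"):]
            if part:
                emails.append(part)
    return emails

With the 29-address ATTENDEE line from the fixture plus the ORGANIZER, this yields the 30 attendees the test expects.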
server/tests/test_calendar_event.py (new file, 438 additions)
@@ -0,0 +1,438 @@
|
||||
"""
|
||||
Tests for CalendarEvent model.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db.calendar_events import CalendarEvent, calendar_events_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_create(db_session):
|
||||
"""Test creating a calendar event."""
|
||||
# Create a room first
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
# Create calendar event
|
||||
now = datetime.now(timezone.utc)
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="test-event-123",
|
||||
title="Team Meeting",
|
||||
description="Weekly team sync",
|
||||
start_time=now + timedelta(hours=1),
|
||||
end_time=now + timedelta(hours=2),
|
||||
location=f"https://example.com/{room.name}",
|
||||
attendees=[
|
||||
{"email": "alice@example.com", "name": "Alice", "status": "ACCEPTED"},
|
||||
{"email": "bob@example.com", "name": "Bob", "status": "TENTATIVE"},
|
||||
],
|
||||
)
|
||||
|
||||
# Save event
|
||||
saved_event = await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
assert saved_event.ics_uid == "test-event-123"
|
||||
assert saved_event.title == "Team Meeting"
|
||||
assert saved_event.room_id == room.id
|
||||
assert len(saved_event.attendees) == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_get_by_room(db_session):
|
||||
"""Test getting calendar events for a room."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="events-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Create multiple events
|
||||
for i in range(3):
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid=f"event-{i}",
|
||||
title=f"Meeting {i}",
|
||||
start_time=now + timedelta(hours=i),
|
||||
end_time=now + timedelta(hours=i + 1),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
# Get events for room
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
|
||||
assert len(events) == 3
|
||||
assert all(e.room_id == room.id for e in events)
|
||||
assert events[0].title == "Meeting 0"
|
||||
assert events[1].title == "Meeting 1"
|
||||
assert events[2].title == "Meeting 2"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_get_upcoming(db_session):
|
||||
"""Test getting upcoming events within time window."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="upcoming-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Create events at different times
|
||||
# Past event (should not be included)
|
||||
past_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="past-event",
|
||||
title="Past Meeting",
|
||||
start_time=now - timedelta(hours=2),
|
||||
end_time=now - timedelta(hours=1),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, past_event)
|
||||
|
||||
# Upcoming event within 30 minutes
|
||||
upcoming_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="upcoming-event",
|
||||
title="Upcoming Meeting",
|
||||
start_time=now + timedelta(minutes=15),
|
||||
end_time=now + timedelta(minutes=45),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, upcoming_event)
|
||||
|
||||
# Currently happening event (started 10 minutes ago, ends in 20 minutes)
|
||||
current_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="current-event",
|
||||
title="Current Meeting",
|
||||
start_time=now - timedelta(minutes=10),
|
||||
end_time=now + timedelta(minutes=20),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, current_event)
|
||||
|
||||
# Future event beyond 30 minutes
|
||||
future_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="future-event",
|
||||
title="Future Meeting",
|
||||
start_time=now + timedelta(hours=2),
|
||||
end_time=now + timedelta(hours=3),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, future_event)
|
||||
|
||||
# Get upcoming events (default 120 minutes) - should include current, upcoming, and future
|
||||
upcoming = await calendar_events_controller.get_upcoming(db_session, room.id)
|
||||
|
||||
assert len(upcoming) == 3
|
||||
# Events should be sorted by start_time (current event first, then upcoming, then future)
|
||||
assert upcoming[0].ics_uid == "current-event"
|
||||
assert upcoming[1].ics_uid == "upcoming-event"
|
||||
assert upcoming[2].ics_uid == "future-event"
|
||||
|
||||
# Get upcoming with custom window
|
||||
upcoming_extended = await calendar_events_controller.get_upcoming(
|
||||
db_session, room.id, minutes_ahead=180
|
||||
)
|
||||
|
||||
assert len(upcoming_extended) == 3
|
||||
# Events should be sorted by start_time
|
||||
assert upcoming_extended[0].ics_uid == "current-event"
|
||||
assert upcoming_extended[1].ics_uid == "upcoming-event"
|
||||
assert upcoming_extended[2].ics_uid == "future-event"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_get_upcoming_includes_currently_happening(db_session):
|
||||
"""Test that get_upcoming includes currently happening events but excludes ended events."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="current-happening-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Event that ended in the past (should NOT be included)
|
||||
past_ended_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="past-ended-event",
|
||||
title="Past Ended Meeting",
|
||||
start_time=now - timedelta(hours=2),
|
||||
end_time=now - timedelta(minutes=30),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, past_ended_event)
|
||||
|
||||
# Event currently happening (started 10 minutes ago, ends in 20 minutes) - SHOULD be included
|
||||
currently_happening_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="currently-happening",
|
||||
title="Currently Happening Meeting",
|
||||
start_time=now - timedelta(minutes=10),
|
||||
end_time=now + timedelta(minutes=20),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, currently_happening_event)
|
||||
|
||||
# Event starting soon (in 5 minutes) - SHOULD be included
|
||||
upcoming_soon_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="upcoming-soon",
|
||||
title="Upcoming Soon Meeting",
|
||||
start_time=now + timedelta(minutes=5),
|
||||
end_time=now + timedelta(minutes=35),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, upcoming_soon_event)
|
||||
|
||||
# Get upcoming events
|
||||
upcoming = await calendar_events_controller.get_upcoming(
|
||||
db_session, room.id, minutes_ahead=30
|
||||
)
|
||||
|
||||
# Should only include currently happening and upcoming soon events
|
||||
assert len(upcoming) == 2
|
||||
assert upcoming[0].ics_uid == "currently-happening"
|
||||
assert upcoming[1].ics_uid == "upcoming-soon"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_upsert(db_session):
|
||||
"""Test upserting (create/update) calendar events."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="upsert-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Create new event
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="upsert-test",
|
||||
title="Original Title",
|
||||
start_time=now,
|
||||
end_time=now + timedelta(hours=1),
|
||||
)
|
||||
|
||||
created = await calendar_events_controller.upsert(db_session, event)
|
||||
assert created.title == "Original Title"
|
||||
|
||||
# Update existing event
|
||||
event.title = "Updated Title"
|
||||
event.description = "Added description"
|
||||
|
||||
updated = await calendar_events_controller.upsert(db_session, event)
|
||||
assert updated.title == "Updated Title"
|
||||
assert updated.description == "Added description"
|
||||
assert updated.ics_uid == "upsert-test"
|
||||
|
||||
# Verify only one event exists
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
assert len(events) == 1
|
||||
assert events[0].title == "Updated Title"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_soft_delete(db_session):
|
||||
"""Test soft deleting events no longer in calendar."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="delete-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Create multiple events
|
||||
for i in range(4):
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid=f"event-{i}",
|
||||
title=f"Meeting {i}",
|
||||
start_time=now + timedelta(hours=i),
|
||||
end_time=now + timedelta(hours=i + 1),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
# Soft delete events not in current list
|
||||
current_ids = ["event-0", "event-2"] # Keep events 0 and 2
|
||||
deleted_count = await calendar_events_controller.soft_delete_missing(
|
||||
db_session, room.id, current_ids
|
||||
)
|
||||
|
||||
assert deleted_count == 2 # Should delete events 1 and 3
|
||||
|
||||
# Get non-deleted events
|
||||
events = await calendar_events_controller.get_by_room(
|
||||
db_session, room.id, include_deleted=False
|
||||
)
|
||||
assert len(events) == 2
|
||||
assert {e.ics_uid for e in events} == {"event-0", "event-2"}
|
||||
|
||||
# Get all events including deleted
|
||||
all_events = await calendar_events_controller.get_by_room(
|
||||
db_session, room.id, include_deleted=True
|
||||
)
|
||||
assert len(all_events) == 4
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_past_events_not_deleted(db_session):
|
||||
"""Test that past events are not soft deleted."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="past-events-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
# Create past event
|
||||
past_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="past-event",
|
||||
title="Past Meeting",
|
||||
start_time=now - timedelta(hours=2),
|
||||
end_time=now - timedelta(hours=1),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, past_event)
|
||||
|
||||
# Create future event
|
||||
future_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="future-event",
|
||||
title="Future Meeting",
|
||||
start_time=now + timedelta(hours=1),
|
||||
end_time=now + timedelta(hours=2),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, future_event)
|
||||
|
||||
# Try to soft delete all events (only future should be deleted)
|
||||
deleted_count = await calendar_events_controller.soft_delete_missing(
|
||||
db_session, room.id, []
|
||||
)
|
||||
|
||||
assert deleted_count == 1 # Only future event deleted
|
||||
|
||||
# Verify past event still exists
|
||||
events = await calendar_events_controller.get_by_room(
|
||||
db_session, room.id, include_deleted=False
|
||||
)
|
||||
assert len(events) == 1
|
||||
assert events[0].ics_uid == "past-event"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_event_with_raw_ics_data(db_session):
|
||||
"""Test storing raw ICS data with calendar event."""
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="raw-ics-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
raw_ics = """BEGIN:VEVENT
|
||||
UID:test-raw-123
|
||||
SUMMARY:Test Event
|
||||
DTSTART:20240101T100000Z
|
||||
DTEND:20240101T110000Z
|
||||
END:VEVENT"""
|
||||
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="test-raw-123",
|
||||
title="Test Event",
|
||||
start_time=datetime.now(timezone.utc),
|
||||
end_time=datetime.now(timezone.utc) + timedelta(hours=1),
|
||||
ics_raw_data=raw_ics,
|
||||
)
|
||||
|
||||
saved = await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
assert saved.ics_raw_data == raw_ics
|
||||
|
||||
# Retrieve and verify
|
||||
retrieved = await calendar_events_controller.get_by_ics_uid(
|
||||
db_session, room.id, "test-raw-123"
|
||||
)
|
||||
assert retrieved is not None
|
||||
assert retrieved.ics_raw_data == raw_ics
|
||||
@@ -2,26 +2,32 @@ from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import delete, insert, select, update
|
||||
|
||||
from reflector.db.recordings import Recording, recordings_controller
|
||||
from reflector.db.base import (
|
||||
MeetingConsentModel,
|
||||
MeetingModel,
|
||||
RecordingModel,
|
||||
TranscriptModel,
|
||||
)
|
||||
from reflector.db.transcripts import SourceKind, transcripts_controller
|
||||
from reflector.worker.cleanup import cleanup_old_public_data
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_old_public_data_skips_when_not_public():
|
||||
async def test_cleanup_old_public_data_skips_when_not_public(db_session):
|
||||
"""Test that cleanup is skipped when PUBLIC_MODE is False."""
|
||||
with patch("reflector.worker.cleanup.settings") as mock_settings:
|
||||
mock_settings.PUBLIC_MODE = False
|
||||
|
||||
result = await cleanup_old_public_data()
|
||||
result = await cleanup_old_public_data(db_session)
|
||||
|
||||
# Should return early without doing anything
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_old_public_data_deletes_old_anonymous_transcripts():
|
||||
async def test_cleanup_old_public_data_deletes_old_anonymous_transcripts(db_session):
|
||||
"""Test that old anonymous transcripts are deleted."""
|
||||
# Create old and new anonymous transcripts
|
||||
old_date = datetime.now(timezone.utc) - timedelta(days=8)
|
||||
@@ -29,22 +35,23 @@ async def test_cleanup_old_public_data_deletes_old_anonymous_transcripts():
|
||||
|
||||
# Create old anonymous transcript (should be deleted)
|
||||
old_transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Old Anonymous Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
user_id=None, # Anonymous
|
||||
)
|
||||
# Manually update created_at to be old
|
||||
from reflector.db import get_database
|
||||
from reflector.db.transcripts import transcripts
|
||||
|
||||
await get_database().execute(
|
||||
transcripts.update()
|
||||
.where(transcripts.c.id == old_transcript.id)
|
||||
# Manually update created_at to be old
|
||||
await db_session.execute(
|
||||
update(TranscriptModel)
|
||||
.where(TranscriptModel.id == old_transcript.id)
|
||||
.values(created_at=old_date)
|
||||
)
|
||||
await db_session.commit()
|
||||
|
||||
# Create new anonymous transcript (should NOT be deleted)
|
||||
new_transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="New Anonymous Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
user_id=None, # Anonymous
|
||||
@@ -52,234 +59,265 @@ async def test_cleanup_old_public_data_deletes_old_anonymous_transcripts():
|
||||
|
||||
# Create old transcript with user (should NOT be deleted)
|
||||
old_user_transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Old User Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
user_id="user123",
|
||||
user_id="user-123",
|
||||
)
|
||||
await get_database().execute(
|
||||
transcripts.update()
|
||||
.where(transcripts.c.id == old_user_transcript.id)
|
||||
await db_session.execute(
|
||||
update(TranscriptModel)
|
||||
.where(TranscriptModel.id == old_user_transcript.id)
|
||||
.values(created_at=old_date)
|
||||
)
|
||||
await db_session.commit()
|
||||
|
||||
# Mock settings for public mode
|
||||
with patch("reflector.worker.cleanup.settings") as mock_settings:
|
||||
mock_settings.PUBLIC_MODE = True
|
||||
mock_settings.PUBLIC_DATA_RETENTION_DAYS = 7
|
||||
|
||||
# Mock the storage deletion
|
||||
with patch("reflector.db.transcripts.get_transcripts_storage") as mock_storage:
|
||||
mock_storage.return_value.delete_file = AsyncMock()
|
||||
# Mock delete_single_transcript to track what gets deleted
|
||||
with patch("reflector.worker.cleanup.delete_single_transcript") as mock_delete:
|
||||
mock_delete.return_value = None
|
||||
|
||||
result = await cleanup_old_public_data()
|
||||
# Run cleanup with test session
|
||||
await cleanup_old_public_data(db_session)
|
||||
|
||||
# Check results
|
||||
assert result["transcripts_deleted"] == 1
|
||||
assert result["errors"] == []
|
||||
|
||||
# Verify old anonymous transcript was deleted
|
||||
assert await transcripts_controller.get_by_id(old_transcript.id) is None
|
||||
|
||||
# Verify new anonymous transcript still exists
|
||||
assert await transcripts_controller.get_by_id(new_transcript.id) is not None
|
||||
|
||||
# Verify user transcript still exists
|
||||
assert await transcripts_controller.get_by_id(old_user_transcript.id) is not None
|
||||
# Verify only old anonymous transcript was deleted
|
||||
assert mock_delete.call_count == 1
|
||||
# The function is called with the session, the transcript_data dict, and the stats dict
|
||||
call_args = mock_delete.call_args[0]
|
||||
transcript_data = call_args[1]
|
||||
assert transcript_data["id"] == old_transcript.id
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_cleanup_deletes_associated_meeting_and_recording():
|
||||
"""Test that meetings and recordings associated with old transcripts are deleted."""
|
||||
from reflector.db import get_database
|
||||
from reflector.db.meetings import meetings
|
||||
from reflector.db.transcripts import transcripts
|
||||
|
||||
async def test_cleanup_deletes_associated_meeting_and_recording(db_session):
|
||||
"""Test that cleanup deletes associated meetings and recordings."""
|
||||
old_date = datetime.now(timezone.utc) - timedelta(days=8)
|
||||
|
||||
# Create a meeting
|
||||
meeting_id = "test-meeting-for-transcript"
|
||||
await get_database().execute(
|
||||
meetings.insert().values(
|
||||
id=meeting_id,
|
||||
room_name="Meeting with Transcript",
|
||||
room_url="https://example.com/meeting",
|
||||
host_room_url="https://example.com/meeting-host",
|
||||
start_date=old_date,
|
||||
end_date=old_date + timedelta(hours=1),
|
||||
room_id=None,
|
||||
)
|
||||
)
|
||||
|
||||
# Create a recording
|
||||
recording = await recordings_controller.create(
|
||||
Recording(
|
||||
bucket_name="test-bucket",
|
||||
object_key="test-recording.mp4",
|
||||
recorded_at=old_date,
|
||||
)
|
||||
)
|
||||
|
||||
# Create an old transcript with both meeting and recording
|
||||
old_transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Old Transcript with Meeting and Recording",
|
||||
source_kind=SourceKind.ROOM,
|
||||
source_kind=SourceKind.FILE,
|
||||
user_id=None,
|
||||
meeting_id=meeting_id,
|
||||
recording_id=recording.id,
|
||||
)
|
||||
|
||||
# Update created_at to be old
|
||||
await get_database().execute(
|
||||
transcripts.update()
|
||||
.where(transcripts.c.id == old_transcript.id)
|
||||
await db_session.execute(
|
||||
update(TranscriptModel)
|
||||
.where(TranscriptModel.id == old_transcript.id)
|
||||
.values(created_at=old_date)
|
||||
)
|
||||
await db_session.commit()
|
||||
|
||||
# Create associated meeting directly
|
||||
meeting_id = "test-meeting-id"
|
||||
await db_session.execute(
|
||||
insert(MeetingModel).values(
|
||||
id=meeting_id,
|
||||
room_id=None,
|
||||
room_name="test-room",
|
||||
room_url="https://example.com/room",
|
||||
host_room_url="https://example.com/room-host",
|
||||
start_date=old_date,
|
||||
end_date=old_date + timedelta(hours=1),
|
||||
is_active=False,
|
||||
num_clients=0,
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic",
|
||||
)
|
||||
)
|
||||
|
||||
# Create associated recording directly
|
||||
recording_id = "test-recording-id"
|
||||
await db_session.execute(
|
||||
insert(RecordingModel).values(
|
||||
id=recording_id,
|
||||
meeting_id=meeting_id,
|
||||
url="https://example.com/recording.mp4",
|
||||
object_key="recordings/test.mp4",
|
||||
duration=3600.0,
|
||||
created_at=old_date,
|
||||
)
|
||||
)
|
||||
await db_session.commit()
|
||||
|
||||
# Update transcript with meeting_id and recording_id
|
||||
await db_session.execute(
|
||||
update(TranscriptModel)
|
||||
.where(TranscriptModel.id == old_transcript.id)
|
||||
.values(meeting_id=meeting_id, recording_id=recording_id)
|
||||
)
|
||||
await db_session.commit()
|
||||
|
||||
# Mock settings
|
||||
with patch("reflector.worker.cleanup.settings") as mock_settings:
|
||||
mock_settings.PUBLIC_MODE = True
|
||||
mock_settings.PUBLIC_DATA_RETENTION_DAYS = 7
|
||||
|
||||
# Mock storage deletion
|
||||
with patch("reflector.db.transcripts.get_transcripts_storage") as mock_storage:
|
||||
with patch("reflector.worker.cleanup.get_recordings_storage") as mock_storage:
|
||||
mock_storage.return_value.delete_file = AsyncMock()
|
||||
with patch(
|
||||
"reflector.worker.cleanup.get_recordings_storage"
|
||||
) as mock_rec_storage:
|
||||
mock_rec_storage.return_value.delete_file = AsyncMock()
|
||||
|
||||
result = await cleanup_old_public_data()
|
||||
# Run cleanup with test session
|
||||
await cleanup_old_public_data(db_session)
|
||||
|
||||
# Check results
|
||||
assert result["transcripts_deleted"] == 1
|
||||
assert result["meetings_deleted"] == 1
|
||||
assert result["recordings_deleted"] == 1
|
||||
assert result["errors"] == []
|
||||
# Verify transcript was deleted
|
||||
result = await db_session.execute(
|
||||
select(TranscriptModel).where(TranscriptModel.id == old_transcript.id)
|
||||
)
|
||||
transcript = result.scalar_one_or_none()
|
||||
assert transcript is None
|
||||
|
||||
# Verify transcript was deleted
|
||||
assert await transcripts_controller.get_by_id(old_transcript.id) is None
|
||||
# Verify meeting was deleted
|
||||
result = await db_session.execute(
|
||||
select(MeetingModel).where(MeetingModel.id == meeting_id)
|
||||
)
|
||||
meeting = result.scalar_one_or_none()
|
||||
assert meeting is None
|
||||
|
||||
# Verify meeting was deleted
|
||||
query = meetings.select().where(meetings.c.id == meeting_id)
|
||||
meeting_result = await get_database().fetch_one(query)
|
||||
assert meeting_result is None
|
||||
|
||||
# Verify recording was deleted
|
||||
assert await recordings_controller.get_by_id(recording.id) is None
|
||||
# Verify recording was deleted
|
||||
result = await db_session.execute(
|
||||
select(RecordingModel).where(RecordingModel.id == recording_id)
|
||||
)
|
||||
recording = result.scalar_one_or_none()
|
||||
assert recording is None
|
||||
|
||||
|
||||

@pytest.mark.asyncio
async def test_cleanup_handles_errors_gracefully(db_session):
    """Test that cleanup continues even if individual deletions fail."""
    old_date = datetime.now(timezone.utc) - timedelta(days=8)

    # Create multiple old transcripts
    transcript1 = await transcripts_controller.add(
        db_session,
        name="Transcript 1",
        source_kind=SourceKind.FILE,
        user_id=None,
    )
    await db_session.execute(
        update(TranscriptModel)
        .where(TranscriptModel.id == transcript1.id)
        .values(created_at=old_date)
    )

    transcript2 = await transcripts_controller.add(
        db_session,
        name="Transcript 2",
        source_kind=SourceKind.FILE,
        user_id=None,
    )
    await db_session.execute(
        update(TranscriptModel)
        .where(TranscriptModel.id == transcript2.id)
        .values(created_at=old_date)
    )
    await db_session.commit()

    with patch("reflector.worker.cleanup.settings") as mock_settings:
        mock_settings.PUBLIC_MODE = True
        mock_settings.PUBLIC_DATA_RETENTION_DAYS = 7

        # Mock delete_single_transcript to fail on first call but succeed on second
        with patch("reflector.worker.cleanup.delete_single_transcript") as mock_delete:
            mock_delete.side_effect = [Exception("Delete failed"), None]

            # Run cleanup with test session - should not raise exception
            await cleanup_old_public_data(db_session)

            # Both transcripts should have been attempted to delete
            assert mock_delete.call_count == 2

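# Illustrative sketch, not from the reflector codebase: the error tolerance
# asserted above implies the cleanup worker wraps each per-transcript delete in
# its own try/except, roughly as below. The helper signature is an assumption.
async def _illustrative_cleanup(db_session, old_transcripts):
    from reflector.worker.cleanup import delete_single_transcript

    errors = []
    for transcript in old_transcripts:
        try:
            await delete_single_transcript(db_session, transcript)
        except Exception as exc:
            # One failed deletion must not abort the rest of the sweep.
            errors.append(f"Failed to delete transcript {transcript.id}: {exc}")
    return errors
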

@pytest.mark.asyncio
async def test_meeting_consent_cascade_delete(db_session):
    """Test that meeting_consent entries are cascade deleted with meetings."""
    old_date = datetime.now(timezone.utc) - timedelta(days=8)

    # Create an old transcript
    transcript = await transcripts_controller.add(
        db_session,
        name="Transcript with Meeting",
        source_kind=SourceKind.FILE,
        user_id=None,
    )
    await db_session.execute(
        update(TranscriptModel)
        .where(TranscriptModel.id == transcript.id)
        .values(created_at=old_date)
    )
    await db_session.commit()

    # Create a meeting directly
    meeting_id = "test-meeting-consent"
    await db_session.execute(
        insert(MeetingModel).values(
            id=meeting_id,
            room_id=None,
            room_name="test-room",
            room_url="https://example.com/room",
            host_room_url="https://example.com/room-host",
            start_date=old_date,
            end_date=old_date + timedelta(hours=1),
            is_active=False,
            num_clients=0,
            is_locked=False,
            room_mode="normal",
            recording_type="cloud",
            recording_trigger="automatic",
        )
    )
    await db_session.commit()

    # Update transcript with meeting_id
    await db_session.execute(
        update(TranscriptModel)
        .where(TranscriptModel.id == transcript.id)
        .values(meeting_id=meeting_id)
    )
    await db_session.commit()

    # Create meeting_consent entries
    await db_session.execute(
        insert(MeetingConsentModel).values(
            id="consent-1",
            meeting_id=meeting_id,
            user_id="user-1",
            consent_given=True,
            consent_timestamp=old_date,
        )
    )
    await db_session.execute(
        insert(MeetingConsentModel).values(
            id="consent-2",
            meeting_id=meeting_id,
            user_id="user-2",
            consent_given=True,
            consent_timestamp=old_date,
        )
    )
    await db_session.commit()

    # Verify consent entries exist
    result = await db_session.execute(
        select(MeetingConsentModel).where(MeetingConsentModel.meeting_id == meeting_id)
    )
    consents = result.scalars().all()
    assert len(consents) == 2

    # Delete the transcript and meeting
    await db_session.execute(
        delete(TranscriptModel).where(TranscriptModel.id == transcript.id)
    )
    await db_session.execute(delete(MeetingModel).where(MeetingModel.id == meeting_id))
    await db_session.commit()

    # Verify consent entries were cascade deleted
    result = await db_session.execute(
        select(MeetingConsentModel).where(MeetingConsentModel.meeting_id == meeting_id)
    )
    consents = result.scalars().all()
    assert len(consents) == 0

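# Illustrative sketch, not the actual reflector.db.meetings definitions: the
# cascade verified above relies on the consent -> meeting foreign key being
# declared with ondelete="CASCADE". Table and column names here are assumed.
from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import declarative_base

_Base = declarative_base()


class _IllustrativeMeetingConsent(_Base):
    __tablename__ = "illustrative_meeting_consent"

    id = Column(String, primary_key=True)
    # Deleting the parent meeting row removes its consent rows at the database level.
    meeting_id = Column(
        String, ForeignKey("meeting.id", ondelete="CASCADE"), nullable=False
    )
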
251
server/tests/test_ics_background_tasks.py
Normal file
@@ -0,0 +1,251 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from icalendar import Calendar, Event
|
||||
|
||||
from reflector.db.calendar_events import calendar_events_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.services.ics_sync import ics_sync_service
|
||||
from reflector.worker.ics_sync import (
|
||||
_should_sync,
|
||||
sync_room_ics,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sync_room_ics_task(db_session):
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="task-test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/task.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
await db_session.flush()
|
||||
|
||||
cal = Calendar()
|
||||
event = Event()
|
||||
event.add("uid", "task-event-1")
|
||||
event.add("summary", "Task Test Meeting")
|
||||
from reflector.settings import settings
|
||||
|
||||
event.add("location", f"{settings.UI_BASE_URL}/{room.name}")
|
||||
now = datetime.now(timezone.utc)
|
||||
event.add("dtstart", now + timedelta(hours=1))
|
||||
event.add("dtend", now + timedelta(hours=2))
|
||||
cal.add_component(event)
|
||||
ics_content = cal.to_ical().decode("utf-8")
|
||||
|
||||
with patch(
|
||||
"reflector.services.ics_sync.ICSFetchService.fetch_ics",
|
||||
new_callable=AsyncMock,
|
||||
) as mock_fetch:
|
||||
mock_fetch.return_value = ics_content
|
||||
|
||||
await ics_sync_service.sync_room_calendar(db_session, room)
|
||||
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
assert len(events) == 1
|
||||
assert events[0].ics_uid == "task-event-1"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sync_room_ics_disabled(db_session):
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="disabled-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_enabled=False,
|
||||
)
|
||||
|
||||
result = await ics_sync_service.sync_room_calendar(db_session, room)
|
||||
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
assert len(events) == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sync_all_ics_calendars(db_session):
|
||||
room1 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-all-1",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/1.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
room2 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-all-2",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/2.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
room3 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-all-3",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_enabled=False,
|
||||
)
|
||||
|
||||
with patch("reflector.worker.ics_sync.sync_room_ics.delay") as mock_delay:
|
||||
ics_enabled_rooms = await rooms_controller.get_ics_enabled(db_session)
|
||||
|
||||
for room in ics_enabled_rooms:
|
||||
if room and _should_sync(room):
|
||||
sync_room_ics.delay(room.id)
|
||||
|
||||
assert mock_delay.call_count == 2
|
||||
called_room_ids = [call.args[0] for call in mock_delay.call_args_list]
|
||||
assert room1.id in called_room_ids
|
||||
assert room2.id in called_room_ids
|
||||
assert room3.id not in called_room_ids
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_should_sync_logic():
|
||||
room = MagicMock()
|
||||
|
||||
room.ics_last_sync = None
|
||||
assert _should_sync(room) is True
|
||||
|
||||
room.ics_last_sync = datetime.now(timezone.utc) - timedelta(seconds=100)
|
||||
room.ics_fetch_interval = 300
|
||||
assert _should_sync(room) is False
|
||||
|
||||
room.ics_last_sync = datetime.now(timezone.utc) - timedelta(seconds=400)
|
||||
room.ics_fetch_interval = 300
|
||||
assert _should_sync(room) is True
|
||||
|
||||
|
||||
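# Illustrative sketch, not the actual reflector.worker.ics_sync implementation:
# the assertions above are consistent with _should_sync treating a room as due
# when it has never synced, or when ics_fetch_interval seconds have elapsed
# since ics_last_sync.
def _illustrative_should_sync(room) -> bool:
    if room.ics_last_sync is None:
        # Never synced before: always due.
        return True
    elapsed = (datetime.now(timezone.utc) - room.ics_last_sync).total_seconds()
    return elapsed >= room.ics_fetch_interval
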
@pytest.mark.asyncio
|
||||
async def test_sync_respects_fetch_interval(db_session):
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
room1 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="interval-test-1",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/interval.ics",
|
||||
ics_enabled=True,
|
||||
ics_fetch_interval=300,
|
||||
)
|
||||
|
||||
await rooms_controller.update(
|
||||
db_session,
|
||||
room1,
|
||||
{"ics_last_sync": now - timedelta(seconds=100)},
|
||||
)
|
||||
|
||||
room2 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="interval-test-2",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/interval2.ics",
|
||||
ics_enabled=True,
|
||||
ics_fetch_interval=60,
|
||||
)
|
||||
|
||||
await rooms_controller.update(
|
||||
db_session,
|
||||
room2,
|
||||
{"ics_last_sync": now - timedelta(seconds=100)},
|
||||
)
|
||||
|
||||
with patch("reflector.worker.ics_sync.sync_room_ics.delay") as mock_delay:
|
||||
ics_enabled_rooms = await rooms_controller.get_ics_enabled(db_session)
|
||||
|
||||
for room in ics_enabled_rooms:
|
||||
if room and _should_sync(room):
|
||||
sync_room_ics.delay(room.id)
|
||||
|
||||
assert mock_delay.call_count == 1
|
||||
assert mock_delay.call_args[0][0] == room2.id
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sync_handles_errors_gracefully(db_session):
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="error-task-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/error.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"reflector.services.ics_sync.ICSFetchService.fetch_ics", new_callable=AsyncMock
|
||||
) as mock_fetch:
|
||||
mock_fetch.side_effect = Exception("Network error")
|
||||
|
||||
result = await ics_sync_service.sync_room_calendar(db_session, room)
|
||||
assert result["status"] == "error"
|
||||
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
assert len(events) == 0
|
||||
296
server/tests/test_ics_sync.py
Normal file
@@ -0,0 +1,296 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from icalendar import Calendar, Event
|
||||
|
||||
from reflector.db.calendar_events import calendar_events_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
from reflector.services.ics_sync import ICSFetchService, ICSSyncService
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ics_fetch_service_event_matching():
|
||||
service = ICSFetchService()
|
||||
room_name = "test-room"
|
||||
room_url = "https://example.com/test-room"
|
||||
|
||||
# Create test event
|
||||
event = Event()
|
||||
event.add("uid", "test-123")
|
||||
event.add("summary", "Test Meeting")
|
||||
|
||||
# Test matching with full URL in location
|
||||
event.add("location", "https://example.com/test-room")
|
||||
assert service._event_matches_room(event, room_name, room_url) is True
|
||||
|
||||
# Test non-matching with URL without protocol (exact matching only now)
|
||||
event["location"] = "example.com/test-room"
|
||||
assert service._event_matches_room(event, room_name, room_url) is False
|
||||
|
||||
# Test matching in description
|
||||
event["location"] = "Conference Room A"
|
||||
event.add("description", f"Join at {room_url}")
|
||||
assert service._event_matches_room(event, room_name, room_url) is True
|
||||
|
||||
# Test non-matching
|
||||
event["location"] = "Different Room"
|
||||
event["description"] = "No room URL here"
|
||||
assert service._event_matches_room(event, room_name, room_url) is False
|
||||
|
||||
# Test partial paths should NOT match anymore
|
||||
event["location"] = "/test-room"
|
||||
assert service._event_matches_room(event, room_name, room_url) is False
|
||||
|
||||
event["location"] = f"Room: {room_name}"
|
||||
assert service._event_matches_room(event, room_name, room_url) is False
|
||||
|
||||
|
||||
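# Illustrative sketch, not the actual ICSFetchService._event_matches_room code:
# the assertions above are consistent with exact matching of the full room URL
# against the event location and description, with no partial-path fallback.
def _illustrative_event_matches_room(event, room_name: str, room_url: str) -> bool:
    location = str(event.get("location", ""))
    description = str(event.get("description", ""))
    # Only the complete room URL counts; bare paths or the room name alone do not.
    return room_url in location or room_url in description
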
@pytest.mark.asyncio
|
||||
async def test_ics_fetch_service_parse_event():
|
||||
service = ICSFetchService()
|
||||
|
||||
# Create test event
|
||||
event = Event()
|
||||
event.add("uid", "test-456")
|
||||
event.add("summary", "Team Standup")
|
||||
event.add("description", "Daily team sync")
|
||||
event.add("location", "https://example.com/standup")
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
event.add("dtstart", now)
|
||||
event.add("dtend", now + timedelta(hours=1))
|
||||
|
||||
# Add attendees
|
||||
event.add("attendee", "mailto:alice@example.com", parameters={"CN": "Alice"})
|
||||
event.add("attendee", "mailto:bob@example.com", parameters={"CN": "Bob"})
|
||||
event.add("organizer", "mailto:carol@example.com", parameters={"CN": "Carol"})
|
||||
|
||||
# Parse event
|
||||
result = service._parse_event(event)
|
||||
|
||||
assert result is not None
|
||||
assert result["ics_uid"] == "test-456"
|
||||
assert result["title"] == "Team Standup"
|
||||
assert result["description"] == "Daily team sync"
|
||||
assert result["location"] == "https://example.com/standup"
|
||||
assert len(result["attendees"]) == 3 # 2 attendees + 1 organizer
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ics_fetch_service_extract_room_events():
|
||||
service = ICSFetchService()
|
||||
room_name = "meeting"
|
||||
room_url = "https://example.com/meeting"
|
||||
|
||||
# Create calendar with multiple events
|
||||
cal = Calendar()
|
||||
|
||||
# Event 1: Matches room
|
||||
event1 = Event()
|
||||
event1.add("uid", "match-1")
|
||||
event1.add("summary", "Planning Meeting")
|
||||
event1.add("location", room_url)
|
||||
now = datetime.now(timezone.utc)
|
||||
event1.add("dtstart", now + timedelta(hours=2))
|
||||
event1.add("dtend", now + timedelta(hours=3))
|
||||
cal.add_component(event1)
|
||||
|
||||
# Event 2: Doesn't match room
|
||||
event2 = Event()
|
||||
event2.add("uid", "no-match")
|
||||
event2.add("summary", "Other Meeting")
|
||||
event2.add("location", "https://example.com/other")
|
||||
event2.add("dtstart", now + timedelta(hours=4))
|
||||
event2.add("dtend", now + timedelta(hours=5))
|
||||
cal.add_component(event2)
|
||||
|
||||
# Event 3: Matches room in description
|
||||
event3 = Event()
|
||||
event3.add("uid", "match-2")
|
||||
event3.add("summary", "Review Session")
|
||||
event3.add("description", f"Meeting link: {room_url}")
|
||||
event3.add("dtstart", now + timedelta(hours=6))
|
||||
event3.add("dtend", now + timedelta(hours=7))
|
||||
cal.add_component(event3)
|
||||
|
||||
# Event 4: Cancelled event (should be skipped)
|
||||
event4 = Event()
|
||||
event4.add("uid", "cancelled")
|
||||
event4.add("summary", "Cancelled Meeting")
|
||||
event4.add("location", room_url)
|
||||
event4.add("status", "CANCELLED")
|
||||
event4.add("dtstart", now + timedelta(hours=8))
|
||||
event4.add("dtend", now + timedelta(hours=9))
|
||||
cal.add_component(event4)
|
||||
|
||||
# Extract events
|
||||
events, total_events = service.extract_room_events(cal, room_name, room_url)
|
||||
|
||||
assert len(events) == 2
|
||||
assert total_events == 3 # 3 events in time window (excluding cancelled)
|
||||
assert events[0]["ics_uid"] == "match-1"
|
||||
assert events[1]["ics_uid"] == "match-2"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ics_sync_service_sync_room_calendar(db_session):
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-test",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/test.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
await db_session.flush()
|
||||
|
||||
# Mock ICS content
|
||||
cal = Calendar()
|
||||
event = Event()
|
||||
event.add("uid", "sync-event-1")
|
||||
event.add("summary", "Sync Test Meeting")
|
||||
# Use the actual UI_BASE_URL from settings
|
||||
from reflector.settings import settings
|
||||
|
||||
event.add("location", f"{settings.UI_BASE_URL}/{room.name}")
|
||||
now = datetime.now(timezone.utc)
|
||||
event.add("dtstart", now + timedelta(hours=1))
|
||||
event.add("dtend", now + timedelta(hours=2))
|
||||
cal.add_component(event)
|
||||
ics_content = cal.to_ical().decode("utf-8")
|
||||
|
||||
# Create sync service and mock fetch
|
||||
sync_service = ICSSyncService()
|
||||
|
||||
with patch.object(
|
||||
sync_service.fetch_service, "fetch_ics", new_callable=AsyncMock
|
||||
) as mock_fetch:
|
||||
mock_fetch.return_value = ics_content
|
||||
|
||||
# First sync
|
||||
result = await sync_service.sync_room_calendar(db_session, room)
|
||||
|
||||
assert result["status"] == "success"
|
||||
assert result["events_found"] == 1
|
||||
assert result["events_created"] == 1
|
||||
assert result["events_updated"] == 0
|
||||
assert result["events_deleted"] == 0
|
||||
|
||||
# Verify event was created
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
assert len(events) == 1
|
||||
assert events[0].ics_uid == "sync-event-1"
|
||||
assert events[0].title == "Sync Test Meeting"
|
||||
|
||||
# Second sync with same content (should be unchanged)
|
||||
# Refresh room to get updated etag and force sync by setting old sync time
|
||||
room = await rooms_controller.get_by_id(db_session, room.id)
|
||||
await rooms_controller.update(
|
||||
db_session,
|
||||
room,
|
||||
{"ics_last_sync": datetime.now(timezone.utc) - timedelta(minutes=10)},
|
||||
)
|
||||
result = await sync_service.sync_room_calendar(db_session, room)
|
||||
assert result["status"] == "unchanged"
|
||||
|
||||
# Third sync with updated event
|
||||
event["summary"] = "Updated Meeting Title"
|
||||
cal = Calendar()
|
||||
cal.add_component(event)
|
||||
ics_content = cal.to_ical().decode("utf-8")
|
||||
mock_fetch.return_value = ics_content
|
||||
|
||||
# Force sync by clearing etag
|
||||
await rooms_controller.update(db_session, room, {"ics_last_etag": None})
|
||||
|
||||
result = await sync_service.sync_room_calendar(db_session, room)
|
||||
assert result["status"] == "success"
|
||||
assert result["events_created"] == 0
|
||||
assert result["events_updated"] == 1
|
||||
|
||||
# Verify event was updated
|
||||
events = await calendar_events_controller.get_by_room(db_session, room.id)
|
||||
assert len(events) == 1
|
||||
assert events[0].title == "Updated Meeting Title"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ics_sync_service_should_sync():
|
||||
service = ICSSyncService()
|
||||
|
||||
# Room never synced
|
||||
room = MagicMock()
|
||||
room.ics_last_sync = None
|
||||
room.ics_fetch_interval = 300
|
||||
assert service._should_sync(room) is True
|
||||
|
||||
# Room synced recently
|
||||
room.ics_last_sync = datetime.now(timezone.utc) - timedelta(seconds=100)
|
||||
assert service._should_sync(room) is False
|
||||
|
||||
# Room sync due
|
||||
room.ics_last_sync = datetime.now(timezone.utc) - timedelta(seconds=400)
|
||||
assert service._should_sync(room) is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ics_sync_service_skip_disabled():
|
||||
service = ICSSyncService()
|
||||
|
||||
# Room with ICS disabled
|
||||
room = MagicMock()
|
||||
room.ics_enabled = False
|
||||
room.ics_url = "https://calendar.example.com/test.ics"
|
||||
|
||||
result = await service.sync_room_calendar(MagicMock(), room)
|
||||
assert result["status"] == "skipped"
|
||||
assert result["reason"] == "ICS not configured"
|
||||
|
||||
# Room without URL
|
||||
room.ics_enabled = True
|
||||
room.ics_url = None
|
||||
|
||||
result = await service.sync_room_calendar(MagicMock(), room)
|
||||
assert result["status"] == "skipped"
|
||||
assert result["reason"] == "ICS not configured"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ics_sync_service_error_handling(db_session):
|
||||
# Create room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="error-test",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/error.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
await db_session.flush()
|
||||
|
||||
sync_service = ICSSyncService()
|
||||
|
||||
with patch.object(
|
||||
sync_service.fetch_service, "fetch_ics", new_callable=AsyncMock
|
||||
) as mock_fetch:
|
||||
mock_fetch.side_effect = Exception("Network error")
|
||||
|
||||
result = await sync_service.sync_room_calendar(db_session, room)
|
||||
assert result["status"] == "error"
|
||||
assert "Network error" in result["error"]
|
||||
63
server/tests/test_model_api_diarization.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""
|
||||
Tests for diarization Model API endpoint (self-hosted service compatible shape).
|
||||
|
||||
Marked with the "model_api" marker and skipped unless DIARIZATION_URL is provided.
|
||||
|
||||
Run with for local self-hosted server:
|
||||
DIARIZATION_API_KEY=dev-key \
|
||||
DIARIZATION_URL=http://localhost:8000 \
|
||||
uv run -m pytest -m model_api --no-cov tests/test_model_api_diarization.py
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
# Public test audio file hosted on S3 specifically for reflector pytests
|
||||
TEST_AUDIO_URL = (
|
||||
"https://reflector-github-pytest.s3.us-east-1.amazonaws.com/test_mathieu_hello.mp3"
|
||||
)
|
||||
|
||||
|
||||
def get_modal_diarization_url():
|
||||
url = os.environ.get("DIARIZATION_URL")
|
||||
if not url:
|
||||
pytest.skip(
|
||||
"DIARIZATION_URL environment variable is required for Model API tests"
|
||||
)
|
||||
return url
|
||||
|
||||
|
||||
def get_auth_headers():
|
||||
api_key = os.environ.get("DIARIZATION_API_KEY") or os.environ.get(
|
||||
"REFLECTOR_GPU_APIKEY"
|
||||
)
|
||||
return {"Authorization": f"Bearer {api_key}"} if api_key else {}
|
||||
|
||||
|
||||
@pytest.mark.model_api
|
||||
class TestModelAPIDiarization:
|
||||
def test_diarize_from_url(self):
|
||||
url = get_modal_diarization_url()
|
||||
headers = get_auth_headers()
|
||||
|
||||
with httpx.Client(timeout=60.0) as client:
|
||||
response = client.post(
|
||||
f"{url}/diarize",
|
||||
params={"audio_file_url": TEST_AUDIO_URL, "timestamp": 0.0},
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
assert response.status_code == 200, f"Request failed: {response.text}"
|
||||
result = response.json()
|
||||
|
||||
assert "diarization" in result
|
||||
assert isinstance(result["diarization"], list)
|
||||
assert len(result["diarization"]) > 0
|
||||
|
||||
for seg in result["diarization"]:
|
||||
assert "start" in seg and "end" in seg and "speaker" in seg
|
||||
assert isinstance(seg["start"], (int, float))
|
||||
assert isinstance(seg["end"], (int, float))
|
||||
assert seg["start"] <= seg["end"]
|
||||
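# Illustrative only: the diarization response shape those assertions expect looks
# roughly like the dict below (values and speaker labels are made up).
EXAMPLE_DIARIZE_RESPONSE = {
    "diarization": [
        {"start": 0.0, "end": 2.4, "speaker": "SPEAKER_00"},
        {"start": 2.4, "end": 5.1, "speaker": "SPEAKER_01"},
    ]
}
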
@@ -1,21 +1,21 @@
"""
Tests for transcription Model API endpoints.

These tests are marked with the "model_api" group and will not run by default.
Run them with: pytest -m model_api tests/test_model_api_transcript.py

Required environment variables:
- TRANSCRIPT_URL: URL to the Model API endpoint (required)
- TRANSCRIPT_API_KEY: API key for authentication (optional)
- TRANSCRIPT_MODEL: Model name to use (optional, defaults to nvidia/parakeet-tdt-0.6b-v2)

Example with pytest (override default addopts to run ONLY model_api tests):
TRANSCRIPT_URL=https://monadical-sas--reflector-transcriber-parakeet-web-dev.modal.run \
TRANSCRIPT_API_KEY=your-api-key \
uv run -m pytest -m model_api --no-cov tests/test_model_api_transcript.py

# Or with completely clean options:
uv run -m pytest -m model_api -o addopts="" tests/

Running Modal locally for testing:
modal serve gpu/modal_deployments/reflector_transcriber_parakeet.py
@@ -40,14 +40,16 @@ def get_modal_transcript_url():
    url = os.environ.get("TRANSCRIPT_URL")
    if not url:
        pytest.skip(
            "TRANSCRIPT_URL environment variable is required for Model API tests"
        )
    return url


def get_auth_headers():
    """Get authentication headers if API key is available."""
    api_key = os.environ.get("TRANSCRIPT_API_KEY") or os.environ.get(
        "REFLECTOR_GPU_APIKEY"
    )
    if api_key:
        return {"Authorization": f"Bearer {api_key}"}
    return {}
@@ -58,8 +60,8 @@ def get_model_name():
    return os.environ.get("TRANSCRIPT_MODEL", "nvidia/parakeet-tdt-0.6b-v2")


@pytest.mark.model_api
class TestModelAPITranscript:
    """Test suite for GPU Modal transcription endpoints."""

    def test_transcriptions_from_url(self):
56
server/tests/test_model_api_translation.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Tests for translation Model API endpoint (self-hosted service compatible shape).
|
||||
|
||||
Marked with the "model_api" marker and skipped unless TRANSLATION_URL is provided
|
||||
or we fallback to TRANSCRIPT_URL base (same host for self-hosted).
|
||||
|
||||
Run locally against self-hosted server:
|
||||
TRANSLATION_API_KEY=dev-key \
|
||||
TRANSLATION_URL=http://localhost:8000 \
|
||||
uv run -m pytest -m model_api --no-cov tests/test_model_api_translation.py
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
|
||||
|
||||
def get_translation_url():
|
||||
url = os.environ.get("TRANSLATION_URL") or os.environ.get("TRANSCRIPT_URL")
|
||||
if not url:
|
||||
pytest.skip(
|
||||
"TRANSLATION_URL or TRANSCRIPT_URL environment variable is required for Model API tests"
|
||||
)
|
||||
return url
|
||||
|
||||
|
||||
def get_auth_headers():
|
||||
api_key = os.environ.get("TRANSLATION_API_KEY") or os.environ.get(
|
||||
"REFLECTOR_GPU_APIKEY"
|
||||
)
|
||||
return {"Authorization": f"Bearer {api_key}"} if api_key else {}
|
||||
|
||||
|
||||
@pytest.mark.model_api
|
||||
class TestModelAPITranslation:
|
||||
def test_translate_text(self):
|
||||
url = get_translation_url()
|
||||
headers = get_auth_headers()
|
||||
|
||||
with httpx.Client(timeout=60.0) as client:
|
||||
response = client.post(
|
||||
f"{url}/translate",
|
||||
params={"text": "The meeting will start in five minutes."},
|
||||
json={"source_language": "en", "target_language": "fr"},
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
assert response.status_code == 200, f"Request failed: {response.text}"
|
||||
data = response.json()
|
||||
|
||||
assert "text" in data and isinstance(data["text"], dict)
|
||||
assert data["text"].get("en") == "The meeting will start in five minutes."
|
||||
assert isinstance(data["text"].get("fr", ""), str)
|
||||
assert len(data["text"]["fr"]) > 0
|
||||
assert data["text"]["fr"] == "La réunion commencera dans cinq minutes."
|
||||
176
server/tests/test_multiple_active_meetings.py
Normal file
@@ -0,0 +1,176 @@
|
||||
"""Tests for multiple active meetings per room functionality."""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db.calendar_events import CalendarEvent, calendar_events_controller
|
||||
from reflector.db.meetings import meetings_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_multiple_active_meetings_per_room(db_session):
|
||||
"""Test that multiple active meetings can exist for the same room."""
|
||||
# Create a room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
end_time = current_time + timedelta(hours=2)
|
||||
|
||||
# Create first meeting
|
||||
meeting1 = await meetings_controller.create(
|
||||
db_session,
|
||||
id="meeting-1",
|
||||
room_name="test-meeting-1",
|
||||
room_url="https://whereby.com/test-1",
|
||||
host_room_url="https://whereby.com/test-1-host",
|
||||
start_date=current_time,
|
||||
end_date=end_time,
|
||||
room=room,
|
||||
)
|
||||
|
||||
# Create second meeting for the same room (should succeed now)
|
||||
meeting2 = await meetings_controller.create(
|
||||
db_session,
|
||||
id="meeting-2",
|
||||
room_name="test-meeting-2",
|
||||
room_url="https://whereby.com/test-2",
|
||||
host_room_url="https://whereby.com/test-2-host",
|
||||
start_date=current_time,
|
||||
end_date=end_time,
|
||||
room=room,
|
||||
)
|
||||
|
||||
# Both meetings should be active
|
||||
active_meetings = await meetings_controller.get_all_active_for_room(
|
||||
db_session, room=room, current_time=current_time
|
||||
)
|
||||
|
||||
assert len(active_meetings) == 2
|
||||
assert meeting1.id in [m.id for m in active_meetings]
|
||||
assert meeting2.id in [m.id for m in active_meetings]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_active_by_calendar_event(db_session):
|
||||
"""Test getting active meeting by calendar event ID."""
|
||||
# Create a room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
# Create a calendar event
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="test-event-uid",
|
||||
title="Test Meeting",
|
||||
start_time=datetime.now(timezone.utc),
|
||||
end_time=datetime.now(timezone.utc) + timedelta(hours=1),
|
||||
)
|
||||
event = await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
end_time = current_time + timedelta(hours=2)
|
||||
|
||||
# Create meeting linked to calendar event
|
||||
meeting = await meetings_controller.create(
|
||||
db_session,
|
||||
id="meeting-cal-1",
|
||||
room_name="test-meeting-cal",
|
||||
room_url="https://whereby.com/test-cal",
|
||||
host_room_url="https://whereby.com/test-cal-host",
|
||||
start_date=current_time,
|
||||
end_date=end_time,
|
||||
room=room,
|
||||
calendar_event_id=event.id,
|
||||
calendar_metadata={"title": event.title},
|
||||
)
|
||||
|
||||
# Should find the meeting by calendar event
|
||||
found_meeting = await meetings_controller.get_active_by_calendar_event(
|
||||
db_session, room=room, calendar_event_id=event.id, current_time=current_time
|
||||
)
|
||||
|
||||
assert found_meeting is not None
|
||||
assert found_meeting.id == meeting.id
|
||||
assert found_meeting.calendar_event_id == event.id
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_calendar_meeting_deactivates_after_scheduled_end(db_session):
|
||||
"""Test that unused calendar meetings deactivate after scheduled end time."""
|
||||
# Create a room
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
# Create a calendar event that ended 35 minutes ago
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="test-event-unused",
|
||||
title="Test Meeting Unused",
|
||||
start_time=datetime.now(timezone.utc) - timedelta(hours=2),
|
||||
end_time=datetime.now(timezone.utc) - timedelta(minutes=35),
|
||||
)
|
||||
event = await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
current_time = datetime.now(timezone.utc)
|
||||
|
||||
# Create meeting linked to calendar event
|
||||
meeting = await meetings_controller.create(
|
||||
db_session,
|
||||
id="meeting-unused",
|
||||
room_name="test-meeting-unused",
|
||||
room_url="https://whereby.com/test-unused",
|
||||
host_room_url="https://whereby.com/test-unused-host",
|
||||
start_date=event.start_time,
|
||||
end_date=event.end_time,
|
||||
room=room,
|
||||
calendar_event_id=event.id,
|
||||
)
|
||||
|
||||
# Test the new logic: unused calendar meetings deactivate after scheduled end
|
||||
# The meeting ended 35 minutes ago and was never used, so it should be deactivated
|
||||
|
||||
# Simulate process_meetings logic for unused calendar meeting past end time
|
||||
if meeting.calendar_event_id and current_time > meeting.end_date:
|
||||
# In real code, we'd check has_had_sessions = False here
|
||||
await meetings_controller.update_meeting(
|
||||
db_session, meeting.id, is_active=False
|
||||
)
|
||||
|
||||
updated_meeting = await meetings_controller.get_by_id(db_session, meeting.id)
|
||||
assert updated_meeting.is_active is False # Deactivated after scheduled end
|
||||
@@ -101,21 +101,37 @@ async def mock_transcript_in_db(tmpdir):
        target_language="en",
    )

    # Mock all transcripts controller methods that are used in the pipeline
    try:
        with patch(
            "reflector.pipelines.main_file_pipeline.transcripts_controller.get_by_id"
        ) as mock_get:
            mock_get.return_value = transcript
            with patch(
                "reflector.pipelines.main_file_pipeline.transcripts_controller.update"
            ) as mock_update:
                mock_update.return_value = transcript
                with patch(
                    "reflector.pipelines.main_file_pipeline.transcripts_controller.set_status"
                ) as mock_set_status:
                    mock_set_status.return_value = None
                    with patch(
                        "reflector.pipelines.main_file_pipeline.transcripts_controller.upsert_topic"
                    ) as mock_upsert_topic:
                        mock_upsert_topic.return_value = None
                        with patch(
                            "reflector.pipelines.main_file_pipeline.transcripts_controller.append_event"
                        ) as mock_append_event:
                            mock_append_event.return_value = None
                            with patch(
                                "reflector.pipelines.main_live_pipeline.transcripts_controller.get_by_id"
                            ) as mock_get2:
                                mock_get2.return_value = transcript
                                with patch(
                                    "reflector.pipelines.main_live_pipeline.transcripts_controller.update"
                                ) as mock_update2:
                                    mock_update2.return_value = None
                                    yield transcript
    finally:
        # Restore original DATA_DIR
        settings.DATA_DIR = original_data_dir
@@ -608,7 +624,11 @@ async def test_pipeline_file_process_no_transcript():

    # Should raise an exception for missing transcript when get_transcript is called
    # Use a mock session - the controller is mocked to return None anyway
    from unittest.mock import MagicMock

    mock_session = MagicMock()
    with pytest.raises(Exception, match="Transcript not found"):
        await pipeline.get_transcript(mock_session)


@pytest.mark.asyncio
235
server/tests/test_room_ics.py
Normal file
@@ -0,0 +1,235 @@
|
||||
"""
|
||||
Tests for Room model ICS calendar integration fields.
|
||||
"""
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import pytest
|
||||
|
||||
from reflector.db.rooms import rooms_controller
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_create_with_ics_fields(db_session):
|
||||
"""Test creating a room with ICS calendar fields."""
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.google.com/calendar/ical/test/private-token/basic.ics",
|
||||
ics_fetch_interval=600,
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
assert room.name == "test-room"
|
||||
assert (
|
||||
room.ics_url
|
||||
== "https://calendar.google.com/calendar/ical/test/private-token/basic.ics"
|
||||
)
|
||||
assert room.ics_fetch_interval == 600
|
||||
assert room.ics_enabled is True
|
||||
assert room.ics_last_sync is None
|
||||
assert room.ics_last_etag is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_update_ics_configuration(db_session):
|
||||
"""Test updating room ICS configuration."""
|
||||
# Create room without ICS
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="update-test",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
assert room.ics_enabled is False
|
||||
assert room.ics_url is None
|
||||
|
||||
# Update with ICS configuration
|
||||
await rooms_controller.update(
|
||||
db_session,
|
||||
room,
|
||||
{
|
||||
"ics_url": "https://outlook.office365.com/owa/calendar/test/calendar.ics",
|
||||
"ics_fetch_interval": 300,
|
||||
"ics_enabled": True,
|
||||
},
|
||||
)
|
||||
|
||||
assert (
|
||||
room.ics_url == "https://outlook.office365.com/owa/calendar/test/calendar.ics"
|
||||
)
|
||||
assert room.ics_fetch_interval == 300
|
||||
assert room.ics_enabled is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_ics_sync_metadata(db_session):
|
||||
"""Test updating room ICS sync metadata."""
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-test",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://example.com/calendar.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
# Update sync metadata
|
||||
sync_time = datetime.now(timezone.utc)
|
||||
await rooms_controller.update(
|
||||
db_session,
|
||||
room,
|
||||
{
|
||||
"ics_last_sync": sync_time,
|
||||
"ics_last_etag": "abc123hash",
|
||||
},
|
||||
)
|
||||
|
||||
assert room.ics_last_sync == sync_time
|
||||
assert room.ics_last_etag == "abc123hash"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_get_with_ics_fields(db_session):
|
||||
"""Test retrieving room with ICS fields."""
|
||||
# Create room
|
||||
created_room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="get-test",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="webcal://calendar.example.com/feed.ics",
|
||||
ics_fetch_interval=900,
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
# Get by ID
|
||||
room = await rooms_controller.get_by_id(db_session, created_room.id)
|
||||
assert room is not None
|
||||
assert room.ics_url == "webcal://calendar.example.com/feed.ics"
|
||||
assert room.ics_fetch_interval == 900
|
||||
assert room.ics_enabled is True
|
||||
|
||||
# Get by name
|
||||
room = await rooms_controller.get_by_name(db_session, "get-test")
|
||||
assert room is not None
|
||||
assert room.ics_url == "webcal://calendar.example.com/feed.ics"
|
||||
assert room.ics_fetch_interval == 900
|
||||
assert room.ics_enabled is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_list_with_ics_enabled_filter(db_session):
|
||||
"""Test listing rooms filtered by ICS enabled status."""
|
||||
# Create rooms with and without ICS
|
||||
room1 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="ics-enabled-1",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=True,
|
||||
ics_enabled=True,
|
||||
ics_url="https://calendar1.example.com/feed.ics",
|
||||
)
|
||||
|
||||
room2 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="ics-disabled",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=True,
|
||||
ics_enabled=False,
|
||||
)
|
||||
|
||||
room3 = await rooms_controller.add(
|
||||
db_session,
|
||||
name="ics-enabled-2",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=True,
|
||||
ics_enabled=True,
|
||||
ics_url="https://calendar2.example.com/feed.ics",
|
||||
)
|
||||
|
||||
# Get all rooms
|
||||
all_rooms = await rooms_controller.get_all(db_session)
|
||||
assert len(all_rooms) == 3
|
||||
|
||||
# Filter for ICS-enabled rooms (would need to implement this in controller)
|
||||
ics_rooms = [r for r in all_rooms if r.ics_enabled]
|
||||
assert len(ics_rooms) == 2
|
||||
assert all(r.ics_enabled for r in ics_rooms)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_default_ics_values(db_session):
|
||||
"""Test that ICS fields have correct default values."""
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="default-test",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
# Don't specify ICS fields
|
||||
)
|
||||
|
||||
assert room.ics_url is None
|
||||
assert room.ics_fetch_interval == 300 # Default 5 minutes
|
||||
assert room.ics_enabled is False
|
||||
assert room.ics_last_sync is None
|
||||
assert room.ics_last_etag is None
|
||||
399
server/tests/test_room_ics_api.py
Normal file
@@ -0,0 +1,399 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from icalendar import Calendar, Event
|
||||
|
||||
from reflector.db.calendar_events import CalendarEvent, calendar_events_controller
|
||||
from reflector.db.rooms import rooms_controller
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def authenticated_client(client):
|
||||
from reflector.app import app
|
||||
from reflector.auth import current_user_optional
|
||||
|
||||
app.dependency_overrides[current_user_optional] = lambda: {
|
||||
"sub": "test-user",
|
||||
"email": "test@example.com",
|
||||
}
|
||||
yield client
|
||||
del app.dependency_overrides[current_user_optional]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_room_with_ics_fields(authenticated_client):
|
||||
client = authenticated_client
|
||||
response = await client.post(
|
||||
"/rooms",
|
||||
json={
|
||||
"name": "test-ics-room",
|
||||
"zulip_auto_post": False,
|
||||
"zulip_stream": "",
|
||||
"zulip_topic": "",
|
||||
"is_locked": False,
|
||||
"room_mode": "normal",
|
||||
"recording_type": "cloud",
|
||||
"recording_trigger": "automatic-2nd-participant",
|
||||
"is_shared": False,
|
||||
"webhook_url": "",
|
||||
"webhook_secret": "",
|
||||
"ics_url": "https://calendar.example.com/test.ics",
|
||||
"ics_fetch_interval": 600,
|
||||
"ics_enabled": True,
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["name"] == "test-ics-room"
|
||||
assert data["ics_url"] == "https://calendar.example.com/test.ics"
|
||||
assert data["ics_fetch_interval"] == 600
|
||||
assert data["ics_enabled"] is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_room_ics_configuration(authenticated_client):
|
||||
client = authenticated_client
|
||||
response = await client.post(
|
||||
"/rooms",
|
||||
json={
|
||||
"name": "update-ics-room",
|
||||
"zulip_auto_post": False,
|
||||
"zulip_stream": "",
|
||||
"zulip_topic": "",
|
||||
"is_locked": False,
|
||||
"room_mode": "normal",
|
||||
"recording_type": "cloud",
|
||||
"recording_trigger": "automatic-2nd-participant",
|
||||
"is_shared": False,
|
||||
"webhook_url": "",
|
||||
"webhook_secret": "",
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
room_id = response.json()["id"]
|
||||
|
||||
response = await client.patch(
|
||||
f"/rooms/{room_id}",
|
||||
json={
|
||||
"ics_url": "https://calendar.google.com/updated.ics",
|
||||
"ics_fetch_interval": 300,
|
||||
"ics_enabled": True,
|
||||
},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["ics_url"] == "https://calendar.google.com/updated.ics"
|
||||
assert data["ics_fetch_interval"] == 300
|
||||
assert data["ics_enabled"] is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_trigger_ics_sync(authenticated_client, db_session):
|
||||
client = authenticated_client
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-api-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/api.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
cal = Calendar()
|
||||
event = Event()
|
||||
event.add("uid", "api-test-event")
|
||||
event.add("summary", "API Test Meeting")
|
||||
from reflector.settings import settings
|
||||
|
||||
event.add("location", f"{settings.UI_BASE_URL}/{room.name}")
|
||||
now = datetime.now(timezone.utc)
|
||||
event.add("dtstart", now + timedelta(hours=1))
|
||||
event.add("dtend", now + timedelta(hours=2))
|
||||
cal.add_component(event)
|
||||
ics_content = cal.to_ical().decode("utf-8")
|
||||
|
||||
with patch(
|
||||
"reflector.services.ics_sync.ICSFetchService.fetch_ics", new_callable=AsyncMock
|
||||
) as mock_fetch:
|
||||
mock_fetch.return_value = ics_content
|
||||
|
||||
response = await client.post(f"/rooms/{room.name}/ics/sync")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "success"
|
||||
assert data["events_found"] == 1
|
||||
assert data["events_created"] == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_trigger_ics_sync_unauthorized(client, db_session):
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-unauth-room",
|
||||
user_id="owner-123",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/api.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
response = await client.post(f"/rooms/{room.name}/ics/sync")
|
||||
assert response.status_code == 403
|
||||
assert "Only room owner can trigger ICS sync" in response.json()["detail"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_trigger_ics_sync_not_configured(authenticated_client, db_session):
|
||||
client = authenticated_client
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="sync-not-configured",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_enabled=False,
|
||||
)
|
||||
|
||||
response = await client.post(f"/rooms/{room.name}/ics/sync")
|
||||
assert response.status_code == 400
|
||||
assert "ICS not configured" in response.json()["detail"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_ics_status(authenticated_client, db_session):
|
||||
client = authenticated_client
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="status-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/status.ics",
|
||||
ics_enabled=True,
|
||||
ics_fetch_interval=300,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
await rooms_controller.update(
|
||||
db_session,
|
||||
room,
|
||||
{"ics_last_sync": now, "ics_last_etag": "test-etag"},
|
||||
)
|
||||
|
||||
response = await client.get(f"/rooms/{room.name}/ics/status")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "enabled"
|
||||
assert data["last_etag"] == "test-etag"
|
||||
assert data["events_count"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_ics_status_unauthorized(client, db_session):
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="status-unauth",
|
||||
user_id="owner-456",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
ics_url="https://calendar.example.com/status.ics",
|
||||
ics_enabled=True,
|
||||
)
|
||||
|
||||
response = await client.get(f"/rooms/{room.name}/ics/status")
|
||||
assert response.status_code == 403
|
||||
assert "Only room owner can view ICS status" in response.json()["detail"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_room_meetings(authenticated_client, db_session):
|
||||
client = authenticated_client
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="meetings-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
event1 = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="meeting-1",
|
||||
title="Past Meeting",
|
||||
start_time=now - timedelta(hours=2),
|
||||
end_time=now - timedelta(hours=1),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, event1)
|
||||
|
||||
event2 = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="meeting-2",
|
||||
title="Future Meeting",
|
||||
description="Team sync",
|
||||
start_time=now + timedelta(hours=1),
|
||||
end_time=now + timedelta(hours=2),
|
||||
attendees=[{"email": "test@example.com"}],
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, event2)
|
||||
|
||||
response = await client.get(f"/rooms/{room.name}/meetings")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
assert data[0]["title"] == "Past Meeting"
|
||||
assert data[1]["title"] == "Future Meeting"
|
||||
assert data[1]["description"] == "Team sync"
|
||||
assert data[1]["attendees"] == [{"email": "test@example.com"}]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_room_meetings_non_owner(client, db_session):
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="meetings-privacy",
|
||||
user_id="owner-789",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="private-meeting",
|
||||
title="Meeting Title",
|
||||
description="Sensitive info",
|
||||
start_time=datetime.now(timezone.utc) + timedelta(hours=1),
|
||||
end_time=datetime.now(timezone.utc) + timedelta(hours=2),
|
||||
attendees=[{"email": "private@example.com"}],
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, event)
|
||||
|
||||
response = await client.get(f"/rooms/{room.name}/meetings")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 1
|
||||
assert data[0]["title"] == "Meeting Title"
|
||||
assert data[0]["description"] is None
|
||||
assert data[0]["attendees"] is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_list_upcoming_meetings(authenticated_client, db_session):
|
||||
client = authenticated_client
|
||||
room = await rooms_controller.add(
|
||||
db_session,
|
||||
name="upcoming-room",
|
||||
user_id="test-user",
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic-2nd-participant",
|
||||
is_shared=False,
|
||||
)
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
|
||||
past_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="past",
|
||||
title="Past",
|
||||
start_time=now - timedelta(hours=1),
|
||||
end_time=now - timedelta(minutes=30),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, past_event)
|
||||
|
||||
soon_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="soon",
|
||||
title="Soon",
|
||||
start_time=now + timedelta(minutes=15),
|
||||
end_time=now + timedelta(minutes=45),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, soon_event)
|
||||
|
||||
later_event = CalendarEvent(
|
||||
room_id=room.id,
|
||||
ics_uid="later",
|
||||
title="Later",
|
||||
start_time=now + timedelta(hours=2),
|
||||
end_time=now + timedelta(hours=3),
|
||||
)
|
||||
await calendar_events_controller.upsert(db_session, later_event)
|
||||
|
||||
response = await client.get(f"/rooms/{room.name}/meetings/upcoming")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
assert data[0]["title"] == "Soon"
|
||||
assert data[1]["title"] == "Later"
|
||||
|
||||
response = await client.get(
|
||||
f"/rooms/{room.name}/meetings/upcoming", params={"minutes_ahead": 180}
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert len(data) == 2
|
||||
assert data[0]["title"] == "Soon"
|
||||
assert data[1]["title"] == "Later"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_room_not_found_endpoints(client):
|
||||
response = await client.post("/rooms/nonexistent/ics/sync")
|
||||
assert response.status_code == 404
|
||||
|
||||
response = await client.get("/rooms/nonexistent/ics/status")
|
||||
assert response.status_code == 404
|
||||
|
||||
response = await client.get("/rooms/nonexistent/meetings")
|
||||
assert response.status_code == 404
|
||||
|
||||
response = await client.get("/rooms/nonexistent/meetings/upcoming")
|
||||
assert response.status_code == 404
|
||||
@@ -2,40 +2,40 @@
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import delete, insert
|
||||
|
||||
from reflector.db import get_database
|
||||
from reflector.db.base import TranscriptModel
|
||||
from reflector.db.search import (
|
||||
SearchController,
|
||||
SearchParameters,
|
||||
SearchResult,
|
||||
search_controller,
|
||||
)
|
||||
from reflector.db.transcripts import SourceKind, transcripts
|
||||
from reflector.db.transcripts import SourceKind
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_postgresql_only():
|
||||
async def test_search_postgresql_only(db_session):
|
||||
params = SearchParameters(query_text="any query here")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
assert results == []
|
||||
assert total == 0
|
||||
|
||||
params_empty = SearchParameters(query_text=None)
|
||||
results_empty, total_empty = await search_controller.search_transcripts(
|
||||
params_empty
|
||||
db_session, params_empty
|
||||
)
|
||||
assert isinstance(results_empty, list)
|
||||
assert isinstance(total_empty, int)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_with_empty_query():
|
||||
async def test_search_with_empty_query(db_session):
|
||||
"""Test that empty query returns all transcripts."""
|
||||
params = SearchParameters(query_text=None)
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
|
||||
assert isinstance(results, list)
|
||||
assert isinstance(total, int)
|
||||
@@ -45,13 +45,13 @@ async def test_search_with_empty_query():
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_empty_transcript_title_only_match():
|
||||
async def test_empty_transcript_title_only_match(db_session):
|
||||
"""Test that transcripts with title-only matches return empty snippets."""
|
||||
test_id = "test-empty-9b3f2a8d"
|
||||
|
||||
try:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
|
||||
test_data = {
|
||||
@@ -77,10 +77,11 @@ async def test_empty_transcript_title_only_match():
|
||||
"user_id": "test-user-1",
|
||||
}
|
||||
|
||||
await get_database().execute(transcripts.insert().values(**test_data))
|
||||
await db_session.execute(insert(TranscriptModel).values(**test_data))
|
||||
await db_session.commit()
|
||||
|
||||
params = SearchParameters(query_text="empty", user_id="test-user-1")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
|
||||
assert total >= 1
|
||||
found = next((r for r in results if r.id == test_id), None)
|
||||
@@ -89,20 +90,20 @@ async def test_empty_transcript_title_only_match():
|
||||
assert found.total_match_count == 0
|
||||
|
||||
finally:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
await get_database().disconnect()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_with_long_summary():
|
||||
async def test_search_with_long_summary(db_session):
|
||||
"""Test that long_summary content is searchable."""
|
||||
test_id = "test-long-summary-8a9f3c2d"
|
||||
|
||||
try:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
|
||||
test_data = {
|
||||
@@ -131,10 +132,11 @@ Basic meeting content without special keywords.""",
|
||||
"user_id": "test-user-2",
|
||||
}
|
||||
|
||||
await get_database().execute(transcripts.insert().values(**test_data))
|
||||
await db_session.execute(insert(TranscriptModel).values(**test_data))
|
||||
await db_session.commit()
|
||||
|
||||
params = SearchParameters(query_text="quantum computing", user_id="test-user-2")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
|
||||
assert total >= 1
|
||||
found = any(r.id == test_id for r in results)
|
||||
@@ -146,19 +148,19 @@ Basic meeting content without special keywords.""",
|
||||
assert "quantum computing" in test_result.search_snippets[0].lower()
|
||||
|
||||
finally:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
await get_database().disconnect()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_postgresql_search_with_data():
|
||||
async def test_postgresql_search_with_data(db_session):
|
||||
test_id = "test-search-e2e-7f3a9b2c"
|
||||
|
||||
try:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
|
||||
test_data = {
|
||||
@@ -196,16 +198,17 @@ We need to implement PostgreSQL tsvector for better performance.""",
|
||||
"user_id": "test-user-3",
|
||||
}
|
||||
|
||||
await get_database().execute(transcripts.insert().values(**test_data))
|
||||
await db_session.execute(insert(TranscriptModel).values(**test_data))
|
||||
await db_session.commit()
|
||||
|
||||
params = SearchParameters(query_text="planning", user_id="test-user-3")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
assert total >= 1
|
||||
found = any(r.id == test_id for r in results)
|
||||
assert found, "Should find test transcript by title word"
|
||||
|
||||
params = SearchParameters(query_text="tsvector", user_id="test-user-3")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
assert total >= 1
|
||||
found = any(r.id == test_id for r in results)
|
||||
assert found, "Should find test transcript by webvtt content"
|
||||
@@ -213,7 +216,7 @@ We need to implement PostgreSQL tsvector for better performance.""",
|
||||
params = SearchParameters(
|
||||
query_text="engineering planning", user_id="test-user-3"
|
||||
)
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
assert total >= 1
|
||||
found = any(r.id == test_id for r in results)
|
||||
assert found, "Should find test transcript by multiple words"
|
||||
@@ -228,7 +231,7 @@ We need to implement PostgreSQL tsvector for better performance.""",
|
||||
params = SearchParameters(
|
||||
query_text="tsvector OR nosuchword", user_id="test-user-3"
|
||||
)
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
assert total >= 1
|
||||
found = any(r.id == test_id for r in results)
|
||||
assert found, "Should find test transcript with OR query"
|
||||
@@ -236,16 +239,16 @@ We need to implement PostgreSQL tsvector for better performance.""",
|
||||
params = SearchParameters(
|
||||
query_text='"full-text search"', user_id="test-user-3"
|
||||
)
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
assert total >= 1
|
||||
found = any(r.id == test_id for r in results)
|
||||
assert found, "Should find test transcript by exact phrase"
|
||||
|
||||
finally:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
await get_database().disconnect()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -311,87 +314,56 @@ class TestSearchControllerFilters:
|
||||
"""Test SearchController functionality with various filters."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_with_source_kind_filter(self):
|
||||
async def test_search_with_source_kind_filter(self, db_session):
|
||||
"""Test search filtering by source_kind."""
|
||||
controller = SearchController()
|
||||
with (
|
||||
patch("reflector.db.search.is_postgresql", return_value=True),
|
||||
patch("reflector.db.search.get_database") as mock_db,
|
||||
):
|
||||
mock_db.return_value.fetch_all = AsyncMock(return_value=[])
|
||||
mock_db.return_value.fetch_val = AsyncMock(return_value=0)
|
||||
params = SearchParameters(query_text="test", source_kind=SourceKind.LIVE)
|
||||
|
||||
params = SearchParameters(query_text="test", source_kind=SourceKind.LIVE)
|
||||
# This should not fail, even if no results are found
|
||||
results, total = await controller.search_transcripts(db_session, params)
|
||||
|
||||
results, total = await controller.search_transcripts(params)
|
||||
|
||||
assert results == []
|
||||
assert total == 0
|
||||
|
||||
mock_db.return_value.fetch_all.assert_called_once()
|
||||
assert isinstance(results, list)
|
||||
assert isinstance(total, int)
|
||||
assert total >= 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_with_single_room_id(self):
|
||||
async def test_search_with_single_room_id(self, db_session):
|
||||
"""Test search filtering by single room ID (currently supported)."""
|
||||
controller = SearchController()
|
||||
with (
|
||||
patch("reflector.db.search.is_postgresql", return_value=True),
|
||||
patch("reflector.db.search.get_database") as mock_db,
|
||||
):
|
||||
mock_db.return_value.fetch_all = AsyncMock(return_value=[])
|
||||
mock_db.return_value.fetch_val = AsyncMock(return_value=0)
|
||||
params = SearchParameters(
|
||||
query_text="test",
|
||||
room_id="room1",
|
||||
)
|
||||
|
||||
params = SearchParameters(
|
||||
query_text="test",
|
||||
room_id="room1",
|
||||
)
|
||||
# This should not fail, even if no results are found
|
||||
results, total = await controller.search_transcripts(db_session, params)
|
||||
|
||||
results, total = await controller.search_transcripts(params)
|
||||
|
||||
assert results == []
|
||||
assert total == 0
|
||||
mock_db.return_value.fetch_all.assert_called_once()
|
||||
assert isinstance(results, list)
|
||||
assert isinstance(total, int)
|
||||
assert total >= 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_search_result_includes_available_fields(self, mock_db_result):
|
||||
async def test_search_result_includes_available_fields(
|
||||
self, db_session, mock_db_result
|
||||
):
|
||||
"""Test that search results include available fields like source_kind."""
|
||||
# Test that the search method works and returns SearchResult objects
|
||||
controller = SearchController()
|
||||
with (
|
||||
patch("reflector.db.search.is_postgresql", return_value=True),
|
||||
patch("reflector.db.search.get_database") as mock_db,
|
||||
):
|
||||
params = SearchParameters(query_text="test")
|
||||
|
||||
class MockRow:
|
||||
def __init__(self, data):
|
||||
self._data = data
|
||||
self._mapping = data
|
||||
results, total = await controller.search_transcripts(db_session, params)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._data.items())
|
||||
assert isinstance(results, list)
|
||||
assert isinstance(total, int)
|
||||
assert total >= 0
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._data[key]
|
||||
|
||||
def keys(self):
|
||||
return self._data.keys()
|
||||
|
||||
mock_row = MockRow(mock_db_result)
|
||||
|
||||
mock_db.return_value.fetch_all = AsyncMock(return_value=[mock_row])
|
||||
mock_db.return_value.fetch_val = AsyncMock(return_value=1)
|
||||
|
||||
params = SearchParameters(query_text="test")
|
||||
|
||||
results, total = await controller.search_transcripts(params)
|
||||
|
||||
assert total == 1
|
||||
assert len(results) == 1
|
||||
|
||||
result = results[0]
|
||||
# If any results exist, verify they are SearchResult objects
|
||||
for result in results:
|
||||
assert isinstance(result, SearchResult)
|
||||
assert result.id == "test-transcript-id"
|
||||
assert result.title == "Test Transcript"
|
||||
assert result.rank == 0.95
|
||||
assert hasattr(result, "id")
|
||||
assert hasattr(result, "title")
|
||||
assert hasattr(result, "rank")
|
||||
assert hasattr(result, "source_kind")
|
||||
|
||||
|
||||
class TestSearchEndpointParsing:
|
||||
|
||||
@@ -4,21 +4,21 @@ import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import delete, insert
|
||||
|
||||
from reflector.db import get_database
|
||||
from reflector.db.base import TranscriptModel
|
||||
from reflector.db.search import SearchParameters, search_controller
|
||||
from reflector.db.transcripts import transcripts
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_long_summary_snippet_prioritization():
|
||||
async def test_long_summary_snippet_prioritization(db_session):
|
||||
"""Test that snippets from long_summary are prioritized over webvtt content."""
|
||||
test_id = "test-snippet-priority-3f9a2b8c"
|
||||
|
||||
try:
|
||||
# Clean up any existing test data
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
|
||||
test_data = {
|
||||
@@ -57,11 +57,11 @@ We need to consider various implementation approaches.""",
|
||||
"user_id": "test-user-priority",
|
||||
}
|
||||
|
||||
await get_database().execute(transcripts.insert().values(**test_data))
|
||||
await db_session.execute(insert(TranscriptModel).values(**test_data))
|
||||
|
||||
# Search for "robotics" which appears in both long_summary and webvtt
|
||||
params = SearchParameters(query_text="robotics", user_id="test-user-priority")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
|
||||
assert total >= 1
|
||||
test_result = next((r for r in results if r.id == test_id), None)
|
||||
@@ -86,20 +86,20 @@ We need to consider various implementation approaches.""",
|
||||
), f"Snippet should contain search term: {snippet}"
|
||||
|
||||
finally:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
await get_database().disconnect()
|
||||
await db_session.commit()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_long_summary_only_search():
|
||||
async def test_long_summary_only_search(db_session):
|
||||
"""Test searching for content that only exists in long_summary."""
|
||||
test_id = "test-long-only-8b3c9f2a"
|
||||
|
||||
try:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
|
||||
test_data = {
|
||||
@@ -135,11 +135,11 @@ Discussion of timeline and deliverables.""",
|
||||
"user_id": "test-user-long",
|
||||
}
|
||||
|
||||
await get_database().execute(transcripts.insert().values(**test_data))
|
||||
await db_session.execute(insert(TranscriptModel).values(**test_data))
|
||||
|
||||
# Search for terms only in long_summary
|
||||
params = SearchParameters(query_text="cryptocurrency", user_id="test-user-long")
|
||||
results, total = await search_controller.search_transcripts(params)
|
||||
results, total = await search_controller.search_transcripts(db_session, params)
|
||||
|
||||
found = any(r.id == test_id for r in results)
|
||||
assert found, "Should find transcript by long_summary-only content"
|
||||
@@ -154,13 +154,15 @@ Discussion of timeline and deliverables.""",
|
||||
|
||||
# Search for "yield farming" - a more specific term
|
||||
params2 = SearchParameters(query_text="yield farming", user_id="test-user-long")
|
||||
results2, total2 = await search_controller.search_transcripts(params2)
|
||||
results2, total2 = await search_controller.search_transcripts(
|
||||
db_session, params2
|
||||
)
|
||||
|
||||
found2 = any(r.id == test_id for r in results2)
|
||||
assert found2, "Should find transcript by specific long_summary phrase"
|
||||
|
||||
finally:
|
||||
await get_database().execute(
|
||||
transcripts.delete().where(transcripts.c.id == test_id)
|
||||
await db_session.execute(
|
||||
delete(TranscriptModel).where(TranscriptModel.id == test_id)
|
||||
)
|
||||
await get_database().disconnect()
|
||||
await db_session.commit()
|
||||
|
||||
@@ -5,7 +5,7 @@ import pytest
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def fake_transcript(tmpdir, client):
|
||||
async def fake_transcript(tmpdir, client, db_session):
|
||||
from reflector.settings import settings
|
||||
from reflector.views.transcripts import transcripts_controller
|
||||
|
||||
@@ -16,10 +16,10 @@ async def fake_transcript(tmpdir, client):
|
||||
assert response.status_code == 200
|
||||
tid = response.json()["id"]
|
||||
|
||||
transcript = await transcripts_controller.get_by_id(tid)
|
||||
transcript = await transcripts_controller.get_by_id(db_session, tid)
|
||||
assert transcript is not None
|
||||
|
||||
await transcripts_controller.update(transcript, {"status": "ended"})
|
||||
await transcripts_controller.update(db_session, transcript, {"status": "ended"})
|
||||
|
||||
# manually copy a file at the expected location
|
||||
audio_filename = transcript.audio_mp3_filename
|
||||
|
||||
@@ -23,7 +23,6 @@ async def client(app_lifespan):
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_database")
|
||||
@pytest.mark.usefixtures("celery_session_app")
|
||||
@pytest.mark.usefixtures("celery_session_worker")
|
||||
@pytest.mark.asyncio
|
||||
|
||||
@@ -2,33 +2,84 @@ from datetime import datetime, timezone
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import insert
|
||||
|
||||
from reflector.db.recordings import Recording, recordings_controller
|
||||
from reflector.db.base import MeetingModel, RoomModel
|
||||
from reflector.db.recordings import recordings_controller
|
||||
from reflector.db.transcripts import SourceKind, transcripts_controller
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_recording_deleted_with_transcript():
|
||||
recording = await recordings_controller.create(
|
||||
Recording(
|
||||
bucket_name="test-bucket",
|
||||
object_key="recording.mp4",
|
||||
recorded_at=datetime.now(timezone.utc),
|
||||
async def test_recording_deleted_with_transcript(db_session):
|
||||
"""Test that a recording is deleted when its associated transcript is deleted."""
|
||||
# First create a room and meeting to satisfy foreign key constraints
|
||||
room_id = "test-room"
|
||||
await db_session.execute(
|
||||
insert(RoomModel).values(
|
||||
id=room_id,
|
||||
name="test-room",
|
||||
user_id="test-user",
|
||||
created_at=datetime.now(timezone.utc),
|
||||
zulip_auto_post=False,
|
||||
zulip_stream="",
|
||||
zulip_topic="",
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic",
|
||||
is_shared=False,
|
||||
)
|
||||
)
|
||||
|
||||
meeting_id = "test-meeting"
|
||||
await db_session.execute(
|
||||
insert(MeetingModel).values(
|
||||
id=meeting_id,
|
||||
room_id=room_id,
|
||||
room_name="test-room",
|
||||
room_url="https://example.com/room",
|
||||
host_room_url="https://example.com/room-host",
|
||||
start_date=datetime.now(timezone.utc),
|
||||
end_date=datetime.now(timezone.utc),
|
||||
is_active=False,
|
||||
num_clients=0,
|
||||
is_locked=False,
|
||||
room_mode="normal",
|
||||
recording_type="cloud",
|
||||
recording_trigger="automatic",
|
||||
)
|
||||
)
|
||||
await db_session.commit()
|
||||
|
||||
# Now create a recording
|
||||
recording = await recordings_controller.create(
|
||||
db_session,
|
||||
meeting_id=meeting_id,
|
||||
url="https://example.com/recording.mp4",
|
||||
object_key="recordings/test.mp4",
|
||||
duration=3600.0,
|
||||
created_at=datetime.now(timezone.utc),
|
||||
)
|
||||
|
||||
# Create a transcript associated with the recording
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.ROOM,
|
||||
recording_id=recording.id,
|
||||
)
|
||||
|
||||
# Mock the storage deletion
|
||||
with patch("reflector.db.transcripts.get_recordings_storage") as mock_get_storage:
|
||||
storage_instance = mock_get_storage.return_value
|
||||
storage_instance.delete_file = AsyncMock()
|
||||
|
||||
await transcripts_controller.remove_by_id(transcript.id)
|
||||
# Delete the transcript
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
# Verify that the recording file was deleted from storage
|
||||
storage_instance.delete_file.assert_awaited_once_with(recording.object_key)
|
||||
|
||||
assert await recordings_controller.get_by_id(recording.id) is None
|
||||
assert await transcripts_controller.get_by_id(transcript.id) is None
|
||||
# Verify both the recording and transcript are deleted
|
||||
assert await recordings_controller.get_by_id(db_session, recording.id) is None
|
||||
assert await transcripts_controller.get_by_id(db_session, transcript.id) is None
|
||||
|
||||
@@ -49,11 +49,12 @@ class ThreadedUvicorn:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def appserver(tmpdir, setup_database, celery_session_app, celery_session_worker):
|
||||
def appserver(tmpdir, database, celery_session_app, celery_session_worker):
|
||||
import threading
|
||||
|
||||
from reflector.app import app
|
||||
from reflector.db import get_database
|
||||
|
||||
# Database connection handled by SQLAlchemy engine
|
||||
from reflector.settings import settings
|
||||
|
||||
DATA_DIR = settings.DATA_DIR
|
||||
@@ -77,13 +78,8 @@ def appserver(tmpdir, setup_database, celery_session_app, celery_session_worker)
|
||||
server_instance = Server(config)
|
||||
|
||||
async def start_server():
|
||||
# Initialize database connection in this event loop
|
||||
database = get_database()
|
||||
await database.connect()
|
||||
try:
|
||||
await server_instance.serve()
|
||||
finally:
|
||||
await database.disconnect()
|
||||
# Database connections managed by SQLAlchemy engine
|
||||
await server_instance.serve()
|
||||
|
||||
# Signal that server is starting
|
||||
server_started.set()
|
||||
@@ -115,12 +111,6 @@ def appserver(tmpdir, setup_database, celery_session_app, celery_session_worker)
|
||||
settings.DATA_DIR = DATA_DIR
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def celery_includes():
|
||||
return ["reflector.pipelines.main_live_pipeline"]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_database")
|
||||
@pytest.mark.usefixtures("celery_session_app")
|
||||
@pytest.mark.usefixtures("celery_session_worker")
|
||||
@pytest.mark.asyncio
|
||||
@@ -168,7 +158,7 @@ async def test_transcript_rtc_and_websocket(
|
||||
except Exception as e:
|
||||
print(f"Test websocket: EXCEPTION {e}")
|
||||
finally:
|
||||
ws.close()
|
||||
await ws.close()
|
||||
print("Test websocket: DISCONNECTED")
|
||||
|
||||
websocket_task = asyncio.get_event_loop().create_task(websocket_task())
|
||||
@@ -285,7 +275,6 @@ async def test_transcript_rtc_and_websocket(
|
||||
assert audio_resp.headers["Content-Type"] == "audio/mpeg"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_database")
|
||||
@pytest.mark.usefixtures("celery_session_app")
|
||||
@pytest.mark.usefixtures("celery_session_worker")
|
||||
@pytest.mark.asyncio
|
||||
|
||||
@@ -4,7 +4,6 @@ import time
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("setup_database")
|
||||
@pytest.mark.usefixtures("celery_session_app")
|
||||
@pytest.mark.usefixtures("celery_session_worker")
|
||||
@pytest.mark.asyncio
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
"""Integration tests for WebVTT auto-update functionality in Transcript model."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import select
|
||||
|
||||
from reflector.db import get_database
|
||||
from reflector.db.base import TranscriptModel
|
||||
from reflector.db.transcripts import (
|
||||
SourceKind,
|
||||
TranscriptController,
|
||||
TranscriptTopic,
|
||||
transcripts,
|
||||
transcripts_controller,
|
||||
)
|
||||
from reflector.processors.types import Word
|
||||
|
||||
@@ -16,30 +17,35 @@ from reflector.processors.types import Word
|
||||
class TestWebVTTAutoUpdate:
|
||||
"""Test that WebVTT field auto-updates when Transcript is created or modified."""
|
||||
|
||||
async def test_webvtt_not_updated_on_transcript_creation_without_topics(self):
|
||||
async def test_webvtt_not_updated_on_transcript_creation_without_topics(
|
||||
self, db_session
|
||||
):
|
||||
"""WebVTT should be None when creating transcript without topics."""
|
||||
controller = TranscriptController()
|
||||
# Using global transcripts_controller
|
||||
|
||||
transcript = await controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
)
|
||||
|
||||
try:
|
||||
result = await get_database().fetch_one(
|
||||
transcripts.select().where(transcripts.c.id == transcript.id)
|
||||
result = await db_session.execute(
|
||||
select(TranscriptModel).where(TranscriptModel.id == transcript.id)
|
||||
)
|
||||
row = result.scalar_one_or_none()
|
||||
|
||||
assert result is not None
|
||||
assert result["webvtt"] is None
|
||||
assert row is not None
|
||||
assert row.webvtt is None
|
||||
finally:
|
||||
await controller.remove_by_id(transcript.id)
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
async def test_webvtt_updated_on_upsert_topic(self):
|
||||
async def test_webvtt_updated_on_upsert_topic(self, db_session):
|
||||
"""WebVTT should update when upserting topics via upsert_topic method."""
|
||||
controller = TranscriptController()
|
||||
# Using global transcripts_controller
|
||||
|
||||
transcript = await controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
)
|
||||
@@ -56,14 +62,15 @@ class TestWebVTTAutoUpdate:
|
||||
],
|
||||
)
|
||||
|
||||
await controller.upsert_topic(transcript, topic)
|
||||
await transcripts_controller.upsert_topic(db_session, transcript, topic)
|
||||
|
||||
result = await get_database().fetch_one(
|
||||
transcripts.select().where(transcripts.c.id == transcript.id)
|
||||
result = await db_session.execute(
|
||||
select(TranscriptModel).where(TranscriptModel.id == transcript.id)
|
||||
)
|
||||
row = result.scalar_one_or_none()
|
||||
|
||||
assert result is not None
|
||||
webvtt = result["webvtt"]
|
||||
assert row is not None
|
||||
webvtt = row.webvtt
|
||||
|
||||
assert webvtt is not None
|
||||
assert "WEBVTT" in webvtt
|
||||
@@ -71,13 +78,14 @@ class TestWebVTTAutoUpdate:
|
||||
assert "<v Speaker0>" in webvtt
|
||||
|
||||
finally:
|
||||
await controller.remove_by_id(transcript.id)
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
async def test_webvtt_updated_on_direct_topics_update(self):
|
||||
async def test_webvtt_updated_on_direct_topics_update(self, db_session):
|
||||
"""WebVTT should update when updating topics field directly."""
|
||||
controller = TranscriptController()
|
||||
# Using global transcripts_controller
|
||||
|
||||
transcript = await controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
)
|
||||
@@ -96,28 +104,32 @@ class TestWebVTTAutoUpdate:
|
||||
}
|
||||
]
|
||||
|
||||
await controller.update(transcript, {"topics": topics_data})
|
||||
|
||||
# Fetch from DB
|
||||
result = await get_database().fetch_one(
|
||||
transcripts.select().where(transcripts.c.id == transcript.id)
|
||||
await transcripts_controller.update(
|
||||
db_session, transcript, {"topics": topics_data}
|
||||
)
|
||||
|
||||
assert result is not None
|
||||
webvtt = result["webvtt"]
|
||||
# Fetch from DB
|
||||
result = await db_session.execute(
|
||||
select(TranscriptModel).where(TranscriptModel.id == transcript.id)
|
||||
)
|
||||
row = result.scalar_one_or_none()
|
||||
|
||||
assert row is not None
|
||||
webvtt = row.webvtt
|
||||
|
||||
assert webvtt is not None
|
||||
assert "WEBVTT" in webvtt
|
||||
assert "First sentence" in webvtt
|
||||
|
||||
finally:
|
||||
await controller.remove_by_id(transcript.id)
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
async def test_webvtt_updated_manually_with_handle_topics_update(self):
|
||||
async def test_webvtt_updated_manually_with_handle_topics_update(self, db_session):
|
||||
"""Test that _handle_topics_update works when called manually."""
|
||||
controller = TranscriptController()
|
||||
# Using global transcripts_controller
|
||||
|
||||
transcript = await controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
)
|
||||
@@ -138,15 +150,16 @@ class TestWebVTTAutoUpdate:
|
||||
|
||||
values = {"topics": transcript.topics_dump()}
|
||||
|
||||
await controller.update(transcript, values)
|
||||
await transcripts_controller.update(db_session, transcript, values)
|
||||
|
||||
# Fetch from DB
|
||||
result = await get_database().fetch_one(
|
||||
transcripts.select().where(transcripts.c.id == transcript.id)
|
||||
result = await db_session.execute(
|
||||
select(TranscriptModel).where(TranscriptModel.id == transcript.id)
|
||||
)
|
||||
row = result.scalar_one_or_none()
|
||||
|
||||
assert result is not None
|
||||
webvtt = result["webvtt"]
|
||||
assert row is not None
|
||||
webvtt = row.webvtt
|
||||
|
||||
assert webvtt is not None
|
||||
assert "WEBVTT" in webvtt
|
||||
@@ -154,13 +167,14 @@ class TestWebVTTAutoUpdate:
|
||||
assert "<v Speaker0>" in webvtt
|
||||
|
||||
finally:
|
||||
await controller.remove_by_id(transcript.id)
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
async def test_webvtt_update_with_non_sequential_topics_fails(self):
|
||||
async def test_webvtt_update_with_non_sequential_topics_fails(self, db_session):
|
||||
"""Test that non-sequential topics raise assertion error."""
|
||||
controller = TranscriptController()
|
||||
# Using global transcripts_controller
|
||||
|
||||
transcript = await controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
)
|
||||
@@ -186,13 +200,14 @@ class TestWebVTTAutoUpdate:
|
||||
assert "Words are not in sequence" in str(exc_info.value)
|
||||
|
||||
finally:
|
||||
await controller.remove_by_id(transcript.id)
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
async def test_multiple_speakers_in_webvtt(self):
|
||||
async def test_multiple_speakers_in_webvtt(self, db_session):
|
||||
"""Test WebVTT generation with multiple speakers."""
|
||||
controller = TranscriptController()
|
||||
# Using global transcripts_controller
|
||||
|
||||
transcript = await controller.add(
|
||||
transcript = await transcripts_controller.add(
|
||||
db_session,
|
||||
name="Test Transcript",
|
||||
source_kind=SourceKind.FILE,
|
||||
)
|
||||
@@ -213,15 +228,16 @@ class TestWebVTTAutoUpdate:
|
||||
transcript.upsert_topic(topic)
|
||||
values = {"topics": transcript.topics_dump()}
|
||||
|
||||
await controller.update(transcript, values)
|
||||
await transcripts_controller.update(db_session, transcript, values)
|
||||
|
||||
# Fetch from DB
|
||||
result = await get_database().fetch_one(
|
||||
transcripts.select().where(transcripts.c.id == transcript.id)
|
||||
result = await db_session.execute(
|
||||
select(TranscriptModel).where(TranscriptModel.id == transcript.id)
|
||||
)
|
||||
row = result.scalar_one_or_none()
|
||||
|
||||
assert result is not None
|
||||
webvtt = result["webvtt"]
|
||||
assert row is not None
|
||||
webvtt = row.webvtt
|
||||
|
||||
assert webvtt is not None
|
||||
assert "<v Speaker0>" in webvtt
|
||||
@@ -231,4 +247,4 @@ class TestWebVTTAutoUpdate:
|
||||
assert "Goodbye" in webvtt
|
||||
|
||||
finally:
|
||||
await controller.remove_by_id(transcript.id)
|
||||
await transcripts_controller.remove_by_id(db_session, transcript.id)
|
||||
|
||||
3269
server/uv.lock
generated
3269
server/uv.lock
generated
File diff suppressed because it is too large
Load Diff
1
www/.npmrc
Normal file
1
www/.npmrc
Normal file
@@ -0,0 +1 @@
|
||||
minimum-release-age=1440 #24hr in minutes
|
||||
345
www/app/(app)/rooms/_components/ICSSettings.tsx
Normal file
345
www/app/(app)/rooms/_components/ICSSettings.tsx
Normal file
@@ -0,0 +1,345 @@
|
||||
import {
|
||||
VStack,
|
||||
HStack,
|
||||
Field,
|
||||
Input,
|
||||
Select,
|
||||
Checkbox,
|
||||
Button,
|
||||
Text,
|
||||
Badge,
|
||||
createListCollection,
|
||||
Spinner,
|
||||
Box,
|
||||
IconButton,
|
||||
} from "@chakra-ui/react";
|
||||
import { useState, useEffect, useRef } from "react";
|
||||
import { LuRefreshCw, LuCopy, LuCheck } from "react-icons/lu";
|
||||
import { FaCheckCircle, FaExclamationCircle } from "react-icons/fa";
|
||||
import { useRoomIcsSync, useRoomIcsStatus } from "../../../lib/apiHooks";
|
||||
import { toaster } from "../../../components/ui/toaster";
|
||||
import { roomAbsoluteUrl } from "../../../lib/routesClient";
|
||||
import {
|
||||
assertExists,
|
||||
assertExistsAndNonEmptyString,
|
||||
NonEmptyString,
|
||||
parseNonEmptyString,
|
||||
} from "../../../lib/utils";
|
||||
|
||||
interface ICSSettingsProps {
|
||||
roomName: NonEmptyString | null;
|
||||
icsUrl?: string;
|
||||
icsEnabled?: boolean;
|
||||
icsFetchInterval?: number;
|
||||
icsLastSync?: string;
|
||||
icsLastEtag?: string;
|
||||
onChange: (settings: Partial<ICSSettingsData>) => void;
|
||||
isOwner?: boolean;
|
||||
isEditing?: boolean;
|
||||
}
|
||||
|
||||
export interface ICSSettingsData {
|
||||
ics_url: string;
|
||||
ics_enabled: boolean;
|
||||
ics_fetch_interval: number;
|
||||
}
|
||||
|
||||
const fetchIntervalOptions = [
|
||||
{ label: "1 minute", value: "1" },
|
||||
{ label: "5 minutes", value: "5" },
|
||||
{ label: "10 minutes", value: "10" },
|
||||
{ label: "30 minutes", value: "30" },
|
||||
{ label: "1 hour", value: "60" },
|
||||
];
|
||||
|
||||
export default function ICSSettings({
|
||||
roomName,
|
||||
icsUrl = "",
|
||||
icsEnabled = false,
|
||||
icsFetchInterval = 5,
|
||||
icsLastSync,
|
||||
icsLastEtag,
|
||||
onChange,
|
||||
isOwner = true,
|
||||
isEditing = false,
|
||||
}: ICSSettingsProps) {
|
||||
const [syncStatus, setSyncStatus] = useState<
|
||||
"idle" | "syncing" | "success" | "error"
|
||||
>("idle");
|
||||
const [syncMessage, setSyncMessage] = useState<string>("");
|
||||
const [syncResult, setSyncResult] = useState<{
|
||||
eventsFound: number;
|
||||
totalEvents: number;
|
||||
eventsCreated: number;
|
||||
eventsUpdated: number;
|
||||
} | null>(null);
|
||||
const [justCopied, setJustCopied] = useState(false);
|
||||
const roomUrlInputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
const syncMutation = useRoomIcsSync();
|
||||
|
||||
const fetchIntervalCollection = createListCollection({
|
||||
items: fetchIntervalOptions,
|
||||
});
|
||||
|
||||
const handleCopyRoomUrl = async () => {
|
||||
try {
|
||||
await navigator.clipboard.writeText(
|
||||
roomAbsoluteUrl(assertExists(roomName)),
|
||||
);
|
||||
setJustCopied(true);
|
||||
|
||||
toaster
|
||||
.create({
|
||||
placement: "top",
|
||||
duration: 3000,
|
||||
render: ({ dismiss }) => (
|
||||
<Box
|
||||
bg="green.500"
|
||||
color="white"
|
||||
px={4}
|
||||
py={3}
|
||||
borderRadius="md"
|
||||
display="flex"
|
||||
alignItems="center"
|
||||
gap={2}
|
||||
boxShadow="lg"
|
||||
>
|
||||
<LuCheck />
|
||||
<Text>Room URL copied to clipboard!</Text>
|
||||
</Box>
|
||||
),
|
||||
})
|
||||
.then(() => {});
|
||||
|
||||
setTimeout(() => {
|
||||
setJustCopied(false);
|
||||
}, 2000);
|
||||
} catch (err) {
|
||||
console.error("Failed to copy room url:", err);
|
||||
}
|
||||
};
|
||||
|
||||
const handleRoomUrlClick = () => {
|
||||
if (roomUrlInputRef.current) {
|
||||
roomUrlInputRef.current.select();
|
||||
handleCopyRoomUrl().then(() => {});
|
||||
}
|
||||
};
|
||||
|
||||
// Clear sync results when dialog closes
|
||||
useEffect(() => {
|
||||
if (!isEditing) {
|
||||
setSyncStatus("idle");
|
||||
setSyncResult(null);
|
||||
setSyncMessage("");
|
||||
}
|
||||
}, [isEditing]);
|
||||
|
||||
const handleForceSync = async () => {
|
||||
if (!roomName || !isEditing) return;
|
||||
|
||||
// Clear previous results
|
||||
setSyncStatus("syncing");
|
||||
setSyncResult(null);
|
||||
setSyncMessage("");
|
||||
|
||||
try {
|
||||
const result = await syncMutation.mutateAsync({
|
||||
params: {
|
||||
path: { room_name: roomName },
|
||||
},
|
||||
});
|
||||
|
||||
if (result.status === "success" || result.status === "unchanged") {
|
||||
setSyncStatus("success");
|
||||
setSyncResult({
|
||||
eventsFound: result.events_found || 0,
|
||||
totalEvents: result.total_events || 0,
|
||||
eventsCreated: result.events_created || 0,
|
||||
eventsUpdated: result.events_updated || 0,
|
||||
});
|
||||
} else {
|
||||
setSyncStatus("error");
|
||||
setSyncMessage(result.error || "Sync failed");
|
||||
}
|
||||
} catch (err: any) {
|
||||
setSyncStatus("error");
|
||||
setSyncMessage(err.body?.detail || "Failed to force sync calendar");
|
||||
}
|
||||
};
|
||||
|
||||
if (!isOwner) {
|
||||
return null; // ICS settings only visible to room owner
|
||||
}
|
||||
|
||||
return (
|
||||
<VStack gap={4} align="stretch">
|
||||
<Field.Root>
|
||||
<Checkbox.Root
|
||||
checked={icsEnabled}
|
||||
onCheckedChange={(e) => onChange({ ics_enabled: !!e.checked })}
|
||||
>
|
||||
<Checkbox.HiddenInput />
|
||||
<Checkbox.Control>
|
||||
<Checkbox.Indicator />
|
||||
</Checkbox.Control>
|
||||
<Checkbox.Label>Enable ICS calendar sync</Checkbox.Label>
|
||||
</Checkbox.Root>
|
||||
</Field.Root>
|
||||
|
||||
{icsEnabled && (
|
||||
<>
|
||||
<Field.Root>
|
||||
<Field.Label>Room URL</Field.Label>
|
||||
<Field.HelperText>
|
||||
To enable Reflector to recognize your calendar events as meetings,
|
||||
add this URL as the location in your calendar events
|
||||
</Field.HelperText>
|
||||
{roomName ? (
|
||||
<HStack gap={0} position="relative" width="100%">
|
||||
<Input
|
||||
ref={roomUrlInputRef}
|
||||
value={roomAbsoluteUrl(parseNonEmptyString(roomName))}
|
||||
readOnly
|
||||
onClick={handleRoomUrlClick}
|
||||
cursor="pointer"
|
||||
bg="gray.100"
|
||||
_hover={{ bg: "gray.200" }}
|
||||
_focus={{ bg: "gray.200" }}
|
||||
pr="90px"
|
||||
width="100%"
|
||||
/>
|
||||
<HStack position="absolute" right="4px" gap={1} zIndex={1}>
|
||||
<IconButton
|
||||
aria-label="Copy room URL"
|
||||
onClick={handleCopyRoomUrl}
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
>
|
||||
{justCopied ? <LuCheck /> : <LuCopy />}
|
||||
</IconButton>
|
||||
</HStack>
|
||||
</HStack>
|
||||
) : null}
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root>
|
||||
<Field.Label>ICS Calendar URL</Field.Label>
|
||||
<Input
|
||||
placeholder="https://calendar.google.com/calendar/ical/..."
|
||||
value={icsUrl}
|
||||
onChange={(e) => onChange({ ics_url: e.target.value })}
|
||||
/>
|
||||
<Field.HelperText>
|
||||
Enter the ICS URL from Google Calendar, Outlook, or other calendar
|
||||
services
|
||||
</Field.HelperText>
|
||||
</Field.Root>
|
||||
|
||||
<Field.Root>
|
||||
<Field.Label>Sync Interval</Field.Label>
|
||||
<Select.Root
|
||||
collection={fetchIntervalCollection}
|
||||
value={[icsFetchInterval.toString()]}
|
||||
onValueChange={(details) => {
|
||||
const value = parseInt(details.value[0]);
|
||||
onChange({ ics_fetch_interval: value });
|
||||
}}
|
||||
>
|
||||
<Select.Trigger>
|
||||
<Select.ValueText />
|
||||
</Select.Trigger>
|
||||
<Select.Content>
|
||||
{fetchIntervalOptions.map((option) => (
|
||||
<Select.Item key={option.value} item={option}>
|
||||
{option.label}
|
||||
</Select.Item>
|
||||
))}
|
||||
</Select.Content>
|
||||
</Select.Root>
|
||||
<Field.HelperText>
|
||||
How often to check for calendar updates
|
||||
</Field.HelperText>
|
||||
</Field.Root>
|
||||
|
||||
{icsUrl && isEditing && roomName && (
|
||||
<HStack gap={3}>
|
||||
<Button
|
||||
size="sm"
|
||||
variant="outline"
|
||||
onClick={handleForceSync}
|
||||
disabled={syncStatus === "syncing"}
|
||||
>
|
||||
{syncStatus === "syncing" ? (
|
||||
<Spinner size="sm" />
|
||||
) : (
|
||||
<LuRefreshCw />
|
||||
)}
|
||||
Force Sync
|
||||
</Button>
|
||||
</HStack>
|
||||
)}
|
||||
|
||||
{syncResult && syncStatus === "success" && (
|
||||
<Box
|
||||
p={3}
|
||||
borderRadius="md"
|
||||
bg="green.50"
|
||||
borderLeft="4px solid"
|
||||
borderColor="green.400"
|
||||
>
|
||||
<VStack gap={1} align="stretch">
|
||||
<Text fontSize="sm" color="green.800" fontWeight="medium">
|
||||
Sync completed
|
||||
</Text>
|
||||
<Text fontSize="sm" color="green.700">
|
||||
{syncResult.totalEvents} events downloaded,{" "}
|
||||
{syncResult.eventsFound} match this room
|
||||
</Text>
|
||||
{(syncResult.eventsCreated > 0 ||
|
||||
syncResult.eventsUpdated > 0) && (
|
||||
<Text fontSize="sm" color="green.700">
|
||||
{syncResult.eventsCreated} created,{" "}
|
||||
{syncResult.eventsUpdated} updated
|
||||
</Text>
|
||||
)}
|
||||
</VStack>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{syncMessage && (
|
||||
<Box
|
||||
p={3}
|
||||
borderRadius="md"
|
||||
bg={syncStatus === "success" ? "green.50" : "red.50"}
|
||||
borderLeft="4px solid"
|
||||
borderColor={syncStatus === "success" ? "green.400" : "red.400"}
|
||||
>
|
||||
<Text
|
||||
fontSize="sm"
|
||||
color={syncStatus === "success" ? "green.800" : "red.800"}
|
||||
>
|
||||
{syncMessage}
|
||||
</Text>
|
||||
</Box>
|
||||
)}
|
||||
|
||||
{icsLastSync && (
|
||||
<HStack gap={4} fontSize="sm" color="gray.600">
|
||||
<HStack>
|
||||
<FaCheckCircle color="green" />
|
||||
<Text>Last sync: {new Date(icsLastSync).toLocaleString()}</Text>
|
||||
</HStack>
|
||||
{icsLastEtag && (
|
||||
<Badge colorScheme="blue" fontSize="xs">
|
||||
ETag: {icsLastEtag.slice(0, 8)}...
|
||||
</Badge>
|
||||
)}
|
||||
</HStack>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</VStack>
|
||||
);
|
||||
}
|
||||
@@ -4,12 +4,13 @@ import type { components } from "../../../reflector-api";
|
||||
type Room = components["schemas"]["Room"];
|
||||
import { RoomTable } from "./RoomTable";
|
||||
import { RoomCards } from "./RoomCards";
|
||||
import { NonEmptyString } from "../../../lib/utils";
|
||||
|
||||
interface RoomListProps {
|
||||
title: string;
|
||||
rooms: Room[];
|
||||
linkCopied: string;
|
||||
onCopyUrl: (roomName: string) => void;
|
||||
onCopyUrl: (roomName: NonEmptyString) => void;
|
||||
onEdit: (roomId: string, roomData: any) => void;
|
||||
onDelete: (roomId: string) => void;
|
||||
emptyMessage?: string;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user