Compare commits

..

2 Commits

Igor Loskutov
df6916385b fix: address review feedback
- Add PUBLIC_MODE auth guard on bulk-status endpoint
- Convert DB models to view models via model_validate()
- Early return when no accessible rooms (skip DB queries)
- BulkMeetingStatusMap: Partial<Record> for type honesty
- Sort roomNames in query key for cache stability
- Remove redundant empty-guard in queryFn
- Add 7 backend tests: auth, redaction, whereby host_room_url, 401, empty
- Add 2 frontend tests: error handling, unauthenticated case
2026-02-05 20:30:26 -05:00
Igor Loskutov
083a50cbcd fix: batch room meeting status queries via prop-drilling
Alternative to the batcher approach (#848): parent fetches all room
meeting statuses in a single bulk POST and passes data down as props.
No extra dependency (@yornaath/batshit), no implicit batching magic.

Backend: POST /v1/rooms/meetings/bulk-status + bulk DB methods.
Frontend: useRoomsBulkMeetingStatus hook in RoomList, MeetingStatus
receives data as props instead of calling per-room hooks.
CI: fix pnpm 8→10 auto-detect, add concurrency group.
Tests: Jest+jsdom+testing-library for bulk hook.
2026-02-05 20:04:31 -05:00
20 changed files with 1738 additions and 249 deletions
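A condensed sketch of the prop-drilling flow the second commit describes, assembled from the RoomList/RoomTable/apiHooks diffs below (not a verbatim excerpt: the component name RoomListSketch is invented here, imports and unrelated props are omitted, and MeetingStatus is the component defined in the RoomTable.tsx diff):

import { useMemo } from "react";
import { useRoomsBulkMeetingStatus, BulkMeetingStatusMap } from "../../../lib/apiHooks";

function RoomListSketch({ rooms }: { rooms: { name: string }[] }) {
  // Parent fetches once for every visible room: a single bulk POST.
  const roomNames = useMemo(() => rooms.map((r) => r.name), [rooms]);
  const bulkStatusQuery = useRoomsBulkMeetingStatus(roomNames);
  const meetingStatusMap: BulkMeetingStatusMap = bulkStatusQuery.data ?? {};

  // Children receive their slice as props — no per-room hooks, no request waterfall.
  return (
    <>
      {rooms.map((room) => (
        <MeetingStatus
          key={room.name}
          activeMeetings={meetingStatusMap[room.name]?.active_meetings ?? []}
          upcomingMeetings={meetingStatusMap[room.name]?.upcoming_events ?? []}
          isLoading={bulkStatusQuery.isLoading}
        />
      ))}
    </>
  );
}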

View File

@@ -13,6 +13,9 @@ on:
 jobs:
   test-next-server:
     runs-on: ubuntu-latest
+    concurrency:
+      group: test-next-server-${{ github.ref }}
+      cancel-in-progress: true
     defaults:
       run:
@@ -21,17 +24,12 @@ jobs:
     steps:
       - uses: actions/checkout@v4
-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: '20'
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
-          version: 8
-      - name: Setup Node.js cache
+          package_json_file: './www/package.json'
+      - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
           node-version: '20'
@@ -42,4 +40,4 @@ jobs:
         run: pnpm install
       - name: Run tests
         run: pnpm test

View File

@@ -1,35 +0,0 @@
"""drop_use_celery_column
Revision ID: 3aa20b96d963
Revises: e69f08ead8ea
Create Date: 2026-02-05 10:12:44.065279
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "3aa20b96d963"
down_revision: Union[str, None] = "e69f08ead8ea"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
with op.batch_alter_table("room", schema=None) as batch_op:
batch_op.drop_column("use_celery")
def downgrade() -> None:
with op.batch_alter_table("room", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"use_celery",
sa.Boolean(),
server_default=sa.text("false"),
nullable=False,
)
)

View File

@@ -104,6 +104,26 @@ class CalendarEventController:
         results = await get_database().fetch_all(query)
         return [CalendarEvent(**result) for result in results]

+    async def get_upcoming_for_rooms(
+        self, room_ids: list[str], minutes_ahead: int = 120
+    ) -> list[CalendarEvent]:
+        now = datetime.now(timezone.utc)
+        future_time = now + timedelta(minutes=minutes_ahead)
+        query = (
+            calendar_events.select()
+            .where(
+                sa.and_(
+                    calendar_events.c.room_id.in_(room_ids),
+                    calendar_events.c.is_deleted == False,
+                    calendar_events.c.start_time <= future_time,
+                    calendar_events.c.end_time >= now,
+                )
+            )
+            .order_by(calendar_events.c.start_time.asc())
+        )
+        results = await get_database().fetch_all(query)
+        return [CalendarEvent(**result) for result in results]
+
     async def get_by_id(self, event_id: str) -> CalendarEvent | None:
         query = calendar_events.select().where(calendar_events.c.id == event_id)
         result = await get_database().fetch_one(query)

View File

@@ -301,6 +301,23 @@ class MeetingController:
         results = await get_database().fetch_all(query)
         return [Meeting(**result) for result in results]

+    async def get_all_active_for_rooms(
+        self, room_ids: list[str], current_time: datetime
+    ) -> list[Meeting]:
+        query = (
+            meetings.select()
+            .where(
+                sa.and_(
+                    meetings.c.room_id.in_(room_ids),
+                    meetings.c.end_date > current_time,
+                    meetings.c.is_active,
+                )
+            )
+            .order_by(meetings.c.end_date.desc())
+        )
+        results = await get_database().fetch_all(query)
+        return [Meeting(**result) for result in results]
+
     async def get_active_by_calendar_event(
         self, room: Room, calendar_event_id: str, current_time: datetime
     ) -> Meeting | None:

View File

@@ -57,6 +57,12 @@ rooms = sqlalchemy.Table(
         sqlalchemy.String,
         nullable=False,
     ),
+    sqlalchemy.Column(
+        "use_celery",
+        sqlalchemy.Boolean,
+        nullable=False,
+        server_default=false(),
+    ),
     sqlalchemy.Column(
         "skip_consent",
         sqlalchemy.Boolean,
@@ -91,6 +97,7 @@ class Room(BaseModel):
     ics_last_sync: datetime | None = None
     ics_last_etag: str | None = None
     platform: Platform = Field(default_factory=lambda: settings.DEFAULT_VIDEO_PLATFORM)
+    use_celery: bool = False
     skip_consent: bool = False
@@ -238,6 +245,11 @@
         return room

+    async def get_by_names(self, names: list[str]) -> list[Room]:
+        query = rooms.select().where(rooms.c.name.in_(names))
+        results = await get_database().fetch_all(query)
+        return [Room(**r) for r in results]
+
     async def get_ics_enabled(self) -> list[Room]:
         query = rooms.select().where(
             rooms.c.ics_enabled == True, rooms.c.ics_url != None

View File

@@ -15,10 +15,14 @@ from hatchet_sdk.clients.rest.exceptions import ApiException, NotFoundException
 from hatchet_sdk.clients.rest.models import V1TaskStatus

 from reflector.db.recordings import recordings_controller
+from reflector.db.rooms import rooms_controller
 from reflector.db.transcripts import Transcript, transcripts_controller
 from reflector.hatchet.client import HatchetClientManager
 from reflector.logger import logger
 from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
+from reflector.pipelines.main_multitrack_pipeline import (
+    task_pipeline_multitrack_process,
+)
 from reflector.utils.string import NonEmptyString
@@ -177,98 +181,124 @@ async def dispatch_transcript_processing(
     Returns AsyncResult for Celery tasks, None for Hatchet workflows.
     """
     if isinstance(config, MultitrackProcessingConfig):
-        # Multitrack processing always uses Hatchet (no Celery fallback)
-        # First check if we can replay (outside transaction since it's read-only)
-        transcript = await transcripts_controller.get_by_id(config.transcript_id)
-        if transcript and transcript.workflow_run_id and not force:
-            can_replay = await HatchetClientManager.can_replay(
-                transcript.workflow_run_id
-            )
-            if can_replay:
-                await HatchetClientManager.replay_workflow(transcript.workflow_run_id)
-                logger.info(
-                    "Replaying Hatchet workflow",
-                    workflow_id=transcript.workflow_run_id,
-                )
-                return None
-            else:
-                # Workflow can't replay (CANCELLED, COMPLETED, or 404 deleted)
-                # Log and proceed to start new workflow
-                try:
-                    status = await HatchetClientManager.get_workflow_run_status(
-                        transcript.workflow_run_id
-                    )
-                    logger.info(
-                        "Old workflow not replayable, starting new",
-                        old_workflow_id=transcript.workflow_run_id,
-                        old_status=status.value,
-                    )
-                except NotFoundException:
-                    # Workflow deleted from Hatchet but ID still in DB
-                    logger.info(
-                        "Old workflow not found in Hatchet, starting new",
-                        old_workflow_id=transcript.workflow_run_id,
-                    )
-
-        # Force: cancel old workflow if exists
-        if force and transcript and transcript.workflow_run_id:
-            try:
-                await HatchetClientManager.cancel_workflow(transcript.workflow_run_id)
-                logger.info(
-                    "Cancelled old workflow (--force)",
-                    workflow_id=transcript.workflow_run_id,
-                )
-            except NotFoundException:
-                logger.info(
-                    "Old workflow already deleted (--force)",
-                    workflow_id=transcript.workflow_run_id,
-                )
-                await transcripts_controller.update(transcript, {"workflow_run_id": None})
-
-        # Re-fetch and check for concurrent dispatch (optimistic approach).
-        # No database lock - worst case is duplicate dispatch, but Hatchet
-        # workflows are idempotent so this is acceptable.
-        transcript = await transcripts_controller.get_by_id(config.transcript_id)
-        if transcript and transcript.workflow_run_id:
-            # Another process started a workflow between validation and now
-            try:
-                status = await HatchetClientManager.get_workflow_run_status(
-                    transcript.workflow_run_id
-                )
-                if status in (V1TaskStatus.RUNNING, V1TaskStatus.QUEUED):
-                    logger.info(
-                        "Concurrent workflow detected, skipping dispatch",
-                        workflow_id=transcript.workflow_run_id,
-                    )
-                    return None
-            except ApiException:
-                # Workflow might be gone (404) or API issue - proceed with new workflow
-                pass
-
-        workflow_id = await HatchetClientManager.start_workflow(
-            workflow_name="DiarizationPipeline",
-            input_data={
-                "recording_id": config.recording_id,
-                "tracks": [{"s3_key": k} for k in config.track_keys],
-                "bucket_name": config.bucket_name,
-                "transcript_id": config.transcript_id,
-                "room_id": config.room_id,
-            },
-            additional_metadata={
-                "transcript_id": config.transcript_id,
-                "recording_id": config.recording_id,
-                "daily_recording_id": config.recording_id,
-            },
-        )
-        if transcript:
-            await transcripts_controller.update(
-                transcript, {"workflow_run_id": workflow_id}
-            )
-        logger.info("Hatchet workflow dispatched", workflow_id=workflow_id)
-        return None
+        use_celery = False
+        if config.room_id:
+            room = await rooms_controller.get_by_id(config.room_id)
+            use_celery = room.use_celery if room else False
+
+        use_hatchet = not use_celery
+        if use_celery:
+            logger.info(
+                "Room uses legacy Celery processing",
+                room_id=config.room_id,
+                transcript_id=config.transcript_id,
+            )
+
+        if use_hatchet:
+            # First check if we can replay (outside transaction since it's read-only)
+            transcript = await transcripts_controller.get_by_id(config.transcript_id)
+            if transcript and transcript.workflow_run_id and not force:
+                can_replay = await HatchetClientManager.can_replay(
+                    transcript.workflow_run_id
+                )
+                if can_replay:
+                    await HatchetClientManager.replay_workflow(
+                        transcript.workflow_run_id
+                    )
+                    logger.info(
+                        "Replaying Hatchet workflow",
+                        workflow_id=transcript.workflow_run_id,
+                    )
+                    return None
+                else:
+                    # Workflow can't replay (CANCELLED, COMPLETED, or 404 deleted)
+                    # Log and proceed to start new workflow
+                    try:
+                        status = await HatchetClientManager.get_workflow_run_status(
+                            transcript.workflow_run_id
+                        )
+                        logger.info(
+                            "Old workflow not replayable, starting new",
+                            old_workflow_id=transcript.workflow_run_id,
+                            old_status=status.value,
+                        )
+                    except NotFoundException:
+                        # Workflow deleted from Hatchet but ID still in DB
+                        logger.info(
+                            "Old workflow not found in Hatchet, starting new",
+                            old_workflow_id=transcript.workflow_run_id,
+                        )
+
+            # Force: cancel old workflow if exists
+            if force and transcript and transcript.workflow_run_id:
+                try:
+                    await HatchetClientManager.cancel_workflow(
+                        transcript.workflow_run_id
+                    )
+                    logger.info(
+                        "Cancelled old workflow (--force)",
+                        workflow_id=transcript.workflow_run_id,
+                    )
+                except NotFoundException:
+                    logger.info(
+                        "Old workflow already deleted (--force)",
+                        workflow_id=transcript.workflow_run_id,
+                    )
+                    await transcripts_controller.update(
+                        transcript, {"workflow_run_id": None}
+                    )
+
+            # Re-fetch and check for concurrent dispatch (optimistic approach).
+            # No database lock - worst case is duplicate dispatch, but Hatchet
+            # workflows are idempotent so this is acceptable.
+            transcript = await transcripts_controller.get_by_id(config.transcript_id)
+            if transcript and transcript.workflow_run_id:
+                # Another process started a workflow between validation and now
+                try:
+                    status = await HatchetClientManager.get_workflow_run_status(
+                        transcript.workflow_run_id
+                    )
+                    if status in (V1TaskStatus.RUNNING, V1TaskStatus.QUEUED):
+                        logger.info(
+                            "Concurrent workflow detected, skipping dispatch",
+                            workflow_id=transcript.workflow_run_id,
+                        )
+                        return None
+                except ApiException:
+                    # Workflow might be gone (404) or API issue - proceed with new workflow
+                    pass

+            workflow_id = await HatchetClientManager.start_workflow(
+                workflow_name="DiarizationPipeline",
+                input_data={
+                    "recording_id": config.recording_id,
+                    "tracks": [{"s3_key": k} for k in config.track_keys],
+                    "bucket_name": config.bucket_name,
+                    "transcript_id": config.transcript_id,
+                    "room_id": config.room_id,
+                },
+                additional_metadata={
+                    "transcript_id": config.transcript_id,
+                    "recording_id": config.recording_id,
+                    "daily_recording_id": config.recording_id,
+                },
+            )
+            if transcript:
+                await transcripts_controller.update(
+                    transcript, {"workflow_run_id": workflow_id}
+                )
+            logger.info("Hatchet workflow dispatched", workflow_id=workflow_id)
+            return None
+
+        # Celery pipeline (durable workflows disabled)
+        return task_pipeline_multitrack_process.delay(
+            transcript_id=config.transcript_id,
+            bucket_name=config.bucket_name,
+            track_keys=config.track_keys,
+        )
     elif isinstance(config, FileProcessingConfig):
         return task_pipeline_file_process.delay(transcript_id=config.transcript_id)
     else:

View File

@@ -1,7 +1,7 @@
 from pydantic.types import PositiveInt
 from pydantic_settings import BaseSettings, SettingsConfigDict

-from reflector.schemas.platform import DAILY_PLATFORM, Platform
+from reflector.schemas.platform import WHEREBY_PLATFORM, Platform
 from reflector.utils.string import NonEmptyString
@@ -155,7 +155,7 @@ class Settings(BaseSettings):
         None  # Webhook UUID for this environment. Not used by production code
     )

     # Platform Configuration
-    DEFAULT_VIDEO_PLATFORM: Platform = DAILY_PLATFORM
+    DEFAULT_VIDEO_PLATFORM: Platform = WHEREBY_PLATFORM

     # Zulip integration
     ZULIP_REALM: str | None = None

View File

@@ -1,4 +1,6 @@
+import asyncio
 import logging
+from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from enum import Enum
 from typing import Annotated, Any, Literal, Optional
@@ -6,13 +8,14 @@ from typing import Annotated, Any, Literal, Optional
 from fastapi import APIRouter, Depends, HTTPException
 from fastapi_pagination import Page
 from fastapi_pagination.ext.databases import apaginate
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from redis.exceptions import LockError

 import reflector.auth as auth
 from reflector.db import get_database
 from reflector.db.calendar_events import calendar_events_controller
 from reflector.db.meetings import meetings_controller
+from reflector.db.rooms import Room as DbRoom
 from reflector.db.rooms import rooms_controller
 from reflector.redis_cache import RedisAsyncLock
 from reflector.schemas.platform import Platform
@@ -195,6 +198,82 @@ async def rooms_list(
     return paginated

+class BulkStatusRequest(BaseModel):
+    room_names: list[str] = Field(max_length=100)
+
+class RoomMeetingStatus(BaseModel):
+    active_meetings: list[Meeting]
+    upcoming_events: list[CalendarEventResponse]
+
+@router.post("/rooms/meetings/bulk-status", response_model=dict[str, RoomMeetingStatus])
+async def rooms_bulk_meeting_status(
+    request: BulkStatusRequest,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+):
+    if not user and not settings.PUBLIC_MODE:
+        raise HTTPException(status_code=401, detail="Not authenticated")
+
+    user_id = user["sub"] if user else None
+    all_rooms = await rooms_controller.get_by_names(request.room_names)
+    # Filter to rooms the user can see (owned or shared), matching rooms_list behavior
+    rooms = [
+        r
+        for r in all_rooms
+        if r.is_shared or (user_id is not None and r.user_id == user_id)
+    ]
+    room_by_id: dict[str, DbRoom] = {r.id: r for r in rooms}
+    room_ids = list(room_by_id.keys())
+
+    if not room_ids:
+        return {
+            name: RoomMeetingStatus(active_meetings=[], upcoming_events=[])
+            for name in request.room_names
+        }
+
+    current_time = datetime.now(timezone.utc)
+    active_meetings, upcoming_events = await asyncio.gather(
+        meetings_controller.get_all_active_for_rooms(room_ids, current_time),
+        calendar_events_controller.get_upcoming_for_rooms(room_ids),
+    )
+
+    # Group by room name, converting DB models to view models
+    active_by_room: dict[str, list[Meeting]] = defaultdict(list)
+    for m in active_meetings:
+        room = room_by_id.get(m.room_id)
+        if not room:
+            continue
+        m.platform = room.platform
+        if user_id != room.user_id and m.platform == "whereby":
+            m.host_room_url = ""
+        active_by_room[room.name].append(
+            Meeting.model_validate(m, from_attributes=True)
+        )
+
+    upcoming_by_room: dict[str, list[CalendarEventResponse]] = defaultdict(list)
+    for e in upcoming_events:
+        room = room_by_id.get(e.room_id)
+        if not room:
+            continue
+        if user_id != room.user_id:
+            e.description = None
+            e.attendees = None
+        upcoming_by_room[room.name].append(
+            CalendarEventResponse.model_validate(e, from_attributes=True)
+        )
+
+    result: dict[str, RoomMeetingStatus] = {}
+    for name in request.room_names:
+        result[name] = RoomMeetingStatus(
+            active_meetings=active_by_room.get(name, []),
+            upcoming_events=upcoming_by_room.get(name, []),
+        )
+    return result
+
 @router.get("/rooms/{room_id}", response_model=RoomDetails)
 async def rooms_get(
     room_id: str,
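For reference, a minimal sketch of exercising the endpoint added above from outside the app (the token and room names are placeholders; a token is assumed to be required unless PUBLIC_MODE is enabled, and the frontend itself goes through the generated API client shown later in this diff):

// Hypothetical standalone call to the new bulk-status endpoint.
const res = await fetch("/v1/rooms/meetings/bulk-status", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${token}`, // placeholder
  },
  body: JSON.stringify({ room_names: ["standup", "design-review"] }),
});
// 200 => { [roomName]: { active_meetings: [...], upcoming_events: [...] } }
const statusByRoom = await res.json();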

View File

@@ -27,6 +27,9 @@ from reflector.db.transcripts import (
 from reflector.hatchet.client import HatchetClientManager
 from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
 from reflector.pipelines.main_live_pipeline import asynctask
+from reflector.pipelines.main_multitrack_pipeline import (
+    task_pipeline_multitrack_process,
+)
 from reflector.pipelines.topic_processing import EmptyPipeline
 from reflector.processors import AudioFileWriterProcessor
 from reflector.processors.audio_waveform_processor import AudioWaveformProcessor
@@ -348,29 +351,49 @@ async def _process_multitrack_recording_inner(
         room_id=room.id,
     )

-    # Multitrack processing always uses Hatchet (no Celery fallback)
-    workflow_id = await HatchetClientManager.start_workflow(
-        workflow_name="DiarizationPipeline",
-        input_data={
-            "recording_id": recording_id,
-            "tracks": [{"s3_key": k} for k in filter_cam_audio_tracks(track_keys)],
-            "bucket_name": bucket_name,
-            "transcript_id": transcript.id,
-            "room_id": room.id,
-        },
-        additional_metadata={
-            "transcript_id": transcript.id,
-            "recording_id": recording_id,
-            "daily_recording_id": recording_id,
-        },
-    )
-    logger.info(
-        "Started Hatchet workflow",
-        workflow_id=workflow_id,
-        transcript_id=transcript.id,
-    )
-    await transcripts_controller.update(transcript, {"workflow_run_id": workflow_id})
+    use_celery = room and room.use_celery
+    use_hatchet = not use_celery
+
+    if use_celery:
+        logger.info(
+            "Room uses legacy Celery processing",
+            room_id=room.id,
+            transcript_id=transcript.id,
+        )
+
+    if use_hatchet:
+        workflow_id = await HatchetClientManager.start_workflow(
+            workflow_name="DiarizationPipeline",
+            input_data={
+                "recording_id": recording_id,
+                "tracks": [{"s3_key": k} for k in filter_cam_audio_tracks(track_keys)],
+                "bucket_name": bucket_name,
+                "transcript_id": transcript.id,
+                "room_id": room.id,
+            },
+            additional_metadata={
+                "transcript_id": transcript.id,
+                "recording_id": recording_id,
+                "daily_recording_id": recording_id,
+            },
+        )
+        logger.info(
+            "Started Hatchet workflow",
+            workflow_id=workflow_id,
+            transcript_id=transcript.id,
+        )
+        await transcripts_controller.update(
+            transcript, {"workflow_run_id": workflow_id}
+        )
+        return
+
+    # Celery pipeline (runs when durable workflows disabled)
+    task_pipeline_multitrack_process.delay(
+        transcript_id=transcript.id,
+        bucket_name=bucket_name,
+        track_keys=filter_cam_audio_tracks(track_keys),
+    )

 @shared_task
@@ -1049,43 +1072,66 @@ async def reprocess_failed_daily_recordings():
             )
             continue

-        # Multitrack reprocessing always uses Hatchet (no Celery fallback)
-        if not transcript:
-            logger.warning(
-                "No transcript for Hatchet reprocessing, skipping",
-                recording_id=recording.id,
-            )
-            continue
-
-        workflow_id = await HatchetClientManager.start_workflow(
-            workflow_name="DiarizationPipeline",
-            input_data={
-                "recording_id": recording.id,
-                "tracks": [
-                    {"s3_key": k}
-                    for k in filter_cam_audio_tracks(recording.track_keys)
-                ],
-                "bucket_name": bucket_name,
-                "transcript_id": transcript.id,
-                "room_id": room.id if room else None,
-            },
-            additional_metadata={
-                "transcript_id": transcript.id,
-                "recording_id": recording.id,
-                "reprocess": True,
-            },
-        )
-        await transcripts_controller.update(
-            transcript, {"workflow_run_id": workflow_id}
-        )
-
-        logger.info(
-            "Queued Daily recording for Hatchet reprocessing",
-            recording_id=recording.id,
-            workflow_id=workflow_id,
-            room_name=meeting.room_name,
-            track_count=len(recording.track_keys),
-        )
+        use_celery = room and room.use_celery
+        use_hatchet = not use_celery
+
+        if use_hatchet:
+            if not transcript:
+                logger.warning(
+                    "No transcript for Hatchet reprocessing, skipping",
+                    recording_id=recording.id,
+                )
+                continue
+            workflow_id = await HatchetClientManager.start_workflow(
+                workflow_name="DiarizationPipeline",
+                input_data={
+                    "recording_id": recording.id,
+                    "tracks": [
+                        {"s3_key": k}
+                        for k in filter_cam_audio_tracks(recording.track_keys)
+                    ],
+                    "bucket_name": bucket_name,
+                    "transcript_id": transcript.id,
+                    "room_id": room.id if room else None,
+                },
+                additional_metadata={
+                    "transcript_id": transcript.id,
+                    "recording_id": recording.id,
+                    "reprocess": True,
+                },
+            )
+            await transcripts_controller.update(
+                transcript, {"workflow_run_id": workflow_id}
+            )
+            logger.info(
+                "Queued Daily recording for Hatchet reprocessing",
+                recording_id=recording.id,
+                workflow_id=workflow_id,
+                room_name=meeting.room_name,
+                track_count=len(recording.track_keys),
+            )
+        else:
+            logger.info(
+                "Queueing Daily recording for Celery reprocessing",
+                recording_id=recording.id,
+                room_name=meeting.room_name,
+                track_count=len(recording.track_keys),
+                transcript_status=transcript.status if transcript else None,
+            )
+
+            # For reprocessing, pass actual recording time (though it's ignored - see _process_multitrack_recording_inner)
+            # Reprocessing uses recording.meeting_id directly instead of time-based matching
+            recording_start_ts = int(recording.recorded_at.timestamp())
+
+            process_multitrack_recording.delay(
+                bucket_name=bucket_name,
+                daily_room_name=meeting.room_name,
+                recording_id=recording.id,
+                track_keys=recording.track_keys,
+                recording_start_ts=recording_start_ts,
+            )

         reprocessed_count += 1

View File

@@ -4,7 +4,7 @@ from unittest.mock import patch

 import pytest

-from reflector.schemas.platform import DAILY_PLATFORM, WHEREBY_PLATFORM
+from reflector.schemas.platform import WHEREBY_PLATFORM

 @pytest.fixture(scope="session", autouse=True)
@@ -14,7 +14,6 @@ def register_mock_platform():
     from reflector.video_platforms.registry import register_platform

     register_platform(WHEREBY_PLATFORM, MockPlatformClient)
-    register_platform(DAILY_PLATFORM, MockPlatformClient)

     yield

View File

@@ -0,0 +1,184 @@
from datetime import datetime, timedelta, timezone
import pytest
from conftest import authenticated_client_ctx
from reflector.db.calendar_events import CalendarEvent, calendar_events_controller
from reflector.db.meetings import meetings_controller
from reflector.db.rooms import Room, rooms_controller
from reflector.settings import settings
async def _create_room(name: str, user_id: str, is_shared: bool = False) -> Room:
return await rooms_controller.add(
name=name,
user_id=user_id,
zulip_auto_post=False,
zulip_stream="",
zulip_topic="",
is_locked=False,
room_mode="normal",
recording_type="cloud",
recording_trigger="automatic-2nd-participant",
is_shared=is_shared,
webhook_url="",
webhook_secret="",
)
async def _create_meeting(room: Room, active: bool = True):
now = datetime.now(timezone.utc)
return await meetings_controller.create(
id=f"meeting-{room.name}-{now.timestamp()}",
room_name=room.name,
room_url="room-url",
host_room_url="host-url",
start_date=now - timedelta(minutes=10),
end_date=now + timedelta(minutes=50) if active else now - timedelta(minutes=1),
room=room,
)
async def _create_calendar_event(room: Room):
now = datetime.now(timezone.utc)
return await calendar_events_controller.upsert(
CalendarEvent(
room_id=room.id,
ics_uid=f"event-{room.name}",
title=f"Upcoming in {room.name}",
description="secret description",
start_time=now + timedelta(minutes=30),
end_time=now + timedelta(minutes=90),
attendees=[{"name": "Alice", "email": "alice@example.com"}],
)
)
@pytest.mark.asyncio
async def test_bulk_status_returns_empty_for_no_rooms(client):
"""Empty room_names returns empty dict."""
async with authenticated_client_ctx():
resp = await client.post("/rooms/meetings/bulk-status", json={"room_names": []})
assert resp.status_code == 200
assert resp.json() == {}
@pytest.mark.asyncio
async def test_bulk_status_returns_active_meetings_and_upcoming_events(client):
"""Owner sees active meetings and upcoming events for their rooms."""
room = await _create_room("bulk-test-room", "randomuserid")
await _create_meeting(room, active=True)
await _create_calendar_event(room)
async with authenticated_client_ctx():
resp = await client.post(
"/rooms/meetings/bulk-status",
json={"room_names": ["bulk-test-room"]},
)
assert resp.status_code == 200
data = resp.json()
assert "bulk-test-room" in data
status = data["bulk-test-room"]
assert len(status["active_meetings"]) == 1
assert len(status["upcoming_events"]) == 1
# Owner sees description
assert status["upcoming_events"][0]["description"] == "secret description"
@pytest.mark.asyncio
async def test_bulk_status_redacts_data_for_non_owner(client):
"""Non-owner of a shared room gets redacted calendar events and no whereby host_room_url."""
room = await _create_room("shared-bulk", "other-user-id", is_shared=True)
await _create_meeting(room, active=True)
await _create_calendar_event(room)
# authenticated as "randomuserid" but room owned by "other-user-id"
async with authenticated_client_ctx():
resp = await client.post(
"/rooms/meetings/bulk-status",
json={"room_names": ["shared-bulk"]},
)
assert resp.status_code == 200
status = resp.json()["shared-bulk"]
assert len(status["active_meetings"]) == 1
assert len(status["upcoming_events"]) == 1
# Non-owner: description and attendees redacted
assert status["upcoming_events"][0]["description"] is None
assert status["upcoming_events"][0]["attendees"] is None
@pytest.mark.asyncio
async def test_bulk_status_filters_private_rooms_of_other_users(client):
"""User cannot see private rooms owned by others."""
await _create_room("private-other", "other-user-id", is_shared=False)
async with authenticated_client_ctx():
resp = await client.post(
"/rooms/meetings/bulk-status",
json={"room_names": ["private-other"]},
)
assert resp.status_code == 200
status = resp.json()["private-other"]
assert status["active_meetings"] == []
assert status["upcoming_events"] == []
@pytest.mark.asyncio
async def test_bulk_status_redacts_whereby_host_room_url_for_non_owner(client):
"""Non-owner of a shared whereby room gets empty host_room_url."""
room = await _create_room("shared-whereby", "other-user-id", is_shared=True)
# Force platform to whereby
from reflector.db import get_database
from reflector.db.rooms import rooms as rooms_table
await get_database().execute(
rooms_table.update()
.where(rooms_table.c.id == room.id)
.values(platform="whereby")
)
await _create_meeting(room, active=True)
async with authenticated_client_ctx():
resp = await client.post(
"/rooms/meetings/bulk-status",
json={"room_names": ["shared-whereby"]},
)
assert resp.status_code == 200
status = resp.json()["shared-whereby"]
assert len(status["active_meetings"]) == 1
assert status["active_meetings"][0]["host_room_url"] == ""
@pytest.mark.asyncio
async def test_bulk_status_unauthenticated_rejected_non_public(client):
"""Unauthenticated request on non-PUBLIC_MODE instance returns 401."""
original = settings.PUBLIC_MODE
try:
settings.PUBLIC_MODE = False
resp = await client.post(
"/rooms/meetings/bulk-status",
json={"room_names": ["any-room"]},
)
assert resp.status_code == 401
finally:
settings.PUBLIC_MODE = original
@pytest.mark.asyncio
async def test_bulk_status_nonexistent_room_returns_empty(client):
"""Requesting a room that doesn't exist returns empty lists."""
async with authenticated_client_ctx():
resp = await client.post(
"/rooms/meetings/bulk-status",
json={"room_names": ["does-not-exist"]},
)
assert resp.status_code == 200
status = resp.json()["does-not-exist"]
assert status["active_meetings"] == []
assert status["upcoming_events"] == []

View File

@@ -1,6 +1,6 @@
 import asyncio
 import time
-from unittest.mock import AsyncMock, patch
+from unittest.mock import patch

 import pytest
 from httpx import ASGITransport, AsyncClient
@@ -142,17 +142,17 @@ async def test_whereby_recording_uses_file_pipeline(client):
             "reflector.services.transcript_process.task_pipeline_file_process"
         ) as mock_file_pipeline,
         patch(
-            "reflector.services.transcript_process.HatchetClientManager"
-        ) as mock_hatchet,
+            "reflector.services.transcript_process.task_pipeline_multitrack_process"
+        ) as mock_multitrack_pipeline,
     ):
         response = await client.post(f"/transcripts/{transcript.id}/process")
         assert response.status_code == 200
         assert response.json()["status"] == "ok"

-        # Whereby recordings should use file pipeline, not Hatchet
+        # Whereby recordings should use file pipeline
         mock_file_pipeline.delay.assert_called_once_with(transcript_id=transcript.id)
-        mock_hatchet.start_workflow.assert_not_called()
+        mock_multitrack_pipeline.delay.assert_not_called()
@@ -177,6 +177,8 @@ async def test_dailyco_recording_uses_multitrack_pipeline(client):
         recording_trigger="automatic-2nd-participant",
         is_shared=False,
     )
+    # Force Celery backend for test
+    await rooms_controller.update(room, {"use_celery": True})

     transcript = await transcripts_controller.add(
         "",
@@ -211,23 +213,18 @@ async def test_dailyco_recording_uses_multitrack_pipeline(client):
             "reflector.services.transcript_process.task_pipeline_file_process"
         ) as mock_file_pipeline,
         patch(
-            "reflector.services.transcript_process.HatchetClientManager"
-        ) as mock_hatchet,
+            "reflector.services.transcript_process.task_pipeline_multitrack_process"
+        ) as mock_multitrack_pipeline,
     ):
-        mock_hatchet.start_workflow = AsyncMock(return_value="test-workflow-id")
         response = await client.post(f"/transcripts/{transcript.id}/process")
         assert response.status_code == 200
         assert response.json()["status"] == "ok"

-        # Daily.co multitrack recordings should use Hatchet workflow
-        mock_hatchet.start_workflow.assert_called_once()
-        call_kwargs = mock_hatchet.start_workflow.call_args.kwargs
-        assert call_kwargs["workflow_name"] == "DiarizationPipeline"
-        assert call_kwargs["input_data"]["transcript_id"] == transcript.id
-        assert call_kwargs["input_data"]["bucket_name"] == "daily-bucket"
-        assert call_kwargs["input_data"]["tracks"] == [
-            {"s3_key": k} for k in track_keys
-        ]
+        # Daily.co multitrack recordings should use multitrack pipeline
+        mock_multitrack_pipeline.delay.assert_called_once_with(
+            transcript_id=transcript.id,
+            bucket_name="daily-bucket",
+            track_keys=track_keys,
+        )
         mock_file_pipeline.delay.assert_not_called()

View File

@@ -1,5 +1,10 @@
+import { useMemo } from "react";
 import { Box, Heading, Text, VStack } from "@chakra-ui/react";
 import type { components } from "../../../reflector-api";
+import {
+  useRoomsBulkMeetingStatus,
+  BulkMeetingStatusMap,
+} from "../../../lib/apiHooks";

 type Room = components["schemas"]["Room"];
 import { RoomTable } from "./RoomTable";
@@ -31,6 +36,10 @@ export function RoomList({
   pt,
   loading,
 }: RoomListProps) {
+  const roomNames = useMemo(() => rooms.map((r) => r.name), [rooms]);
+  const bulkStatusQuery = useRoomsBulkMeetingStatus(roomNames);
+  const meetingStatusMap: BulkMeetingStatusMap = bulkStatusQuery.data ?? {};
+
   return (
     <VStack alignItems="start" gap={4} mb={mb} pt={pt}>
       <Heading size="md">{title}</Heading>
@@ -43,6 +52,8 @@ export function RoomList({
         onEdit={onEdit}
         onDelete={onDelete}
         loading={loading}
+        meetingStatusMap={meetingStatusMap}
+        meetingStatusLoading={bulkStatusQuery.isLoading}
       />
       <RoomCards
         rooms={rooms}

View File

@@ -14,11 +14,7 @@ import {
 import { LuLink, LuRefreshCw } from "react-icons/lu";
 import { FaCalendarAlt } from "react-icons/fa";
 import type { components } from "../../../reflector-api";
-import {
-  useRoomActiveMeetings,
-  useRoomUpcomingMeetings,
-  useRoomIcsSync,
-} from "../../../lib/apiHooks";
+import { useRoomIcsSync, BulkMeetingStatusMap } from "../../../lib/apiHooks";

 type Room = components["schemas"]["Room"];
 type Meeting = components["schemas"]["Meeting"];
@@ -62,6 +58,8 @@ interface RoomTableProps {
   onEdit: (roomId: string, roomData: any) => void;
   onDelete: (roomId: string) => void;
   loading?: boolean;
+  meetingStatusMap: BulkMeetingStatusMap;
+  meetingStatusLoading: boolean;
 }

 const getRoomModeDisplay = (mode: string): string => {
@@ -104,14 +102,16 @@ const getZulipDisplay = (
   return "Enabled";
 };

-function MeetingStatus({ roomName }: { roomName: string }) {
-  const activeMeetingsQuery = useRoomActiveMeetings(roomName);
-  const upcomingMeetingsQuery = useRoomUpcomingMeetings(roomName);
-
-  const activeMeetings = activeMeetingsQuery.data || [];
-  const upcomingMeetings = upcomingMeetingsQuery.data || [];
-
-  if (activeMeetingsQuery.isLoading || upcomingMeetingsQuery.isLoading) {
+function MeetingStatus({
+  activeMeetings,
+  upcomingMeetings,
+  isLoading,
+}: {
+  activeMeetings: Meeting[];
+  upcomingMeetings: CalendarEventResponse[];
+  isLoading: boolean;
+}) {
+  if (isLoading) {
     return <Spinner size="sm" />;
   }
@@ -176,6 +176,8 @@ export function RoomTable({
   onEdit,
   onDelete,
   loading,
+  meetingStatusMap,
+  meetingStatusLoading,
 }: RoomTableProps) {
   const [syncingRooms, setSyncingRooms] = useState<Set<NonEmptyString>>(
     new Set(),
@@ -252,7 +254,15 @@ export function RoomTable({
                 <Link href={`/${room.name}`}>{room.name}</Link>
               </Table.Cell>
               <Table.Cell>
-                <MeetingStatus roomName={room.name} />
+                <MeetingStatus
+                  activeMeetings={
+                    meetingStatusMap[room.name]?.active_meetings ?? []
+                  }
+                  upcomingMeetings={
+                    meetingStatusMap[room.name]?.upcoming_events ?? []
+                  }
+                  isLoading={meetingStatusLoading}
+                />
               </Table.Cell>
               <Table.Cell>
                 {getZulipDisplay(

View File

@@ -0,0 +1,246 @@
import "@testing-library/jest-dom";
// --- Module mocks (hoisted before imports) ---
jest.mock("../apiClient", () => ({
client: {
GET: jest.fn(),
POST: jest.fn(),
PUT: jest.fn(),
PATCH: jest.fn(),
DELETE: jest.fn(),
use: jest.fn(),
},
$api: {
useQuery: jest.fn(),
useMutation: jest.fn(),
queryOptions: (method: string, path: string, init?: unknown) =>
init === undefined
? { queryKey: [method, path] }
: { queryKey: [method, path, init] },
},
API_URL: "http://test",
WEBSOCKET_URL: "ws://test",
configureApiAuth: jest.fn(),
}));
jest.mock("../AuthProvider", () => ({
useAuth: () => ({
status: "authenticated" as const,
accessToken: "test-token",
accessTokenExpires: Date.now() + 3600000,
user: { id: "user1", name: "Test User" },
update: jest.fn(),
signIn: jest.fn(),
signOut: jest.fn(),
lastUserId: "user1",
}),
}));
// --- Imports (after mocks) ---
import React from "react";
import { render, waitFor, screen } from "@testing-library/react";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { useRoomsBulkMeetingStatus, BulkMeetingStatusMap } from "../apiHooks";
import { client } from "../apiClient";
import { ErrorProvider } from "../../(errors)/errorContext";
const mockClient = client as { POST: jest.Mock };
// --- Helpers ---
function mockBulkStatusEndpoint(
roomData?: Record<
string,
{ active_meetings: unknown[]; upcoming_events: unknown[] }
>,
) {
mockClient.POST.mockImplementation(
async (_path: string, options: { body: { room_names: string[] } }) => {
const roomNames: string[] = options.body.room_names;
const src = roomData ?? {};
const data = Object.fromEntries(
roomNames.map((name) => [
name,
src[name] ?? { active_meetings: [], upcoming_events: [] },
]),
);
return { data, error: undefined, response: {} };
},
);
}
// --- Test component: uses the bulk hook and displays results ---
function BulkStatusDisplay({ roomNames }: { roomNames: string[] }) {
const { data, isLoading } = useRoomsBulkMeetingStatus(roomNames);
if (isLoading) {
return <div data-testid="status">loading</div>;
}
if (!data) {
return <div data-testid="status">no data</div>;
}
return (
<div data-testid="status">
{roomNames.map((name) => {
const status = data[name];
return (
<div key={name} data-testid={`room-${name}`}>
{status?.active_meetings?.length ?? 0} active,{" "}
{status?.upcoming_events?.length ?? 0} upcoming
</div>
);
})}
</div>
);
}
function createWrapper() {
const queryClient = new QueryClient({
defaultOptions: {
queries: { retry: false },
},
});
return function Wrapper({ children }: { children: React.ReactNode }) {
return (
<QueryClientProvider client={queryClient}>
<ErrorProvider>{children}</ErrorProvider>
</QueryClientProvider>
);
};
}
// --- Tests ---
describe("bulk meeting status (prop-drilling)", () => {
afterEach(() => jest.clearAllMocks());
it("fetches all room statuses in a single POST request", async () => {
const rooms = Array.from({ length: 10 }, (_, i) => `room-${i}`);
mockBulkStatusEndpoint();
render(<BulkStatusDisplay roomNames={rooms} />, {
wrapper: createWrapper(),
});
await waitFor(() => {
for (const name of rooms) {
expect(screen.getByTestId(`room-${name}`)).toHaveTextContent(
"0 active, 0 upcoming",
);
}
});
const postCalls = mockClient.POST.mock.calls.filter(
([path]: [string]) => path === "/v1/rooms/meetings/bulk-status",
);
// Prop-drilling: exactly 1 POST for all rooms (no batcher needed)
expect(postCalls).toHaveLength(1);
// The single call contains all room names
const requestedRooms: string[] = postCalls[0][1].body.room_names;
expect(requestedRooms).toHaveLength(10);
for (const name of rooms) {
expect(requestedRooms).toContain(name);
}
});
it("returns room-specific data correctly", async () => {
mockBulkStatusEndpoint({
"room-a": {
active_meetings: [{ id: "m1", room_name: "room-a" }],
upcoming_events: [],
},
"room-b": {
active_meetings: [],
upcoming_events: [{ id: "e1", title: "Standup" }],
},
});
render(<BulkStatusDisplay roomNames={["room-a", "room-b"]} />, {
wrapper: createWrapper(),
});
await waitFor(() => {
expect(screen.getByTestId("room-room-a")).toHaveTextContent(
"1 active, 0 upcoming",
);
expect(screen.getByTestId("room-room-b")).toHaveTextContent(
"0 active, 1 upcoming",
);
});
// Still just 1 POST
expect(mockClient.POST).toHaveBeenCalledTimes(1);
});
it("does not fetch when roomNames is empty", async () => {
mockBulkStatusEndpoint();
render(<BulkStatusDisplay roomNames={[]} />, {
wrapper: createWrapper(),
});
await waitFor(() => {
expect(screen.getByTestId("status")).toHaveTextContent("no data");
});
// No POST calls when no rooms
expect(mockClient.POST).not.toHaveBeenCalled();
});
it("surfaces error when POST fails", async () => {
mockClient.POST.mockResolvedValue({
data: undefined,
error: { detail: "server error" },
response: {},
});
function ErrorDisplay({ roomNames }: { roomNames: string[] }) {
const { error } = useRoomsBulkMeetingStatus(roomNames);
if (error) return <div data-testid="error">{error.message}</div>;
return <div data-testid="error">no error</div>;
}
render(<ErrorDisplay roomNames={["room-x"]} />, {
wrapper: createWrapper(),
});
await waitFor(() => {
expect(screen.getByTestId("error")).toHaveTextContent(
"bulk-status fetch failed",
);
});
});
it("does not fetch when unauthenticated", async () => {
// Override useAuth to return unauthenticated
const authModule = jest.requireMock("../AuthProvider");
const originalUseAuth = authModule.useAuth;
authModule.useAuth = () => ({
...originalUseAuth(),
status: "unauthenticated",
});
mockBulkStatusEndpoint();
render(<BulkStatusDisplay roomNames={["room-1"]} />, {
wrapper: createWrapper(),
});
await waitFor(() => {
expect(screen.getByTestId("status")).toHaveTextContent("no data");
});
expect(mockClient.POST).not.toHaveBeenCalled();
// Restore
authModule.useAuth = originalUseAuth;
});
});

View File

@@ -1,8 +1,8 @@
 "use client";

-import { $api } from "./apiClient";
+import { $api, client } from "./apiClient";
 import { useError } from "../(errors)/errorContext";
-import { QueryClient, useQueryClient } from "@tanstack/react-query";
+import { QueryClient, useQuery, useQueryClient } from "@tanstack/react-query";
 import type { components } from "../reflector-api";
 import { useAuth } from "./AuthProvider";
 import { MeetingId } from "./types";
@@ -641,16 +641,21 @@ export function useMeetingDeactivate() {
       setError(error as Error, "Failed to end meeting");
     },
     onSuccess: () => {
-      return queryClient.invalidateQueries({
-        predicate: (query) => {
-          const key = query.queryKey;
-          return key.some(
-            (k) =>
-              typeof k === "string" &&
-              !!MEETING_LIST_PATH_PARTIALS.find((e) => k.includes(e)),
-          );
-        },
-      });
+      return Promise.all([
+        queryClient.invalidateQueries({
+          predicate: (query) => {
+            const key = query.queryKey;
+            return key.some(
+              (k) =>
+                typeof k === "string" &&
+                !!MEETING_LIST_PATH_PARTIALS.find((e) => k.includes(e)),
+            );
+          },
+        }),
+        queryClient.invalidateQueries({
+          queryKey: ["bulk-meeting-status"],
+        }),
+      ]);
     },
   });
 }
@@ -707,6 +712,9 @@ export function useRoomsCreateMeeting() {
           },
         ).queryKey,
       }),
+      queryClient.invalidateQueries({
+        queryKey: ["bulk-meeting-status"],
+      }),
     ]);
   },
   onError: (error) => {
@@ -772,6 +780,32 @@ export function useRoomActiveMeetings(roomName: string | null) {
   );
 }

+type RoomMeetingStatus = components["schemas"]["RoomMeetingStatus"];
+export type BulkMeetingStatusMap = Partial<Record<string, RoomMeetingStatus>>;
+
+export function useRoomsBulkMeetingStatus(roomNames: string[]) {
+  const { isAuthenticated } = useAuthReady();
+  const sortedNames = [...roomNames].sort();
+
+  return useQuery({
+    queryKey: ["bulk-meeting-status", sortedNames],
+    queryFn: async (): Promise<BulkMeetingStatusMap> => {
+      const { data, error } = await client.POST(
+        "/v1/rooms/meetings/bulk-status",
+        { body: { room_names: roomNames } },
+      );
+      if (error || !data) {
+        throw new Error(
+          `bulk-status fetch failed: ${JSON.stringify(error ?? "no data")}`,
+        );
+      }
+      return data;
+    },
+    enabled: sortedNames.length > 0 && isAuthenticated,
+  });
+}
+
 export function useRoomGetMeeting(
   roomName: string | null,
   meetingId: MeetingId | null,

View File

@@ -118,6 +118,23 @@ export interface paths {
       patch?: never;
       trace?: never;
     };
+    "/v1/rooms/meetings/bulk-status": {
+      parameters: {
+        query?: never;
+        header?: never;
+        path?: never;
+        cookie?: never;
+      };
+      get?: never;
+      put?: never;
+      /** Rooms Bulk Meeting Status */
+      post: operations["v1_rooms_bulk_meeting_status"];
+      delete?: never;
+      options?: never;
+      head?: never;
+      patch?: never;
+      trace?: never;
+    };
     "/v1/rooms/{room_id}": {
       parameters: {
         query?: never;
@@ -799,6 +816,11 @@ export interface components {
       */
      chunk: string;
    };
+    /** BulkStatusRequest */
+    BulkStatusRequest: {
+      /** Room Names */
+      room_names: string[];
+    };
     /** CalendarEventResponse */
     CalendarEventResponse: {
       /** Id */
@@ -1675,6 +1697,13 @@ export interface components {
       */
       skip_consent: boolean;
     };
+    /** RoomMeetingStatus */
+    RoomMeetingStatus: {
+      /** Active Meetings */
+      active_meetings: components["schemas"]["Meeting"][];
+      /** Upcoming Events */
+      upcoming_events: components["schemas"]["CalendarEventResponse"][];
+    };
     /** RoomDetails */
     RoomDetails: {
       /** Id */
@@ -2272,6 +2301,41 @@ export interface operations {
       };
     };
   };
+  v1_rooms_bulk_meeting_status: {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    requestBody: {
+      content: {
+        "application/json": components["schemas"]["BulkStatusRequest"];
+      };
+    };
+    responses: {
+      /** @description Successful Response */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": {
+            [key: string]: components["schemas"]["RoomMeetingStatus"];
+          };
+        };
+      };
+      /** @description Validation Error */
+      422: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": components["schemas"]["HTTPValidationError"];
+        };
+      };
+    };
+  };
   v1_rooms_get: {
     parameters: {
       query?: never;

View File

@@ -1,8 +1,22 @@
 module.exports = {
-  preset: "ts-jest",
-  testEnvironment: "node",
+  testEnvironment: "jest-environment-jsdom",
   roots: ["<rootDir>/app"],
-  testMatch: ["**/__tests__/**/*.test.ts"],
-  collectCoverage: true,
-  collectCoverageFrom: ["app/**/*.ts", "!app/**/*.d.ts"],
+  testMatch: ["**/__tests__/**/*.test.ts", "**/__tests__/**/*.test.tsx"],
+  collectCoverage: false,
+  transform: {
+    "^.+\\.[jt]sx?$": [
+      "ts-jest",
+      {
+        tsconfig: {
+          jsx: "react-jsx",
+          module: "esnext",
+          moduleResolution: "bundler",
+          esModuleInterop: true,
+          strict: true,
+          downlevelIteration: true,
+          lib: ["dom", "dom.iterable", "esnext"],
+        },
+      },
+    ],
+  },
 };

View File

@@ -61,9 +61,13 @@
   "author": "Andreas <andreas@monadical.com>",
   "license": "All Rights Reserved",
   "devDependencies": {
+    "@testing-library/dom": "^10.4.1",
+    "@testing-library/jest-dom": "^6.9.1",
+    "@testing-library/react": "^16.3.2",
     "@types/jest": "^30.0.0",
     "@types/react": "18.2.20",
     "jest": "^30.1.3",
+    "jest-environment-jsdom": "^30.2.0",
     "openapi-typescript": "^7.9.1",
     "prettier": "^3.0.0",
     "ts-jest": "^29.4.1"

www/pnpm-lock.yaml (generated, 787 changed lines)

File diff suppressed because it is too large.