mirror of https://github.com/Monadical-SAS/reflector.git
synced 2026-02-07 03:06:46 +00:00

Compare commits: fix-room-q ... main (2 commits: cd2255cfbc, 15ab2e306e)
.github/workflows/test_next_server.yml (vendored) — 14 lines changed

@@ -13,9 +13,6 @@ on:
 jobs:
   test-next-server:
     runs-on: ubuntu-latest
-    concurrency:
-      group: test-next-server-${{ github.ref }}
-      cancel-in-progress: true
 
     defaults:
       run:
@@ -24,12 +21,17 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
+      - name: Setup Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
-          package_json_file: './www/package.json'
+          version: 8
 
-      - name: Setup Node.js
+      - name: Setup Node.js cache
         uses: actions/setup-node@v4
         with:
           node-version: '20'
@@ -40,4 +42,4 @@ jobs:
         run: pnpm install
 
       - name: Run tests
         run: pnpm test
CHANGELOG.md — 12 lines changed

@@ -1,5 +1,17 @@
 # Changelog
 
+## [0.33.0](https://github.com/Monadical-SAS/reflector/compare/v0.32.2...v0.33.0) (2026-02-05)
+
+
+### Features
+
+* Daily+hatchet default ([#846](https://github.com/Monadical-SAS/reflector/issues/846)) ([15ab2e3](https://github.com/Monadical-SAS/reflector/commit/15ab2e306eacf575494b4b5d2b2ad779d44a1c7f))
+
+
+### Bug Fixes
+
+* websocket tests ([#825](https://github.com/Monadical-SAS/reflector/issues/825)) ([1ce1c7a](https://github.com/Monadical-SAS/reflector/commit/1ce1c7a910b6c374115d2437b17f9d288ef094dc))
+
 ## [0.32.2](https://github.com/Monadical-SAS/reflector/compare/v0.32.1...v0.32.2) (2026-02-03)
 
 
(new file — path not shown in this capture; an Alembic migration, revision 3aa20b96d963)

@@ -0,0 +1,35 @@
+"""drop_use_celery_column
+
+Revision ID: 3aa20b96d963
+Revises: e69f08ead8ea
+Create Date: 2026-02-05 10:12:44.065279
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "3aa20b96d963"
+down_revision: Union[str, None] = "e69f08ead8ea"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("room", schema=None) as batch_op:
+        batch_op.drop_column("use_celery")
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("room", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "use_celery",
+                sa.Boolean(),
+                server_default=sa.text("false"),
+                nullable=False,
+            )
+        )
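As a usage note, this migration can be applied or reverted through Alembic's Python API as well as the usual CLI. A minimal sketch, assuming a standard alembic.ini at the project root (the config path is an assumption, not shown in the diff):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")             # assumed config location
command.upgrade(cfg, "3aa20b96d963")    # apply: drops room.use_celery
command.downgrade(cfg, "e69f08ead8ea")  # revert: restores the column with server_default false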
(file path not captured — CalendarEventController)

@@ -104,26 +104,6 @@ class CalendarEventController:
         results = await get_database().fetch_all(query)
         return [CalendarEvent(**result) for result in results]
 
-    async def get_upcoming_for_rooms(
-        self, room_ids: list[str], minutes_ahead: int = 120
-    ) -> list[CalendarEvent]:
-        now = datetime.now(timezone.utc)
-        future_time = now + timedelta(minutes=minutes_ahead)
-        query = (
-            calendar_events.select()
-            .where(
-                sa.and_(
-                    calendar_events.c.room_id.in_(room_ids),
-                    calendar_events.c.is_deleted == False,
-                    calendar_events.c.start_time <= future_time,
-                    calendar_events.c.end_time >= now,
-                )
-            )
-            .order_by(calendar_events.c.start_time.asc())
-        )
-        results = await get_database().fetch_all(query)
-        return [CalendarEvent(**result) for result in results]
-
     async def get_by_id(self, event_id: str) -> CalendarEvent | None:
         query = calendar_events.select().where(calendar_events.c.id == event_id)
         result = await get_database().fetch_one(query)
(file path not captured — MeetingController)

@@ -301,23 +301,6 @@ class MeetingController:
         results = await get_database().fetch_all(query)
         return [Meeting(**result) for result in results]
 
-    async def get_all_active_for_rooms(
-        self, room_ids: list[str], current_time: datetime
-    ) -> list[Meeting]:
-        query = (
-            meetings.select()
-            .where(
-                sa.and_(
-                    meetings.c.room_id.in_(room_ids),
-                    meetings.c.end_date > current_time,
-                    meetings.c.is_active,
-                )
-            )
-            .order_by(meetings.c.end_date.desc())
-        )
-        results = await get_database().fetch_all(query)
-        return [Meeting(**result) for result in results]
-
     async def get_active_by_calendar_event(
         self, room: Room, calendar_event_id: str, current_time: datetime
     ) -> Meeting | None:
(file path not captured — rooms table, Room model, RoomController)

@@ -57,12 +57,6 @@ rooms = sqlalchemy.Table(
         sqlalchemy.String,
         nullable=False,
     ),
-    sqlalchemy.Column(
-        "use_celery",
-        sqlalchemy.Boolean,
-        nullable=False,
-        server_default=false(),
-    ),
     sqlalchemy.Column(
         "skip_consent",
         sqlalchemy.Boolean,
@@ -97,7 +91,6 @@ class Room(BaseModel):
     ics_last_sync: datetime | None = None
     ics_last_etag: str | None = None
     platform: Platform = Field(default_factory=lambda: settings.DEFAULT_VIDEO_PLATFORM)
-    use_celery: bool = False
     skip_consent: bool = False
 
 
@@ -245,11 +238,6 @@ class RoomController:
 
         return room
 
-    async def get_by_names(self, names: list[str]) -> list[Room]:
-        query = rooms.select().where(rooms.c.name.in_(names))
-        results = await get_database().fetch_all(query)
-        return [Room(**r) for r in results]
-
     async def get_ics_enabled(self) -> list[Room]:
         query = rooms.select().where(
             rooms.c.ics_enabled == True, rooms.c.ics_url != None
(likely reflector/services/transcript_process.py, per the patch targets in the tests below)

@@ -15,14 +15,10 @@ from hatchet_sdk.clients.rest.exceptions import ApiException, NotFoundException
 from hatchet_sdk.clients.rest.models import V1TaskStatus
 
 from reflector.db.recordings import recordings_controller
-from reflector.db.rooms import rooms_controller
 from reflector.db.transcripts import Transcript, transcripts_controller
 from reflector.hatchet.client import HatchetClientManager
 from reflector.logger import logger
 from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
-from reflector.pipelines.main_multitrack_pipeline import (
-    task_pipeline_multitrack_process,
-)
 from reflector.utils.string import NonEmptyString
 
 
@@ -181,124 +177,98 @@ async def dispatch_transcript_processing(
     Returns AsyncResult for Celery tasks, None for Hatchet workflows.
     """
     if isinstance(config, MultitrackProcessingConfig):
-        use_celery = False
-        if config.room_id:
-            room = await rooms_controller.get_by_id(config.room_id)
-            use_celery = room.use_celery if room else False
-
-        use_hatchet = not use_celery
-
-        if use_celery:
-            logger.info(
-                "Room uses legacy Celery processing",
-                room_id=config.room_id,
-                transcript_id=config.transcript_id,
-            )
-
-        if use_hatchet:
-            # First check if we can replay (outside transaction since it's read-only)
-            transcript = await transcripts_controller.get_by_id(config.transcript_id)
-            if transcript and transcript.workflow_run_id and not force:
-                can_replay = await HatchetClientManager.can_replay(
-                    transcript.workflow_run_id
-                )
-                if can_replay:
-                    await HatchetClientManager.replay_workflow(
-                        transcript.workflow_run_id
-                    )
-                    logger.info(
-                        "Replaying Hatchet workflow",
-                        workflow_id=transcript.workflow_run_id,
-                    )
-                    return None
-                else:
-                    # Workflow can't replay (CANCELLED, COMPLETED, or 404 deleted)
-                    # Log and proceed to start new workflow
-                    try:
-                        status = await HatchetClientManager.get_workflow_run_status(
-                            transcript.workflow_run_id
-                        )
-                        logger.info(
-                            "Old workflow not replayable, starting new",
-                            old_workflow_id=transcript.workflow_run_id,
-                            old_status=status.value,
-                        )
-                    except NotFoundException:
-                        # Workflow deleted from Hatchet but ID still in DB
-                        logger.info(
-                            "Old workflow not found in Hatchet, starting new",
-                            old_workflow_id=transcript.workflow_run_id,
-                        )
-
-            # Force: cancel old workflow if exists
-            if force and transcript and transcript.workflow_run_id:
-                try:
-                    await HatchetClientManager.cancel_workflow(
-                        transcript.workflow_run_id
-                    )
-                    logger.info(
-                        "Cancelled old workflow (--force)",
-                        workflow_id=transcript.workflow_run_id,
-                    )
-                except NotFoundException:
-                    logger.info(
-                        "Old workflow already deleted (--force)",
-                        workflow_id=transcript.workflow_run_id,
-                    )
-                await transcripts_controller.update(
-                    transcript, {"workflow_run_id": None}
-                )
-
-            # Re-fetch and check for concurrent dispatch (optimistic approach).
-            # No database lock - worst case is duplicate dispatch, but Hatchet
-            # workflows are idempotent so this is acceptable.
-            transcript = await transcripts_controller.get_by_id(config.transcript_id)
-            if transcript and transcript.workflow_run_id:
-                # Another process started a workflow between validation and now
-                try:
-                    status = await HatchetClientManager.get_workflow_run_status(
-                        transcript.workflow_run_id
-                    )
-                    if status in (V1TaskStatus.RUNNING, V1TaskStatus.QUEUED):
-                        logger.info(
-                            "Concurrent workflow detected, skipping dispatch",
-                            workflow_id=transcript.workflow_run_id,
-                        )
-                        return None
-                except ApiException:
-                    # Workflow might be gone (404) or API issue - proceed with new workflow
-                    pass
-
-            workflow_id = await HatchetClientManager.start_workflow(
-                workflow_name="DiarizationPipeline",
-                input_data={
-                    "recording_id": config.recording_id,
-                    "tracks": [{"s3_key": k} for k in config.track_keys],
-                    "bucket_name": config.bucket_name,
-                    "transcript_id": config.transcript_id,
-                    "room_id": config.room_id,
-                },
-                additional_metadata={
-                    "transcript_id": config.transcript_id,
-                    "recording_id": config.recording_id,
-                    "daily_recording_id": config.recording_id,
-                },
-            )
-
-            if transcript:
-                await transcripts_controller.update(
-                    transcript, {"workflow_run_id": workflow_id}
-                )
-
-            logger.info("Hatchet workflow dispatched", workflow_id=workflow_id)
-            return None
-
-        # Celery pipeline (durable workflows disabled)
-        return task_pipeline_multitrack_process.delay(
-            transcript_id=config.transcript_id,
-            bucket_name=config.bucket_name,
-            track_keys=config.track_keys,
-        )
+        # Multitrack processing always uses Hatchet (no Celery fallback)
+        # First check if we can replay (outside transaction since it's read-only)
+        transcript = await transcripts_controller.get_by_id(config.transcript_id)
+        if transcript and transcript.workflow_run_id and not force:
+            can_replay = await HatchetClientManager.can_replay(
+                transcript.workflow_run_id
+            )
+            if can_replay:
+                await HatchetClientManager.replay_workflow(transcript.workflow_run_id)
+                logger.info(
+                    "Replaying Hatchet workflow",
+                    workflow_id=transcript.workflow_run_id,
+                )
+                return None
+            else:
+                # Workflow can't replay (CANCELLED, COMPLETED, or 404 deleted)
+                # Log and proceed to start new workflow
+                try:
+                    status = await HatchetClientManager.get_workflow_run_status(
+                        transcript.workflow_run_id
+                    )
+                    logger.info(
+                        "Old workflow not replayable, starting new",
+                        old_workflow_id=transcript.workflow_run_id,
+                        old_status=status.value,
+                    )
+                except NotFoundException:
+                    # Workflow deleted from Hatchet but ID still in DB
+                    logger.info(
+                        "Old workflow not found in Hatchet, starting new",
+                        old_workflow_id=transcript.workflow_run_id,
+                    )
+
+        # Force: cancel old workflow if exists
+        if force and transcript and transcript.workflow_run_id:
+            try:
+                await HatchetClientManager.cancel_workflow(transcript.workflow_run_id)
+                logger.info(
+                    "Cancelled old workflow (--force)",
+                    workflow_id=transcript.workflow_run_id,
+                )
+            except NotFoundException:
+                logger.info(
+                    "Old workflow already deleted (--force)",
+                    workflow_id=transcript.workflow_run_id,
+                )
+            await transcripts_controller.update(transcript, {"workflow_run_id": None})
+
+        # Re-fetch and check for concurrent dispatch (optimistic approach).
+        # No database lock - worst case is duplicate dispatch, but Hatchet
+        # workflows are idempotent so this is acceptable.
+        transcript = await transcripts_controller.get_by_id(config.transcript_id)
+        if transcript and transcript.workflow_run_id:
+            # Another process started a workflow between validation and now
+            try:
+                status = await HatchetClientManager.get_workflow_run_status(
+                    transcript.workflow_run_id
+                )
+                if status in (V1TaskStatus.RUNNING, V1TaskStatus.QUEUED):
+                    logger.info(
+                        "Concurrent workflow detected, skipping dispatch",
+                        workflow_id=transcript.workflow_run_id,
+                    )
+                    return None
+            except ApiException:
+                # Workflow might be gone (404) or API issue - proceed with new workflow
+                pass
+
+        workflow_id = await HatchetClientManager.start_workflow(
+            workflow_name="DiarizationPipeline",
+            input_data={
+                "recording_id": config.recording_id,
+                "tracks": [{"s3_key": k} for k in config.track_keys],
+                "bucket_name": config.bucket_name,
+                "transcript_id": config.transcript_id,
+                "room_id": config.room_id,
+            },
+            additional_metadata={
+                "transcript_id": config.transcript_id,
+                "recording_id": config.recording_id,
+                "daily_recording_id": config.recording_id,
+            },
+        )
+
+        if transcript:
+            await transcripts_controller.update(
+                transcript, {"workflow_run_id": workflow_id}
+            )
+
+        logger.info("Hatchet workflow dispatched", workflow_id=workflow_id)
+        return None
     elif isinstance(config, FileProcessingConfig):
         return task_pipeline_file_process.delay(transcript_id=config.transcript_id)
     else:
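To make the dispatch contract above concrete, a hypothetical call-site sketch — the config field names mirror the input_data keys in the hunk, but the import path and constructor signature are assumptions not shown in this diff:

import asyncio

from reflector.services.transcript_process import (  # assumed module path
    MultitrackProcessingConfig,
    dispatch_transcript_processing,
)

async def main() -> None:
    config = MultitrackProcessingConfig(  # assumed constructor signature
        transcript_id="t-123",
        recording_id="rec-456",
        bucket_name="daily-bucket",
        track_keys=["tracks/a.mka", "tracks/b.mka"],
        room_id="room-1",
    )
    # After this change every multitrack config is dispatched to Hatchet: the
    # function replays or starts a workflow and returns None; an AsyncResult
    # now only comes back from the remaining Celery file-processing path.
    result = await dispatch_transcript_processing(config)
    assert result is None

asyncio.run(main())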
(likely reflector/settings.py — Settings class)

@@ -1,7 +1,7 @@
 from pydantic.types import PositiveInt
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
-from reflector.schemas.platform import WHEREBY_PLATFORM, Platform
+from reflector.schemas.platform import DAILY_PLATFORM, Platform
 from reflector.utils.string import NonEmptyString
 
 
@@ -155,7 +155,7 @@ class Settings(BaseSettings):
         None  # Webhook UUID for this environment. Not used by production code
     )
     # Platform Configuration
-    DEFAULT_VIDEO_PLATFORM: Platform = WHEREBY_PLATFORM
+    DEFAULT_VIDEO_PLATFORM: Platform = DAILY_PLATFORM
 
     # Zulip integration
     ZULIP_REALM: str | None = None
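Since Settings extends pydantic's BaseSettings, deployments that still want Whereby can override the new default through the environment rather than code. A self-contained sketch of the mechanism (a demo class, not reflector's actual Settings; the "whereby" literal matches the platform string seen elsewhere in this diff):

from pydantic_settings import BaseSettings

class DemoSettings(BaseSettings):
    # mirrors the field above; pydantic-settings reads an environment
    # variable of the same name when one is set, else uses the default
    DEFAULT_VIDEO_PLATFORM: str = "daily"

print(DemoSettings().DEFAULT_VIDEO_PLATFORM)  # "daily" by default
# DEFAULT_VIDEO_PLATFORM=whereby python demo.py  -> prints "whereby"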
(file path not captured — rooms API router)

@@ -1,6 +1,4 @@
-import asyncio
 import logging
-from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from enum import Enum
 from typing import Annotated, Any, Literal, Optional
@@ -8,14 +6,13 @@ from typing import Annotated, Any, Literal, Optional
 from fastapi import APIRouter, Depends, HTTPException
 from fastapi_pagination import Page
 from fastapi_pagination.ext.databases import apaginate
-from pydantic import BaseModel, Field
+from pydantic import BaseModel
 from redis.exceptions import LockError
 
 import reflector.auth as auth
 from reflector.db import get_database
 from reflector.db.calendar_events import calendar_events_controller
 from reflector.db.meetings import meetings_controller
-from reflector.db.rooms import Room as DbRoom
 from reflector.db.rooms import rooms_controller
 from reflector.redis_cache import RedisAsyncLock
 from reflector.schemas.platform import Platform
@@ -198,82 +195,6 @@ async def rooms_list(
     return paginated
 
 
-class BulkStatusRequest(BaseModel):
-    room_names: list[str] = Field(max_length=100)
-
-
-class RoomMeetingStatus(BaseModel):
-    active_meetings: list[Meeting]
-    upcoming_events: list[CalendarEventResponse]
-
-
-@router.post("/rooms/meetings/bulk-status", response_model=dict[str, RoomMeetingStatus])
-async def rooms_bulk_meeting_status(
-    request: BulkStatusRequest,
-    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
-):
-    if not user and not settings.PUBLIC_MODE:
-        raise HTTPException(status_code=401, detail="Not authenticated")
-
-    user_id = user["sub"] if user else None
-
-    all_rooms = await rooms_controller.get_by_names(request.room_names)
-    # Filter to rooms the user can see (owned or shared), matching rooms_list behavior
-    rooms = [
-        r
-        for r in all_rooms
-        if r.is_shared or (user_id is not None and r.user_id == user_id)
-    ]
-    room_by_id: dict[str, DbRoom] = {r.id: r for r in rooms}
-    room_ids = list(room_by_id.keys())
-
-    if not room_ids:
-        return {
-            name: RoomMeetingStatus(active_meetings=[], upcoming_events=[])
-            for name in request.room_names
-        }
-
-    current_time = datetime.now(timezone.utc)
-    active_meetings, upcoming_events = await asyncio.gather(
-        meetings_controller.get_all_active_for_rooms(room_ids, current_time),
-        calendar_events_controller.get_upcoming_for_rooms(room_ids),
-    )
-
-    # Group by room name, converting DB models to view models
-    active_by_room: dict[str, list[Meeting]] = defaultdict(list)
-    for m in active_meetings:
-        room = room_by_id.get(m.room_id)
-        if not room:
-            continue
-        m.platform = room.platform
-        if user_id != room.user_id and m.platform == "whereby":
-            m.host_room_url = ""
-        active_by_room[room.name].append(
-            Meeting.model_validate(m, from_attributes=True)
-        )
-
-    upcoming_by_room: dict[str, list[CalendarEventResponse]] = defaultdict(list)
-    for e in upcoming_events:
-        room = room_by_id.get(e.room_id)
-        if not room:
-            continue
-        if user_id != room.user_id:
-            e.description = None
-            e.attendees = None
-        upcoming_by_room[room.name].append(
-            CalendarEventResponse.model_validate(e, from_attributes=True)
-        )
-
-    result: dict[str, RoomMeetingStatus] = {}
-    for name in request.room_names:
-        result[name] = RoomMeetingStatus(
-            active_meetings=active_by_room.get(name, []),
-            upcoming_events=upcoming_by_room.get(name, []),
-        )
-
-    return result
-
-
 @router.get("/rooms/{room_id}", response_model=RoomDetails)
 async def rooms_get(
     room_id: str,
(file path not captured — Daily recording worker tasks)

@@ -27,9 +27,6 @@ from reflector.db.transcripts import (
 from reflector.hatchet.client import HatchetClientManager
 from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
 from reflector.pipelines.main_live_pipeline import asynctask
-from reflector.pipelines.main_multitrack_pipeline import (
-    task_pipeline_multitrack_process,
-)
 from reflector.pipelines.topic_processing import EmptyPipeline
 from reflector.processors import AudioFileWriterProcessor
 from reflector.processors.audio_waveform_processor import AudioWaveformProcessor
@@ -351,49 +348,29 @@ async def _process_multitrack_recording_inner(
         room_id=room.id,
     )
 
-    use_celery = room and room.use_celery
-    use_hatchet = not use_celery
-
-    if use_celery:
-        logger.info(
-            "Room uses legacy Celery processing",
-            room_id=room.id,
-            transcript_id=transcript.id,
-        )
-
-    if use_hatchet:
-        workflow_id = await HatchetClientManager.start_workflow(
-            workflow_name="DiarizationPipeline",
-            input_data={
-                "recording_id": recording_id,
-                "tracks": [{"s3_key": k} for k in filter_cam_audio_tracks(track_keys)],
-                "bucket_name": bucket_name,
-                "transcript_id": transcript.id,
-                "room_id": room.id,
-            },
-            additional_metadata={
-                "transcript_id": transcript.id,
-                "recording_id": recording_id,
-                "daily_recording_id": recording_id,
-            },
-        )
-        logger.info(
-            "Started Hatchet workflow",
-            workflow_id=workflow_id,
-            transcript_id=transcript.id,
-        )
-
-        await transcripts_controller.update(
-            transcript, {"workflow_run_id": workflow_id}
-        )
-        return
-
-    # Celery pipeline (runs when durable workflows disabled)
-    task_pipeline_multitrack_process.delay(
-        transcript_id=transcript.id,
-        bucket_name=bucket_name,
-        track_keys=filter_cam_audio_tracks(track_keys),
-    )
+    # Multitrack processing always uses Hatchet (no Celery fallback)
+    workflow_id = await HatchetClientManager.start_workflow(
+        workflow_name="DiarizationPipeline",
+        input_data={
+            "recording_id": recording_id,
+            "tracks": [{"s3_key": k} for k in filter_cam_audio_tracks(track_keys)],
+            "bucket_name": bucket_name,
+            "transcript_id": transcript.id,
+            "room_id": room.id,
+        },
+        additional_metadata={
+            "transcript_id": transcript.id,
+            "recording_id": recording_id,
+            "daily_recording_id": recording_id,
+        },
+    )
+    logger.info(
+        "Started Hatchet workflow",
+        workflow_id=workflow_id,
+        transcript_id=transcript.id,
+    )
+
+    await transcripts_controller.update(transcript, {"workflow_run_id": workflow_id})
 
 
 @shared_task
@@ -1072,66 +1049,43 @@ async def reprocess_failed_daily_recordings():
             )
             continue
 
-        use_celery = room and room.use_celery
-        use_hatchet = not use_celery
-
-        if use_hatchet:
-            if not transcript:
-                logger.warning(
-                    "No transcript for Hatchet reprocessing, skipping",
-                    recording_id=recording.id,
-                )
-                continue
-
-            workflow_id = await HatchetClientManager.start_workflow(
-                workflow_name="DiarizationPipeline",
-                input_data={
-                    "recording_id": recording.id,
-                    "tracks": [
-                        {"s3_key": k}
-                        for k in filter_cam_audio_tracks(recording.track_keys)
-                    ],
-                    "bucket_name": bucket_name,
-                    "transcript_id": transcript.id,
-                    "room_id": room.id if room else None,
-                },
-                additional_metadata={
-                    "transcript_id": transcript.id,
-                    "recording_id": recording.id,
-                    "reprocess": True,
-                },
-            )
-            await transcripts_controller.update(
-                transcript, {"workflow_run_id": workflow_id}
-            )
-
-            logger.info(
-                "Queued Daily recording for Hatchet reprocessing",
-                recording_id=recording.id,
-                workflow_id=workflow_id,
-                room_name=meeting.room_name,
-                track_count=len(recording.track_keys),
-            )
-        else:
-            logger.info(
-                "Queueing Daily recording for Celery reprocessing",
-                recording_id=recording.id,
-                room_name=meeting.room_name,
-                track_count=len(recording.track_keys),
-                transcript_status=transcript.status if transcript else None,
-            )
-
-        # For reprocessing, pass actual recording time (though it's ignored - see _process_multitrack_recording_inner)
-        # Reprocessing uses recording.meeting_id directly instead of time-based matching
-        recording_start_ts = int(recording.recorded_at.timestamp())
-
-        process_multitrack_recording.delay(
-            bucket_name=bucket_name,
-            daily_room_name=meeting.room_name,
-            recording_id=recording.id,
-            track_keys=recording.track_keys,
-            recording_start_ts=recording_start_ts,
-        )
+        # Multitrack reprocessing always uses Hatchet (no Celery fallback)
+        if not transcript:
+            logger.warning(
+                "No transcript for Hatchet reprocessing, skipping",
+                recording_id=recording.id,
+            )
+            continue
+
+        workflow_id = await HatchetClientManager.start_workflow(
+            workflow_name="DiarizationPipeline",
+            input_data={
+                "recording_id": recording.id,
+                "tracks": [
+                    {"s3_key": k}
+                    for k in filter_cam_audio_tracks(recording.track_keys)
+                ],
+                "bucket_name": bucket_name,
+                "transcript_id": transcript.id,
+                "room_id": room.id if room else None,
+            },
+            additional_metadata={
+                "transcript_id": transcript.id,
+                "recording_id": recording.id,
+                "reprocess": True,
+            },
+        )
+        await transcripts_controller.update(
+            transcript, {"workflow_run_id": workflow_id}
+        )
+
+        logger.info(
+            "Queued Daily recording for Hatchet reprocessing",
+            recording_id=recording.id,
+            workflow_id=workflow_id,
+            room_name=meeting.room_name,
+            track_count=len(recording.track_keys),
+        )
 
         reprocessed_count += 1
(file path not captured — test conftest registering the mock video platform)

@@ -4,7 +4,7 @@ from unittest.mock import patch
 
 import pytest
 
-from reflector.schemas.platform import WHEREBY_PLATFORM
+from reflector.schemas.platform import DAILY_PLATFORM, WHEREBY_PLATFORM
 
 
 @pytest.fixture(scope="session", autouse=True)
@@ -14,6 +14,7 @@ def register_mock_platform():
     from reflector.video_platforms.registry import register_platform
 
     register_platform(WHEREBY_PLATFORM, MockPlatformClient)
+    register_platform(DAILY_PLATFORM, MockPlatformClient)
     yield
(deleted file — backend tests for the removed bulk-status endpoint)

@@ -1,184 +0,0 @@
-from datetime import datetime, timedelta, timezone
-
-import pytest
-from conftest import authenticated_client_ctx
-
-from reflector.db.calendar_events import CalendarEvent, calendar_events_controller
-from reflector.db.meetings import meetings_controller
-from reflector.db.rooms import Room, rooms_controller
-from reflector.settings import settings
-
-
-async def _create_room(name: str, user_id: str, is_shared: bool = False) -> Room:
-    return await rooms_controller.add(
-        name=name,
-        user_id=user_id,
-        zulip_auto_post=False,
-        zulip_stream="",
-        zulip_topic="",
-        is_locked=False,
-        room_mode="normal",
-        recording_type="cloud",
-        recording_trigger="automatic-2nd-participant",
-        is_shared=is_shared,
-        webhook_url="",
-        webhook_secret="",
-    )
-
-
-async def _create_meeting(room: Room, active: bool = True):
-    now = datetime.now(timezone.utc)
-    return await meetings_controller.create(
-        id=f"meeting-{room.name}-{now.timestamp()}",
-        room_name=room.name,
-        room_url="room-url",
-        host_room_url="host-url",
-        start_date=now - timedelta(minutes=10),
-        end_date=now + timedelta(minutes=50) if active else now - timedelta(minutes=1),
-        room=room,
-    )
-
-
-async def _create_calendar_event(room: Room):
-    now = datetime.now(timezone.utc)
-    return await calendar_events_controller.upsert(
-        CalendarEvent(
-            room_id=room.id,
-            ics_uid=f"event-{room.name}",
-            title=f"Upcoming in {room.name}",
-            description="secret description",
-            start_time=now + timedelta(minutes=30),
-            end_time=now + timedelta(minutes=90),
-            attendees=[{"name": "Alice", "email": "alice@example.com"}],
-        )
-    )
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_returns_empty_for_no_rooms(client):
-    """Empty room_names returns empty dict."""
-    async with authenticated_client_ctx():
-        resp = await client.post("/rooms/meetings/bulk-status", json={"room_names": []})
-    assert resp.status_code == 200
-    assert resp.json() == {}
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_returns_active_meetings_and_upcoming_events(client):
-    """Owner sees active meetings and upcoming events for their rooms."""
-    room = await _create_room("bulk-test-room", "randomuserid")
-    await _create_meeting(room, active=True)
-    await _create_calendar_event(room)
-
-    async with authenticated_client_ctx():
-        resp = await client.post(
-            "/rooms/meetings/bulk-status",
-            json={"room_names": ["bulk-test-room"]},
-        )
-
-    assert resp.status_code == 200
-    data = resp.json()
-    assert "bulk-test-room" in data
-    status = data["bulk-test-room"]
-    assert len(status["active_meetings"]) == 1
-    assert len(status["upcoming_events"]) == 1
-    # Owner sees description
-    assert status["upcoming_events"][0]["description"] == "secret description"
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_redacts_data_for_non_owner(client):
-    """Non-owner of a shared room gets redacted calendar events and no whereby host_room_url."""
-    room = await _create_room("shared-bulk", "other-user-id", is_shared=True)
-    await _create_meeting(room, active=True)
-    await _create_calendar_event(room)
-
-    # authenticated as "randomuserid" but room owned by "other-user-id"
-    async with authenticated_client_ctx():
-        resp = await client.post(
-            "/rooms/meetings/bulk-status",
-            json={"room_names": ["shared-bulk"]},
-        )
-
-    assert resp.status_code == 200
-    status = resp.json()["shared-bulk"]
-    assert len(status["active_meetings"]) == 1
-    assert len(status["upcoming_events"]) == 1
-    # Non-owner: description and attendees redacted
-    assert status["upcoming_events"][0]["description"] is None
-    assert status["upcoming_events"][0]["attendees"] is None
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_filters_private_rooms_of_other_users(client):
-    """User cannot see private rooms owned by others."""
-    await _create_room("private-other", "other-user-id", is_shared=False)
-
-    async with authenticated_client_ctx():
-        resp = await client.post(
-            "/rooms/meetings/bulk-status",
-            json={"room_names": ["private-other"]},
-        )
-
-    assert resp.status_code == 200
-    status = resp.json()["private-other"]
-    assert status["active_meetings"] == []
-    assert status["upcoming_events"] == []
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_redacts_whereby_host_room_url_for_non_owner(client):
-    """Non-owner of a shared whereby room gets empty host_room_url."""
-    room = await _create_room("shared-whereby", "other-user-id", is_shared=True)
-    # Force platform to whereby
-    from reflector.db import get_database
-    from reflector.db.rooms import rooms as rooms_table
-
-    await get_database().execute(
-        rooms_table.update()
-        .where(rooms_table.c.id == room.id)
-        .values(platform="whereby")
-    )
-
-    await _create_meeting(room, active=True)
-
-    async with authenticated_client_ctx():
-        resp = await client.post(
-            "/rooms/meetings/bulk-status",
-            json={"room_names": ["shared-whereby"]},
-        )
-
-    assert resp.status_code == 200
-    status = resp.json()["shared-whereby"]
-    assert len(status["active_meetings"]) == 1
-    assert status["active_meetings"][0]["host_room_url"] == ""
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_unauthenticated_rejected_non_public(client):
-    """Unauthenticated request on non-PUBLIC_MODE instance returns 401."""
-    original = settings.PUBLIC_MODE
-    try:
-        settings.PUBLIC_MODE = False
-        resp = await client.post(
-            "/rooms/meetings/bulk-status",
-            json={"room_names": ["any-room"]},
-        )
-        assert resp.status_code == 401
-    finally:
-        settings.PUBLIC_MODE = original
-
-
-@pytest.mark.asyncio
-async def test_bulk_status_nonexistent_room_returns_empty(client):
-    """Requesting a room that doesn't exist returns empty lists."""
-    async with authenticated_client_ctx():
-        resp = await client.post(
-            "/rooms/meetings/bulk-status",
-            json={"room_names": ["does-not-exist"]},
-        )
-
-    assert resp.status_code == 200
-    status = resp.json()["does-not-exist"]
-    assert status["active_meetings"] == []
-    assert status["upcoming_events"] == []
(file path not captured — transcript processing pipeline tests)

@@ -1,6 +1,6 @@
 import asyncio
 import time
-from unittest.mock import patch
+from unittest.mock import AsyncMock, patch
 
 import pytest
 from httpx import ASGITransport, AsyncClient
@@ -142,17 +142,17 @@ async def test_whereby_recording_uses_file_pipeline(client):
             "reflector.services.transcript_process.task_pipeline_file_process"
         ) as mock_file_pipeline,
         patch(
-            "reflector.services.transcript_process.task_pipeline_multitrack_process"
-        ) as mock_multitrack_pipeline,
+            "reflector.services.transcript_process.HatchetClientManager"
+        ) as mock_hatchet,
     ):
         response = await client.post(f"/transcripts/{transcript.id}/process")
 
     assert response.status_code == 200
     assert response.json()["status"] == "ok"
 
-    # Whereby recordings should use file pipeline
+    # Whereby recordings should use file pipeline, not Hatchet
     mock_file_pipeline.delay.assert_called_once_with(transcript_id=transcript.id)
-    mock_multitrack_pipeline.delay.assert_not_called()
+    mock_hatchet.start_workflow.assert_not_called()
@@ -177,8 +177,6 @@ async def test_dailyco_recording_uses_multitrack_pipeline(client):
         recording_trigger="automatic-2nd-participant",
         is_shared=False,
     )
-    # Force Celery backend for test
-    await rooms_controller.update(room, {"use_celery": True})
 
     transcript = await transcripts_controller.add(
         "",
@@ -213,18 +211,23 @@ async def test_dailyco_recording_uses_multitrack_pipeline(client):
             "reflector.services.transcript_process.task_pipeline_file_process"
        ) as mock_file_pipeline,
         patch(
-            "reflector.services.transcript_process.task_pipeline_multitrack_process"
-        ) as mock_multitrack_pipeline,
+            "reflector.services.transcript_process.HatchetClientManager"
+        ) as mock_hatchet,
     ):
+        mock_hatchet.start_workflow = AsyncMock(return_value="test-workflow-id")
+
         response = await client.post(f"/transcripts/{transcript.id}/process")
 
     assert response.status_code == 200
    assert response.json()["status"] == "ok"
 
-    # Daily.co multitrack recordings should use multitrack pipeline
-    mock_multitrack_pipeline.delay.assert_called_once_with(
-        transcript_id=transcript.id,
-        bucket_name="daily-bucket",
-        track_keys=track_keys,
-    )
+    # Daily.co multitrack recordings should use Hatchet workflow
+    mock_hatchet.start_workflow.assert_called_once()
+    call_kwargs = mock_hatchet.start_workflow.call_args.kwargs
+    assert call_kwargs["workflow_name"] == "DiarizationPipeline"
+    assert call_kwargs["input_data"]["transcript_id"] == transcript.id
+    assert call_kwargs["input_data"]["bucket_name"] == "daily-bucket"
+    assert call_kwargs["input_data"]["tracks"] == [
+        {"s3_key": k} for k in track_keys
+    ]
     mock_file_pipeline.delay.assert_not_called()
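One detail worth calling out in the updated Daily test: HatchetClientManager is patched as a whole, and start_workflow is replaced with an AsyncMock so the awaited call yields a canned workflow id. A self-contained sketch of that pattern (illustrative names, not reflector code):

import asyncio
from unittest.mock import AsyncMock

async def dispatch(manager) -> str:
    # stand-in for service code awaiting HatchetClientManager.start_workflow
    return await manager.start_workflow(workflow_name="DiarizationPipeline")

async def main() -> None:
    manager = AsyncMock()
    manager.start_workflow = AsyncMock(return_value="test-workflow-id")
    assert await dispatch(manager) == "test-workflow-id"
    manager.start_workflow.assert_called_once_with(workflow_name="DiarizationPipeline")

asyncio.run(main())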
(RoomList component — imports ./RoomTable)

@@ -1,10 +1,5 @@
-import { useMemo } from "react";
 import { Box, Heading, Text, VStack } from "@chakra-ui/react";
 import type { components } from "../../../reflector-api";
-import {
-  useRoomsBulkMeetingStatus,
-  BulkMeetingStatusMap,
-} from "../../../lib/apiHooks";
 
 type Room = components["schemas"]["Room"];
 import { RoomTable } from "./RoomTable";
@@ -36,10 +31,6 @@ export function RoomList({
   pt,
   loading,
 }: RoomListProps) {
-  const roomNames = useMemo(() => rooms.map((r) => r.name), [rooms]);
-  const bulkStatusQuery = useRoomsBulkMeetingStatus(roomNames);
-  const meetingStatusMap: BulkMeetingStatusMap = bulkStatusQuery.data ?? {};
-
   return (
     <VStack alignItems="start" gap={4} mb={mb} pt={pt}>
       <Heading size="md">{title}</Heading>
@@ -52,8 +43,6 @@ export function RoomList({
         onEdit={onEdit}
         onDelete={onDelete}
         loading={loading}
-        meetingStatusMap={meetingStatusMap}
-        meetingStatusLoading={bulkStatusQuery.isLoading}
       />
       <RoomCards
         rooms={rooms}
(RoomTable component)

@@ -14,7 +14,11 @@ import {
 import { LuLink, LuRefreshCw } from "react-icons/lu";
 import { FaCalendarAlt } from "react-icons/fa";
 import type { components } from "../../../reflector-api";
-import { useRoomIcsSync, BulkMeetingStatusMap } from "../../../lib/apiHooks";
+import {
+  useRoomActiveMeetings,
+  useRoomUpcomingMeetings,
+  useRoomIcsSync,
+} from "../../../lib/apiHooks";
 
 type Room = components["schemas"]["Room"];
 type Meeting = components["schemas"]["Meeting"];
@@ -58,8 +62,6 @@ interface RoomTableProps {
   onEdit: (roomId: string, roomData: any) => void;
   onDelete: (roomId: string) => void;
   loading?: boolean;
-  meetingStatusMap: BulkMeetingStatusMap;
-  meetingStatusLoading: boolean;
 }
 
 const getRoomModeDisplay = (mode: string): string => {
@@ -102,16 +104,14 @@ const getZulipDisplay = (
   return "Enabled";
 };
 
-function MeetingStatus({
-  activeMeetings,
-  upcomingMeetings,
-  isLoading,
-}: {
-  activeMeetings: Meeting[];
-  upcomingMeetings: CalendarEventResponse[];
-  isLoading: boolean;
-}) {
-  if (isLoading) {
+function MeetingStatus({ roomName }: { roomName: string }) {
+  const activeMeetingsQuery = useRoomActiveMeetings(roomName);
+  const upcomingMeetingsQuery = useRoomUpcomingMeetings(roomName);
+
+  const activeMeetings = activeMeetingsQuery.data || [];
+  const upcomingMeetings = upcomingMeetingsQuery.data || [];
+
+  if (activeMeetingsQuery.isLoading || upcomingMeetingsQuery.isLoading) {
     return <Spinner size="sm" />;
   }
@@ -176,8 +176,6 @@ export function RoomTable({
   onEdit,
   onDelete,
   loading,
-  meetingStatusMap,
-  meetingStatusLoading,
 }: RoomTableProps) {
   const [syncingRooms, setSyncingRooms] = useState<Set<NonEmptyString>>(
     new Set(),
@@ -254,15 +252,7 @@ export function RoomTable({
               <Link href={`/${room.name}`}>{room.name}</Link>
             </Table.Cell>
             <Table.Cell>
-              <MeetingStatus
-                activeMeetings={
-                  meetingStatusMap[room.name]?.active_meetings ?? []
-                }
-                upcomingMeetings={
-                  meetingStatusMap[room.name]?.upcoming_events ?? []
-                }
-                isLoading={meetingStatusLoading}
-              />
+              <MeetingStatus roomName={room.name} />
             </Table.Cell>
             <Table.Cell>
               {getZulipDisplay(
@@ -1,246 +0,0 @@
|
|||||||
import "@testing-library/jest-dom";
|
|
||||||
|
|
||||||
// --- Module mocks (hoisted before imports) ---
|
|
||||||
|
|
||||||
jest.mock("../apiClient", () => ({
|
|
||||||
client: {
|
|
||||||
GET: jest.fn(),
|
|
||||||
POST: jest.fn(),
|
|
||||||
PUT: jest.fn(),
|
|
||||||
PATCH: jest.fn(),
|
|
||||||
DELETE: jest.fn(),
|
|
||||||
use: jest.fn(),
|
|
||||||
},
|
|
||||||
$api: {
|
|
||||||
useQuery: jest.fn(),
|
|
||||||
useMutation: jest.fn(),
|
|
||||||
queryOptions: (method: string, path: string, init?: unknown) =>
|
|
||||||
init === undefined
|
|
||||||
? { queryKey: [method, path] }
|
|
||||||
: { queryKey: [method, path, init] },
|
|
||||||
},
|
|
||||||
API_URL: "http://test",
|
|
||||||
WEBSOCKET_URL: "ws://test",
|
|
||||||
configureApiAuth: jest.fn(),
|
|
||||||
}));
|
|
||||||
|
|
||||||
jest.mock("../AuthProvider", () => ({
|
|
||||||
useAuth: () => ({
|
|
||||||
status: "authenticated" as const,
|
|
||||||
accessToken: "test-token",
|
|
||||||
accessTokenExpires: Date.now() + 3600000,
|
|
||||||
user: { id: "user1", name: "Test User" },
|
|
||||||
update: jest.fn(),
|
|
||||||
signIn: jest.fn(),
|
|
||||||
signOut: jest.fn(),
|
|
||||||
lastUserId: "user1",
|
|
||||||
}),
|
|
||||||
}));
|
|
||||||
|
|
||||||
// --- Imports (after mocks) ---
|
|
||||||
|
|
||||||
import React from "react";
|
|
||||||
import { render, waitFor, screen } from "@testing-library/react";
|
|
||||||
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
|
|
||||||
import { useRoomsBulkMeetingStatus, BulkMeetingStatusMap } from "../apiHooks";
|
|
||||||
import { client } from "../apiClient";
|
|
||||||
import { ErrorProvider } from "../../(errors)/errorContext";
|
|
||||||
|
|
||||||
const mockClient = client as { POST: jest.Mock };
|
|
||||||
|
|
||||||
// --- Helpers ---
|
|
||||||
|
|
||||||
function mockBulkStatusEndpoint(
|
|
||||||
roomData?: Record<
|
|
||||||
string,
|
|
||||||
{ active_meetings: unknown[]; upcoming_events: unknown[] }
|
|
||||||
>,
|
|
||||||
) {
|
|
||||||
mockClient.POST.mockImplementation(
|
|
||||||
async (_path: string, options: { body: { room_names: string[] } }) => {
|
|
||||||
const roomNames: string[] = options.body.room_names;
|
|
||||||
const src = roomData ?? {};
|
|
||||||
const data = Object.fromEntries(
|
|
||||||
roomNames.map((name) => [
|
|
||||||
name,
|
|
||||||
src[name] ?? { active_meetings: [], upcoming_events: [] },
|
|
||||||
]),
|
|
||||||
);
|
|
||||||
return { data, error: undefined, response: {} };
|
|
||||||
},
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Test component: uses the bulk hook and displays results ---
|
|
||||||
|
|
||||||
function BulkStatusDisplay({ roomNames }: { roomNames: string[] }) {
|
|
||||||
const { data, isLoading } = useRoomsBulkMeetingStatus(roomNames);
|
|
||||||
|
|
||||||
if (isLoading) {
|
|
||||||
return <div data-testid="status">loading</div>;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!data) {
|
|
||||||
return <div data-testid="status">no data</div>;
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div data-testid="status">
|
|
||||||
{roomNames.map((name) => {
|
|
||||||
const status = data[name];
|
|
||||||
return (
|
|
||||||
<div key={name} data-testid={`room-${name}`}>
|
|
||||||
{status?.active_meetings?.length ?? 0} active,{" "}
|
|
||||||
{status?.upcoming_events?.length ?? 0} upcoming
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
})}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function createWrapper() {
|
|
||||||
const queryClient = new QueryClient({
|
|
||||||
defaultOptions: {
|
|
||||||
queries: { retry: false },
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return function Wrapper({ children }: { children: React.ReactNode }) {
|
|
||||||
return (
|
|
||||||
<QueryClientProvider client={queryClient}>
|
|
||||||
<ErrorProvider>{children}</ErrorProvider>
|
|
||||||
</QueryClientProvider>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Tests ---
|
|
||||||
|
|
||||||
describe("bulk meeting status (prop-drilling)", () => {
|
|
||||||
afterEach(() => jest.clearAllMocks());
|
|
||||||
|
|
||||||
it("fetches all room statuses in a single POST request", async () => {
|
|
||||||
const rooms = Array.from({ length: 10 }, (_, i) => `room-${i}`);
|
|
||||||
|
|
||||||
mockBulkStatusEndpoint();
|
|
||||||
|
|
||||||
render(<BulkStatusDisplay roomNames={rooms} />, {
|
|
||||||
wrapper: createWrapper(),
|
|
||||||
});
|
|
||||||
|
|
||||||
-    await waitFor(() => {
-      for (const name of rooms) {
-        expect(screen.getByTestId(`room-${name}`)).toHaveTextContent(
-          "0 active, 0 upcoming",
-        );
-      }
-    });
-
-    const postCalls = mockClient.POST.mock.calls.filter(
-      ([path]: [string]) => path === "/v1/rooms/meetings/bulk-status",
-    );
-
-    // Prop-drilling: exactly 1 POST for all rooms (no batcher needed)
-    expect(postCalls).toHaveLength(1);
-
-    // The single call contains all room names
-    const requestedRooms: string[] = postCalls[0][1].body.room_names;
-    expect(requestedRooms).toHaveLength(10);
-    for (const name of rooms) {
-      expect(requestedRooms).toContain(name);
-    }
-  });
-
-  it("returns room-specific data correctly", async () => {
-    mockBulkStatusEndpoint({
-      "room-a": {
-        active_meetings: [{ id: "m1", room_name: "room-a" }],
-        upcoming_events: [],
-      },
-      "room-b": {
-        active_meetings: [],
-        upcoming_events: [{ id: "e1", title: "Standup" }],
-      },
-    });
-
-    render(<BulkStatusDisplay roomNames={["room-a", "room-b"]} />, {
-      wrapper: createWrapper(),
-    });
-
-    await waitFor(() => {
-      expect(screen.getByTestId("room-room-a")).toHaveTextContent(
-        "1 active, 0 upcoming",
-      );
-      expect(screen.getByTestId("room-room-b")).toHaveTextContent(
-        "0 active, 1 upcoming",
-      );
-    });
-
-    // Still just 1 POST
-    expect(mockClient.POST).toHaveBeenCalledTimes(1);
-  });
-
-  it("does not fetch when roomNames is empty", async () => {
-    mockBulkStatusEndpoint();
-
-    render(<BulkStatusDisplay roomNames={[]} />, {
-      wrapper: createWrapper(),
-    });
-
-    await waitFor(() => {
-      expect(screen.getByTestId("status")).toHaveTextContent("no data");
-    });
-
-    // No POST calls when no rooms
-    expect(mockClient.POST).not.toHaveBeenCalled();
-  });
-
-  it("surfaces error when POST fails", async () => {
-    mockClient.POST.mockResolvedValue({
-      data: undefined,
-      error: { detail: "server error" },
-      response: {},
-    });
-
-    function ErrorDisplay({ roomNames }: { roomNames: string[] }) {
-      const { error } = useRoomsBulkMeetingStatus(roomNames);
-      if (error) return <div data-testid="error">{error.message}</div>;
-      return <div data-testid="error">no error</div>;
-    }
-
-    render(<ErrorDisplay roomNames={["room-x"]} />, {
-      wrapper: createWrapper(),
-    });
-
-    await waitFor(() => {
-      expect(screen.getByTestId("error")).toHaveTextContent(
-        "bulk-status fetch failed",
-      );
-    });
-  });
-
-  it("does not fetch when unauthenticated", async () => {
-    // Override useAuth to return unauthenticated
-    const authModule = jest.requireMock("../AuthProvider");
-    const originalUseAuth = authModule.useAuth;
-    authModule.useAuth = () => ({
-      ...originalUseAuth(),
-      status: "unauthenticated",
-    });
-
-    mockBulkStatusEndpoint();
-
-    render(<BulkStatusDisplay roomNames={["room-1"]} />, {
-      wrapper: createWrapper(),
-    });
-
-    await waitFor(() => {
-      expect(screen.getByTestId("status")).toHaveTextContent("no data");
-    });
-
-    expect(mockClient.POST).not.toHaveBeenCalled();
-
-    // Restore
-    authModule.useAuth = originalUseAuth;
-  });
-});
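(The specs above rely on helpers defined earlier in this test file — createWrapper, BulkStatusDisplay, and mockBulkStatusEndpoint. A minimal sketch of what the endpoint mock is assumed to do, reconstructed from the assertions above; only the name and call shape come from the specs, the body is hypothetical.)

// Hypothetical reconstruction of the helper used above: answer the bulk-status
// POST with canned data, defaulting each requested room to no meetings/events.
function mockBulkStatusEndpoint(statusByRoom: BulkMeetingStatusMap = {}) {
  mockClient.POST.mockImplementation(async (path: string, init?: any) => {
    if (path !== "/v1/rooms/meetings/bulk-status") {
      return { data: undefined, error: { detail: "unexpected path" }, response: {} };
    }
    const names: string[] = init?.body?.room_names ?? [];
    const data = Object.fromEntries(
      names.map((name) => [
        name,
        statusByRoom[name] ?? { active_meetings: [], upcoming_events: [] },
      ]),
    );
    return { data, error: undefined, response: {} };
  });
}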
@@ -1,8 +1,8 @@
 "use client";

-import { $api, client } from "./apiClient";
+import { $api } from "./apiClient";
 import { useError } from "../(errors)/errorContext";
-import { QueryClient, useQuery, useQueryClient } from "@tanstack/react-query";
+import { QueryClient, useQueryClient } from "@tanstack/react-query";
 import type { components } from "../reflector-api";
 import { useAuth } from "./AuthProvider";
 import { MeetingId } from "./types";
@@ -641,21 +641,16 @@ export function useMeetingDeactivate() {
       setError(error as Error, "Failed to end meeting");
     },
     onSuccess: () => {
-      return Promise.all([
-        queryClient.invalidateQueries({
-          predicate: (query) => {
-            const key = query.queryKey;
-            return key.some(
-              (k) =>
-                typeof k === "string" &&
-                !!MEETING_LIST_PATH_PARTIALS.find((e) => k.includes(e)),
-            );
-          },
-        }),
-        queryClient.invalidateQueries({
-          queryKey: ["bulk-meeting-status"],
-        }),
-      ]);
+      return queryClient.invalidateQueries({
+        predicate: (query) => {
+          const key = query.queryKey;
+          return key.some(
+            (k) =>
+              typeof k === "string" &&
+              !!MEETING_LIST_PATH_PARTIALS.find((e) => k.includes(e)),
+          );
+        },
+      });
     },
   });
 }
@@ -712,9 +707,6 @@ export function useRoomsCreateMeeting() {
             },
           ).queryKey,
         }),
-        queryClient.invalidateQueries({
-          queryKey: ["bulk-meeting-status"],
-        }),
       ]);
     },
     onError: (error) => {
@@ -780,32 +772,6 @@ export function useRoomActiveMeetings(roomName: string | null) {
   );
 }

-type RoomMeetingStatus = components["schemas"]["RoomMeetingStatus"];
-
-export type BulkMeetingStatusMap = Partial<Record<string, RoomMeetingStatus>>;
-
-export function useRoomsBulkMeetingStatus(roomNames: string[]) {
-  const { isAuthenticated } = useAuthReady();
-  const sortedNames = [...roomNames].sort();
-
-  return useQuery({
-    queryKey: ["bulk-meeting-status", sortedNames],
-    queryFn: async (): Promise<BulkMeetingStatusMap> => {
-      const { data, error } = await client.POST(
-        "/v1/rooms/meetings/bulk-status",
-        { body: { room_names: roomNames } },
-      );
-      if (error || !data) {
-        throw new Error(
-          `bulk-status fetch failed: ${JSON.stringify(error ?? "no data")}`,
-        );
-      }
-      return data;
-    },
-    enabled: sortedNames.length > 0 && isAuthenticated,
-  });
-}
-
 export function useRoomGetMeeting(
   roomName: string | null,
   meetingId: MeetingId | null,
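(The BulkStatusDisplay fixture exercised by the deleted specs above is not part of this compare. A plausible reconstruction from the assertions — the per-room test ids and the "N active, M upcoming" strings — is sketched below; the JSX is hypothetical, not the committed component.)

// Hypothetical fixture: renders one line per room from the bulk-status map.
function BulkStatusDisplay({ roomNames }: { roomNames: string[] }) {
  const { data } = useRoomsBulkMeetingStatus(roomNames);
  if (!data) return <div data-testid="status">no data</div>;
  return (
    <div data-testid="status">
      {roomNames.map((name) => {
        const status = data[name];
        return (
          <div key={name} data-testid={`room-${name}`}>
            {status
              ? `${status.active_meetings.length} active, ${status.upcoming_events.length} upcoming`
              : "unknown"}
          </div>
        );
      })}
    </div>
  );
}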

64 www/app/reflector-api.d.ts (vendored)
@@ -118,23 +118,6 @@ export interface paths {
     patch?: never;
     trace?: never;
   };
-  "/v1/rooms/meetings/bulk-status": {
-    parameters: {
-      query?: never;
-      header?: never;
-      path?: never;
-      cookie?: never;
-    };
-    get?: never;
-    put?: never;
-    /** Rooms Bulk Meeting Status */
-    post: operations["v1_rooms_bulk_meeting_status"];
-    delete?: never;
-    options?: never;
-    head?: never;
-    patch?: never;
-    trace?: never;
-  };
   "/v1/rooms/{room_id}": {
     parameters: {
       query?: never;
@@ -816,11 +799,6 @@ export interface components {
       */
     chunk: string;
   };
-  /** BulkStatusRequest */
-  BulkStatusRequest: {
-    /** Room Names */
-    room_names: string[];
-  };
   /** CalendarEventResponse */
   CalendarEventResponse: {
     /** Id */
@@ -1697,13 +1675,6 @@ export interface components {
       */
     skip_consent: boolean;
   };
-  /** RoomMeetingStatus */
-  RoomMeetingStatus: {
-    /** Active Meetings */
-    active_meetings: components["schemas"]["Meeting"][];
-    /** Upcoming Events */
-    upcoming_events: components["schemas"]["CalendarEventResponse"][];
-  };
   /** RoomDetails */
   RoomDetails: {
     /** Id */
@@ -2301,41 +2272,6 @@ export interface operations {
       };
     };
   };
-  v1_rooms_bulk_meeting_status: {
-    parameters: {
-      query?: never;
-      header?: never;
-      path?: never;
-      cookie?: never;
-    };
-    requestBody: {
-      content: {
-        "application/json": components["schemas"]["BulkStatusRequest"];
-      };
-    };
-    responses: {
-      /** @description Successful Response */
-      200: {
-        headers: {
-          [name: string]: unknown;
-        };
-        content: {
-          "application/json": {
-            [key: string]: components["schemas"]["RoomMeetingStatus"];
-          };
-        };
-      };
-      /** @description Validation Error */
-      422: {
-        headers: {
-          [name: string]: unknown;
-        };
-        content: {
-          "application/json": components["schemas"]["HTTPValidationError"];
-        };
-      };
-    };
-  };
   v1_rooms_get: {
     parameters: {
       query?: never;

@@ -1,22 +1,8 @@
 module.exports = {
-  testEnvironment: "jest-environment-jsdom",
+  preset: "ts-jest",
+  testEnvironment: "node",
   roots: ["<rootDir>/app"],
-  testMatch: ["**/__tests__/**/*.test.ts", "**/__tests__/**/*.test.tsx"],
-  collectCoverage: false,
-  transform: {
-    "^.+\\.[jt]sx?$": [
-      "ts-jest",
-      {
-        tsconfig: {
-          jsx: "react-jsx",
-          module: "esnext",
-          moduleResolution: "bundler",
-          esModuleInterop: true,
-          strict: true,
-          downlevelIteration: true,
-          lib: ["dom", "dom.iterable", "esnext"],
-        },
-      },
-    ],
-  },
+  testMatch: ["**/__tests__/**/*.test.ts"],
+  collectCoverage: true,
+  collectCoverageFrom: ["app/**/*.ts", "!app/**/*.d.ts"],
 };

@@ -61,13 +61,9 @@
   "author": "Andreas <andreas@monadical.com>",
   "license": "All Rights Reserved",
   "devDependencies": {
-    "@testing-library/dom": "^10.4.1",
-    "@testing-library/jest-dom": "^6.9.1",
-    "@testing-library/react": "^16.3.2",
     "@types/jest": "^30.0.0",
     "@types/react": "18.2.20",
     "jest": "^30.1.3",
-    "jest-environment-jsdom": "^30.2.0",
     "openapi-typescript": "^7.9.1",
     "prettier": "^3.0.0",
     "ts-jest": "^29.4.1"

787 www/pnpm-lock.yaml (generated)
File diff suppressed because it is too large