mirror of https://github.com/Monadical-SAS/reflector.git
synced 2026-04-21 12:45:19 +00:00

Compare commits: feat/dag-z ... fix-room-q (7 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | cdc495499b |  |
|  | ae44f5227b |  |
|  | 4339ffffcf |  |
|  | 9dc6c20ef8 |  |
|  | 931c344ddf |  |
|  | 129290517e |  |
|  | 7e072219bf |  |
.github/workflows/test_next_server.yml (vendored, 12 changed lines):

@@ -13,6 +13,9 @@ on:
 jobs:
   test-next-server:
     runs-on: ubuntu-latest
+    concurrency:
+      group: test-next-server-${{ github.ref }}
+      cancel-in-progress: true

     defaults:
       run:
@@ -21,17 +24,12 @@ jobs:
     steps:
       - uses: actions/checkout@v4

-      - name: Setup Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: '20'
-
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
-          version: 8
+          package_json_file: './www/package.json'

-      - name: Setup Node.js cache
+      - name: Setup Node.js
         uses: actions/setup-node@v4
         with:
           node-version: '20'
CHANGELOG.md (12 changed lines):

@@ -1,17 +1,5 @@
 # Changelog

-## [0.33.0](https://github.com/Monadical-SAS/reflector/compare/v0.32.2...v0.33.0) (2026-02-05)
-
-
-### Features
-
-* Daily+hatchet default ([#846](https://github.com/Monadical-SAS/reflector/issues/846)) ([15ab2e3](https://github.com/Monadical-SAS/reflector/commit/15ab2e306eacf575494b4b5d2b2ad779d44a1c7f))
-
-
-### Bug Fixes
-
-* websocket tests ([#825](https://github.com/Monadical-SAS/reflector/issues/825)) ([1ce1c7a](https://github.com/Monadical-SAS/reflector/commit/1ce1c7a910b6c374115d2437b17f9d288ef094dc))
-
 ## [0.32.2](https://github.com/Monadical-SAS/reflector/compare/v0.32.1...v0.32.2) (2026-02-03)

reflector.db.calendar_events:

@@ -104,6 +104,26 @@ class CalendarEventController:
         results = await get_database().fetch_all(query)
         return [CalendarEvent(**result) for result in results]

+    async def get_upcoming_for_rooms(
+        self, room_ids: list[str], minutes_ahead: int = 120
+    ) -> list[CalendarEvent]:
+        now = datetime.now(timezone.utc)
+        future_time = now + timedelta(minutes=minutes_ahead)
+        query = (
+            calendar_events.select()
+            .where(
+                sa.and_(
+                    calendar_events.c.room_id.in_(room_ids),
+                    calendar_events.c.is_deleted == False,
+                    calendar_events.c.start_time <= future_time,
+                    calendar_events.c.end_time >= now,
+                )
+            )
+            .order_by(calendar_events.c.start_time.asc())
+        )
+        results = await get_database().fetch_all(query)
+        return [CalendarEvent(**result) for result in results]
+
     async def get_by_id(self, event_id: str) -> CalendarEvent | None:
         query = calendar_events.select().where(calendar_events.c.id == event_id)
         result = await get_database().fetch_one(query)
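As orientation, a minimal usage sketch for the new query (the room IDs and helper name are invented; the call assumes the application's usual async database context). The method returns events still running now or starting within the window, ordered by start time:

    from reflector.db.calendar_events import calendar_events_controller

    async def print_upcoming(room_ids: list[str]) -> None:
        # Events that have not ended yet and that start within the next
        # 2 hours (minutes_ahead defaults to 120 in the method above).
        events = await calendar_events_controller.get_upcoming_for_rooms(
            room_ids, minutes_ahead=120
        )
        for event in events:  # ordered by start_time ascending
            print(event.room_id, event.start_time, event.end_time)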
reflector.db.meetings:

@@ -301,6 +301,23 @@ class MeetingController:
         results = await get_database().fetch_all(query)
         return [Meeting(**result) for result in results]

+    async def get_all_active_for_rooms(
+        self, room_ids: list[str], current_time: datetime
+    ) -> list[Meeting]:
+        query = (
+            meetings.select()
+            .where(
+                sa.and_(
+                    meetings.c.room_id.in_(room_ids),
+                    meetings.c.end_date > current_time,
+                    meetings.c.is_active,
+                )
+            )
+            .order_by(meetings.c.end_date.desc())
+        )
+        results = await get_database().fetch_all(query)
+        return [Meeting(**result) for result in results]
+
     async def get_active_by_calendar_event(
         self, room: Room, calendar_event_id: str, current_time: datetime
     ) -> Meeting | None:
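Both batched controller methods exist so one request can cover many rooms with a single query per table; a sketch of the concurrent fetch, mirroring the asyncio.gather call in the bulk-status endpoint later in this diff (function name invented, usual async context assumed):

    import asyncio
    from datetime import datetime, timezone

    from reflector.db.calendar_events import calendar_events_controller
    from reflector.db.meetings import meetings_controller

    async def room_status(room_ids: list[str]):
        current_time = datetime.now(timezone.utc)
        # One query per table for the whole batch, run concurrently.
        active, upcoming = await asyncio.gather(
            meetings_controller.get_all_active_for_rooms(room_ids, current_time),
            calendar_events_controller.get_upcoming_for_rooms(room_ids),
        )
        return active, upcoming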
reflector.db.rooms:

@@ -238,6 +238,11 @@ class RoomController:

         return room

+    async def get_by_names(self, names: list[str]) -> list[Room]:
+        query = rooms.select().where(rooms.c.name.in_(names))
+        results = await get_database().fetch_all(query)
+        return [Room(**r) for r in results]
+
     async def get_ics_enabled(self) -> list[Room]:
         query = rooms.select().where(
             rooms.c.ics_enabled == True, rooms.c.ics_url != None
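And a sketch of how the name lookup combines with the visibility rule the new endpoint applies further down (shared rooms, or rooms owned by the requesting user; helper name invented):

    from reflector.db.rooms import rooms_controller

    async def visible_rooms(names: list[str], user_id: str | None):
        all_rooms = await rooms_controller.get_by_names(names)
        # Same filter as the bulk-status endpoint: shared rooms are visible
        # to everyone, private rooms only to their owner.
        return [
            r
            for r in all_rooms
            if r.is_shared or (user_id is not None and r.user_id == user_id)
        ]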
reflector.hatchet.dag_zulip (file deleted):

@@ -1,144 +0,0 @@
"""
Hatchet DAG Status -> Zulip Live Updates.

Posts/updates/deletes a Zulip message showing the Hatchet workflow DAG status.
All functions are fire-and-forget (catch + warning log on failure).

Note: Uses deferred imports throughout for fork-safety,
consistent with the pipeline pattern in daily_multitrack_pipeline.py.
"""

from reflector.logger import logger
from reflector.settings import settings


def _dag_zulip_enabled() -> bool:
    return bool(
        settings.ZULIP_REALM and settings.ZULIP_DAG_STREAM and settings.ZULIP_DAG_TOPIC
    )


async def create_dag_zulip_message(transcript_id: str, workflow_run_id: str) -> None:
    """Post initial DAG status to Zulip. Called at dispatch time (normal DB context)."""
    if not _dag_zulip_enabled():
        return

    try:
        from reflector.db.transcripts import transcripts_controller  # noqa: PLC0415
        from reflector.hatchet.client import HatchetClientManager  # noqa: PLC0415
        from reflector.tools.render_hatchet_run import (  # noqa: PLC0415
            render_run_detail,
        )
        from reflector.zulip import send_message_to_zulip  # noqa: PLC0415

        client = HatchetClientManager.get_client()
        details = await client.runs.aio_get(workflow_run_id)
        content = render_run_detail(details)

        response = await send_message_to_zulip(
            settings.ZULIP_DAG_STREAM, settings.ZULIP_DAG_TOPIC, content
        )
        message_id = response.get("id")

        if message_id:
            transcript = await transcripts_controller.get_by_id(transcript_id)
            if transcript:
                await transcripts_controller.update(
                    transcript, {"zulip_message_id": message_id}
                )
    except Exception:
        logger.warning(
            "[DAG Zulip] Failed to create DAG message",
            transcript_id=transcript_id,
            workflow_run_id=workflow_run_id,
            exc_info=True,
        )


async def update_dag_zulip_message(
    transcript_id: str,
    workflow_run_id: str,
    error_message: str | None = None,
) -> None:
    """Update existing DAG status in Zulip. Called from Hatchet worker (forked).

    Args:
        error_message: If set, appended as an error banner to the rendered DAG.
    """
    if not _dag_zulip_enabled():
        return

    try:
        from reflector.db.transcripts import transcripts_controller  # noqa: PLC0415
        from reflector.hatchet.client import HatchetClientManager  # noqa: PLC0415
        from reflector.hatchet.workflows.daily_multitrack_pipeline import (  # noqa: PLC0415
            fresh_db_connection,
        )
        from reflector.tools.render_hatchet_run import (  # noqa: PLC0415
            render_run_detail,
        )
        from reflector.zulip import update_zulip_message  # noqa: PLC0415

        async with fresh_db_connection():
            transcript = await transcripts_controller.get_by_id(transcript_id)
            if not transcript or not transcript.zulip_message_id:
                return

            client = HatchetClientManager.get_client()
            details = await client.runs.aio_get(workflow_run_id)
            content = render_run_detail(details)

            if error_message:
                content += f"\n\n:cross_mark: **{error_message}**"

            await update_zulip_message(
                transcript.zulip_message_id,
                settings.ZULIP_DAG_STREAM,
                settings.ZULIP_DAG_TOPIC,
                content,
            )
    except Exception:
        logger.warning(
            "[DAG Zulip] Failed to update DAG message",
            transcript_id=transcript_id,
            workflow_run_id=workflow_run_id,
            exc_info=True,
        )


async def delete_dag_zulip_message(transcript_id: str) -> None:
    """Delete DAG Zulip message and clear zulip_message_id.

    Called from post_zulip task (already inside fresh_db_connection).
    Swallows InvalidMessageError (message already deleted).
    """
    if not _dag_zulip_enabled():
        return

    try:
        from reflector.db.transcripts import transcripts_controller  # noqa: PLC0415
        from reflector.zulip import (  # noqa: PLC0415
            InvalidMessageError,
            delete_zulip_message,
        )

        transcript = await transcripts_controller.get_by_id(transcript_id)
        if not transcript or not transcript.zulip_message_id:
            return

        try:
            await delete_zulip_message(transcript.zulip_message_id)
        except InvalidMessageError:
            logger.warning(
                "[DAG Zulip] Message already deleted",
                transcript_id=transcript_id,
                zulip_message_id=transcript.zulip_message_id,
            )

        await transcripts_controller.update(transcript, {"zulip_message_id": None})
    except Exception:
        logger.warning(
            "[DAG Zulip] Failed to delete DAG message",
            transcript_id=transcript_id,
            exc_info=True,
        )
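Per its docstrings, the deleted module was wired into the pipeline at three points; a condensed sketch of that lifecycle, with the wrapper names invented for illustration (the call sites being removed appear in the hunks that follow). All three entry points swallow their own errors, so a Zulip outage never failed a run:

    # Illustrative lifecycle of the removed integration (wrapper names invented).

    async def on_dispatch(transcript_id: str, workflow_run_id: str) -> None:
        # 1. Dispatch time (normal DB context): post the initial DAG rendering
        #    and store the resulting Zulip message id on the transcript.
        await create_dag_zulip_message(transcript_id, workflow_run_id)

    async def on_step_done(transcript_id: str, workflow_run_id: str) -> None:
        # 2. After each Hatchet step (forked worker): re-render and edit in place.
        await update_dag_zulip_message(transcript_id, workflow_run_id)

    async def on_step_failed(
        transcript_id: str, workflow_run_id: str, step: str, e: Exception
    ) -> None:
        #    On failure: same edit, plus an error banner appended to the DAG.
        await update_dag_zulip_message(
            transcript_id, workflow_run_id, error_message=f"{step} failed: {e}"
        )

    async def on_post_zulip(transcript_id: str) -> None:
        # 3. In the post_zulip task: delete the live message and clear the id.
        await delete_dag_zulip_message(transcript_id)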
reflector.hatchet.workflows.daily_multitrack_pipeline:

@@ -45,7 +45,6 @@ from reflector.hatchet.constants import (
     TIMEOUT_SHORT,
     TaskName,
 )
-from reflector.hatchet.dag_zulip import update_dag_zulip_message
 from reflector.hatchet.workflows.models import (
     ActionItemsResult,
     ConsentResult,
@@ -239,14 +238,7 @@ def with_error_handling(
     @functools.wraps(func)
     async def wrapper(input: PipelineInput, ctx: Context) -> R:
         try:
-            result = await func(input, ctx)
-            try:
-                await update_dag_zulip_message(
-                    input.transcript_id, ctx.workflow_run_id
-                )
-            except Exception:
-                pass
-            return result
+            return await func(input, ctx)
         except Exception as e:
             logger.error(
                 f"[Hatchet] {step_name} failed",
@@ -254,14 +246,6 @@ def with_error_handling(
                 error=str(e),
                 exc_info=True,
             )
-            try:
-                await update_dag_zulip_message(
-                    input.transcript_id,
-                    ctx.workflow_run_id,
-                    error_message=f"{step_name} failed: {e}",
-                )
-            except Exception:
-                pass
             if set_error_status:
                 await set_workflow_error_status(input.transcript_id)
             raise
@@ -1310,11 +1294,6 @@ async def post_zulip(input: PipelineInput, ctx: Context) -> ZulipResult:

     async with fresh_db_connection():
         from reflector.db.transcripts import transcripts_controller  # noqa: PLC0415
-        from reflector.hatchet.dag_zulip import (  # noqa: PLC0415
-            delete_dag_zulip_message,
-        )
-
-        await delete_dag_zulip_message(input.transcript_id)

         transcript = await transcripts_controller.get_by_id(input.transcript_id)
         if transcript:
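Pieced together from the two with_error_handling hunks above, the wrapper after this change reduces to log-and-re-raise; a sketch (decorator scaffolding and any context lines between the hunks elided):

    @functools.wraps(func)
    async def wrapper(input: PipelineInput, ctx: Context) -> R:
        try:
            # Success path: no per-step Zulip update anymore, just return.
            return await func(input, ctx)
        except Exception as e:
            logger.error(
                f"[Hatchet] {step_name} failed",
                error=str(e),
                exc_info=True,
            )
            if set_error_status:
                await set_workflow_error_status(input.transcript_id)
            raise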
transcript dispatch module:

@@ -17,7 +17,6 @@ from hatchet_sdk.clients.rest.models import V1TaskStatus
 from reflector.db.recordings import recordings_controller
 from reflector.db.transcripts import Transcript, transcripts_controller
 from reflector.hatchet.client import HatchetClientManager
-from reflector.hatchet.dag_zulip import create_dag_zulip_message
 from reflector.logger import logger
 from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
 from reflector.utils.string import NonEmptyString
@@ -267,16 +266,6 @@ async def dispatch_transcript_processing(
             transcript, {"workflow_run_id": workflow_id}
         )

-        try:
-            await create_dag_zulip_message(config.transcript_id, workflow_id)
-        except Exception:
-            logger.warning(
-                "[DAG Zulip] Failed to create DAG message at dispatch",
-                transcript_id=config.transcript_id,
-                workflow_id=workflow_id,
-                exc_info=True,
-            )
-
         logger.info("Hatchet workflow dispatched", workflow_id=workflow_id)
         return None

reflector.settings:

@@ -161,9 +161,6 @@ class Settings(BaseSettings):
     ZULIP_REALM: str | None = None
     ZULIP_API_KEY: str | None = None
     ZULIP_BOT_EMAIL: str | None = None
-    ZULIP_DAG_STREAM: str | None = None
-    ZULIP_DAG_TOPIC: str | None = None
-    ZULIP_HOST_HEADER: str | None = None

     # Hatchet workflow orchestration (always enabled for multitrack processing)
     HATCHET_CLIENT_TOKEN: str | None = None
reflector.tools.render_hatchet_run (file deleted):

@@ -1,412 +0,0 @@
"""
Render Hatchet workflow runs as text DAG.

Usage:
    # Show latest 5 runs (summary table)
    uv run -m reflector.tools.render_hatchet_run

    # Show specific run with full DAG + task details
    uv run -m reflector.tools.render_hatchet_run <workflow_run_id>

    # Drill into Nth run from the list (1-indexed)
    uv run -m reflector.tools.render_hatchet_run --show 1

    # Show latest N runs
    uv run -m reflector.tools.render_hatchet_run --last 10

    # Filter by status
    uv run -m reflector.tools.render_hatchet_run --status FAILED
    uv run -m reflector.tools.render_hatchet_run --status RUNNING
"""

import argparse
import asyncio
from collections import defaultdict
from datetime import datetime, timedelta, timezone

from hatchet_sdk.clients.rest.models import (
    V1TaskEvent,
    V1TaskStatus,
    V1TaskSummary,
    V1WorkflowRunDetails,
    WorkflowRunShapeItemForWorkflowRunDetails,
)

from reflector.hatchet.client import HatchetClientManager

STATUS_ICON = {
    V1TaskStatus.COMPLETED: "\u2705",
    V1TaskStatus.RUNNING: "\u23f3",
    V1TaskStatus.FAILED: "\u274c",
    V1TaskStatus.QUEUED: "\u23f8\ufe0f",
    V1TaskStatus.CANCELLED: "\u26a0\ufe0f",
}

STATUS_LABEL = {
    V1TaskStatus.COMPLETED: "Complete",
    V1TaskStatus.RUNNING: "Running",
    V1TaskStatus.FAILED: "FAILED",
    V1TaskStatus.QUEUED: "Queued",
    V1TaskStatus.CANCELLED: "Cancelled",
}


def _fmt_time(dt: datetime | None) -> str:
    if dt is None:
        return "-"
    return dt.strftime("%H:%M:%S")


def _fmt_duration(ms: int | None) -> str:
    if ms is None:
        return "-"
    secs = ms / 1000
    if secs < 60:
        return f"{secs:.1f}s"
    mins = secs / 60
    return f"{mins:.1f}m"


def _fmt_status_line(task: V1TaskSummary) -> str:
    """Format a status line like: Complete (finished 20:31:44)"""
    label = STATUS_LABEL.get(task.status, task.status.value)
    icon = STATUS_ICON.get(task.status, "?")

    if task.status == V1TaskStatus.COMPLETED and task.finished_at:
        return f"{icon} {label} (finished {_fmt_time(task.finished_at)})"
    elif task.status == V1TaskStatus.RUNNING and task.started_at:
        parts = [f"started {_fmt_time(task.started_at)}"]
        if task.duration:
            parts.append(f"{_fmt_duration(task.duration)} elapsed")
        return f"{icon} {label} ({', '.join(parts)})"
    elif task.status == V1TaskStatus.FAILED and task.finished_at:
        return f"{icon} {label} (failed {_fmt_time(task.finished_at)})"
    elif task.status == V1TaskStatus.CANCELLED:
        return f"{icon} {label}"
    elif task.status == V1TaskStatus.QUEUED:
        return f"{icon} {label}"
    return f"{icon} {label}"


def _topo_sort(
    shape: list[WorkflowRunShapeItemForWorkflowRunDetails],
) -> list[str]:
    """Topological sort of step_ids from shape DAG."""
    step_ids = {s.step_id for s in shape}
    children_map: dict[str, list[str]] = {}
    in_degree: dict[str, int] = {sid: 0 for sid in step_ids}

    for s in shape:
        children = [c for c in (s.children_step_ids or []) if c in step_ids]
        children_map[s.step_id] = children
        for c in children:
            in_degree[c] += 1

    queue = sorted(sid for sid, deg in in_degree.items() if deg == 0)
    result: list[str] = []
    while queue:
        node = queue.pop(0)
        result.append(node)
        for c in children_map.get(node, []):
            in_degree[c] -= 1
            if in_degree[c] == 0:
                queue.append(c)
        queue.sort()

    return result


def render_run_detail(details: V1WorkflowRunDetails) -> str:
    """Render a single workflow run as markdown DAG with task details."""
    shape = details.shape or []
    tasks = details.tasks or []
    events = details.task_events or []
    run = details.run

    if not shape:
        return f"Run {run.metadata.id}: {run.status.value} (no shape data)"

    # Build lookups
    step_to_shape: dict[str, WorkflowRunShapeItemForWorkflowRunDetails] = {
        s.step_id: s for s in shape
    }
    step_to_name: dict[str, str] = {s.step_id: s.task_name for s in shape}

    # Reverse edges (parents)
    parents: dict[str, list[str]] = {s.step_id: [] for s in shape}
    for s in shape:
        for child_id in s.children_step_ids or []:
            if child_id in parents:
                parents[child_id].append(s.step_id)

    # Join tasks by step_id
    task_by_step: dict[str, V1TaskSummary] = {}
    for t in tasks:
        if t.step_id and t.step_id in step_to_name:
            task_by_step[t.step_id] = t

    # Events indexed by task_external_id
    events_by_task: dict[str, list[V1TaskEvent]] = defaultdict(list)
    for ev in events:
        events_by_task[ev.task_id].append(ev)

    ordered = _topo_sort(shape)

    lines: list[str] = []

    # Run header
    run_icon = STATUS_ICON.get(run.status, "?")
    run_name = run.display_name or run.workflow_id
    dur = _fmt_duration(run.duration)
    lines.append(f"**{run_name}** {run_icon} {dur}")
    lines.append(f"ID: `{run.metadata.id}`")
    if run.additional_metadata:
        meta_parts = [f"{k}=`{v}`" for k, v in run.additional_metadata.items()]
        lines.append(f"Meta: {', '.join(meta_parts)}")
    if run.error_message:
        # Take first line of error only for header
        first_line = run.error_message.split("\n")[0]
        lines.append(f"Error: {first_line}")
    lines.append("")

    # DAG Status Overview table
    lines.append("**DAG Status Overview**")
    lines.append("")
    lines.append("| Node | Status | Duration | Dependencies |")
    lines.append("|------|--------|----------|--------------|")

    for step_id in ordered:
        s = step_to_shape[step_id]
        t = task_by_step.get(step_id)
        name = step_to_name[step_id]
        icon = STATUS_ICON.get(t.status, "?") if t else "?"
        dur = _fmt_duration(t.duration) if t else "-"

        parent_names = [step_to_name[p] for p in parents[step_id]]
        child_names = [
            step_to_name[c] for c in (s.children_step_ids or []) if c in step_to_name
        ]
        deps_left = ", ".join(parent_names) if parent_names else ""
        deps_right = ", ".join(child_names) if child_names else ""
        if deps_left and deps_right:
            deps = f"{deps_left} \u2192 {deps_right}"
        elif deps_right:
            deps = f"\u2192 {deps_right}"
        elif deps_left:
            deps = f"{deps_left} \u2192"
        else:
            deps = "-"

        lines.append(f"| {name} | {icon} | {dur} | {deps} |")

    lines.append("")
    lines.append("---")
    lines.append("")

    # Node details
    for step_id in ordered:
        t = task_by_step.get(step_id)
        name = step_to_name[step_id]

        if not t:
            lines.append(f"**\U0001f4e6 {name}**")
            lines.append("Status: no task data")
            lines.append("")
            continue

        lines.append(f"**\U0001f4e6 {name}**")
        lines.append(f"Status: {_fmt_status_line(t)}")

        if t.duration:
            lines.append(f"Duration: {_fmt_duration(t.duration)}")
        if t.retry_count and t.retry_count > 0:
            lines.append(f"Retries: {t.retry_count}")

        # Fan-out children
        if t.num_spawned_children and t.num_spawned_children > 0:
            children = t.children or []
            completed = sum(1 for c in children if c.status == V1TaskStatus.COMPLETED)
            failed = sum(1 for c in children if c.status == V1TaskStatus.FAILED)
            running = sum(1 for c in children if c.status == V1TaskStatus.RUNNING)
            lines.append(
                f"Spawned children: {completed}/{t.num_spawned_children} done"
                f"{f', {running} running' if running else ''}"
                f"{f', {failed} failed' if failed else ''}"
            )

        # Error message (first meaningful line only, full trace in events)
        if t.error_message:
            err_lines = t.error_message.strip().split("\n")
            # Find first non-empty, non-traceback line
            err_summary = err_lines[0]
            for line in err_lines:
                stripped = line.strip()
                if stripped and not stripped.startswith(
                    ("Traceback", "File ", "{", ")")
                ):
                    err_summary = stripped
                    break
            lines.append(f"Error: `{err_summary}`")

        # Events log
        task_events = sorted(
            events_by_task.get(t.task_external_id, []),
            key=lambda e: e.timestamp,
        )
        if task_events:
            lines.append("Events:")
            for ev in task_events:
                ts = ev.timestamp.strftime("%H:%M:%S")
                ev_icon = ""
                if ev.event_type.value == "FINISHED":
                    ev_icon = "\u2705 "
                elif ev.event_type.value in ("FAILED", "TIMED_OUT"):
                    ev_icon = "\u274c "
                elif ev.event_type.value == "STARTED":
                    ev_icon = "\u25b6\ufe0f "
                elif ev.event_type.value == "RETRYING":
                    ev_icon = "\U0001f504 "
                elif ev.event_type.value == "CANCELLED":
                    ev_icon = "\u26a0\ufe0f "

                msg = ev.message.strip()
                if ev.error_message:
                    # Just first line of error in event log
                    err_first = ev.error_message.strip().split("\n")[0]
                    if msg:
                        msg += f" | {err_first}"
                    else:
                        msg = err_first

                if msg:
                    lines.append(f"  `{ts}` {ev_icon}{ev.event_type.value}: {msg}")
                else:
                    lines.append(f"  `{ts}` {ev_icon}{ev.event_type.value}")

        lines.append("")

    return "\n".join(lines)


def render_run_summary(idx: int, run: V1TaskSummary) -> str:
    """One-line summary for a run in the list view."""
    icon = STATUS_ICON.get(run.status, "?")
    name = run.display_name or run.workflow_name or "?"
    run_id = run.workflow_run_external_id or "?"
    dur = _fmt_duration(run.duration)
    started = _fmt_time(run.started_at)
    meta = ""
    if run.additional_metadata:
        meta_parts = [f"{k}=`{v}`" for k, v in run.additional_metadata.items()]
        meta = f" ({', '.join(meta_parts)})"
    return (
        f"  {idx}. {icon} **{name}** started={started} dur={dur}{meta}\n"
        f"     `{run_id}`"
    )


async def _fetch_run_list(
    count: int = 5,
    statuses: list[V1TaskStatus] | None = None,
) -> list[V1TaskSummary]:
    client = HatchetClientManager.get_client()
    since = datetime.now(timezone.utc) - timedelta(days=7)
    runs = await client.runs.aio_list(
        since=since,
        statuses=statuses,
        limit=count,
    )
    return runs.rows or []


async def list_recent_runs(
    count: int = 5,
    statuses: list[V1TaskStatus] | None = None,
) -> str:
    """List recent workflow runs as text."""
    rows = await _fetch_run_list(count, statuses)

    if not rows:
        return "No runs found in the last 7 days."

    lines = [f"Recent runs ({len(rows)}):", ""]
    for i, run in enumerate(rows, 1):
        lines.append(render_run_summary(i, run))

    lines.append("")
    lines.append("Use `--show N` to see full DAG for run N")
    return "\n".join(lines)


async def show_run(workflow_run_id: str) -> str:
    """Fetch and render a single run."""
    client = HatchetClientManager.get_client()
    details = await client.runs.aio_get(workflow_run_id)
    return render_run_detail(details)


async def show_nth_run(
    n: int,
    count: int = 5,
    statuses: list[V1TaskStatus] | None = None,
) -> str:
    """Fetch list, then drill into Nth run."""
    rows = await _fetch_run_list(count, statuses)

    if not rows:
        return "No runs found in the last 7 days."
    if n < 1 or n > len(rows):
        return f"Invalid index {n}. Have {len(rows)} runs (1-{len(rows)})."

    run = rows[n - 1]
    return await show_run(run.workflow_run_external_id)


async def main_async(args: argparse.Namespace) -> None:
    statuses = [V1TaskStatus(args.status)] if args.status else None

    if args.run_id:
        output = await show_run(args.run_id)
    elif args.show is not None:
        output = await show_nth_run(args.show, count=args.last, statuses=statuses)
    else:
        output = await list_recent_runs(count=args.last, statuses=statuses)

    print(output)


def main() -> None:
    parser = argparse.ArgumentParser(
        description="Render Hatchet workflow runs as text DAG"
    )
    parser.add_argument(
        "run_id",
        nargs="?",
        default=None,
        help="Workflow run ID to show in detail. If omitted, lists recent runs.",
    )
    parser.add_argument(
        "--show",
        type=int,
        default=None,
        metavar="N",
        help="Show full DAG for the Nth run in the list (1-indexed)",
    )
    parser.add_argument(
        "--last",
        type=int,
        default=5,
        help="Number of recent runs to list (default: 5)",
    )
    parser.add_argument(
        "--status",
        choices=["QUEUED", "RUNNING", "COMPLETED", "FAILED", "CANCELLED"],
        help="Filter by status",
    )

    args = parser.parse_args()
    asyncio.run(main_async(args))


if __name__ == "__main__":
    main()
rooms API router:

@@ -1,4 +1,6 @@
+import asyncio
 import logging
+from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from enum import Enum
 from typing import Annotated, Any, Literal, Optional
@@ -6,13 +8,14 @@ from typing import Annotated, Any, Literal, Optional
 from fastapi import APIRouter, Depends, HTTPException
 from fastapi_pagination import Page
 from fastapi_pagination.ext.databases import apaginate
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from redis.exceptions import LockError

 import reflector.auth as auth
 from reflector.db import get_database
 from reflector.db.calendar_events import calendar_events_controller
 from reflector.db.meetings import meetings_controller
+from reflector.db.rooms import Room as DbRoom
 from reflector.db.rooms import rooms_controller
 from reflector.redis_cache import RedisAsyncLock
 from reflector.schemas.platform import Platform
@@ -195,6 +198,73 @@ async def rooms_list(
     return paginated


+class BulkStatusRequest(BaseModel):
+    room_names: list[str] = Field(max_length=100)
+
+
+class RoomMeetingStatus(BaseModel):
+    active_meetings: list[Meeting]
+    upcoming_events: list[CalendarEventResponse]
+
+
+@router.post("/rooms/meetings/bulk-status", response_model=dict[str, RoomMeetingStatus])
+async def rooms_bulk_meeting_status(
+    request: BulkStatusRequest,
+    user: Annotated[Optional[auth.UserInfo], Depends(auth.current_user_optional)],
+):
+    user_id = user["sub"] if user else None
+
+    all_rooms = await rooms_controller.get_by_names(request.room_names)
+    # Filter to rooms the user can see (owned or shared), matching rooms_list behavior
+    rooms = [
+        r
+        for r in all_rooms
+        if r.is_shared or (user_id is not None and r.user_id == user_id)
+    ]
+    room_by_id: dict[str, DbRoom] = {r.id: r for r in rooms}
+    room_ids = list(room_by_id.keys())
+
+    current_time = datetime.now(timezone.utc)
+    active_meetings, upcoming_events = await asyncio.gather(
+        meetings_controller.get_all_active_for_rooms(room_ids, current_time),
+        calendar_events_controller.get_upcoming_for_rooms(room_ids),
+    )
+
+    # Group by room name
+    active_by_room: dict[str, list[Meeting]] = defaultdict(list)
+    for m in active_meetings:
+        room = room_by_id.get(m.room_id)
+        if not room:
+            continue
+        m.platform = room.platform
+        if user_id != room.user_id and m.platform == "whereby":
+            m.host_room_url = ""
+        active_by_room[room.name].append(
+            Meeting.model_validate(m, from_attributes=True)
+        )
+
+    upcoming_by_room: dict[str, list[CalendarEventResponse]] = defaultdict(list)
+    for e in upcoming_events:
+        room = room_by_id.get(e.room_id)
+        if not room:
+            continue
+        if user_id != room.user_id:
+            e.description = None
+            e.attendees = None
+        upcoming_by_room[room.name].append(
+            CalendarEventResponse.model_validate(e, from_attributes=True)
+        )
+
+    result: dict[str, RoomMeetingStatus] = {}
+    for name in request.room_names:
+        result[name] = RoomMeetingStatus(
+            active_meetings=active_by_room.get(name, []),
+            upcoming_events=upcoming_by_room.get(name, []),
+        )
+
+    return result
+
+
 @router.get("/rooms/{room_id}", response_model=RoomDetails)
 async def rooms_get(
     room_id: str,
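A hypothetical client call against the new endpoint (the base URL, any route prefix, and the room names are invented); each requested name comes back mapped to its status, with empty lists for unknown or non-visible rooms:

    import httpx

    async def poll_bulk_status() -> None:
        async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
            resp = await client.post(
                "/rooms/meetings/bulk-status",
                json={"room_names": ["standup", "demo"]},  # capped at 100 names
            )
            resp.raise_for_status()
            for name, status in resp.json().items():
                print(
                    name,
                    len(status["active_meetings"]),
                    len(status["upcoming_events"]),
                )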
multitrack recording worker:

@@ -25,7 +25,6 @@ from reflector.db.transcripts import (
     transcripts_controller,
 )
 from reflector.hatchet.client import HatchetClientManager
-from reflector.hatchet.dag_zulip import create_dag_zulip_message
 from reflector.pipelines.main_file_pipeline import task_pipeline_file_process
 from reflector.pipelines.main_live_pipeline import asynctask
 from reflector.pipelines.topic_processing import EmptyPipeline
@@ -373,16 +372,6 @@ async def _process_multitrack_recording_inner(

     await transcripts_controller.update(transcript, {"workflow_run_id": workflow_id})

-    try:
-        await create_dag_zulip_message(transcript.id, workflow_id)
-    except Exception:
-        logger.warning(
-            "[DAG Zulip] Failed to create DAG message at dispatch",
-            transcript_id=transcript.id,
-            workflow_id=workflow_id,
-            exc_info=True,
-        )
-

 @shared_task
 @asynctask
@@ -1090,16 +1079,6 @@ async def reprocess_failed_daily_recordings():
                 transcript, {"workflow_run_id": workflow_id}
             )

-            try:
-                await create_dag_zulip_message(transcript.id, workflow_id)
-            except Exception:
-                logger.warning(
-                    "[DAG Zulip] Failed to create DAG message at reprocess dispatch",
-                    transcript_id=transcript.id,
-                    workflow_id=workflow_id,
-                    exc_info=True,
-                )
-
             logger.info(
                 "Queued Daily recording for Hatchet reprocessing",
                 recording_id=recording.id,
reflector.zulip:

@@ -12,16 +12,9 @@ class InvalidMessageError(Exception):
     pass


-def _zulip_client() -> httpx.AsyncClient:
-    headers = {}
-    if settings.ZULIP_HOST_HEADER:
-        headers["Host"] = settings.ZULIP_HOST_HEADER
-    return httpx.AsyncClient(verify=False, headers=headers)
-
-
 async def get_zulip_topics(stream_id: int) -> list[dict]:
     try:
-        async with _zulip_client() as client:
+        async with httpx.AsyncClient() as client:
             response = await client.get(
                 f"https://{settings.ZULIP_REALM}/api/v1/users/me/{stream_id}/topics",
                 auth=(settings.ZULIP_BOT_EMAIL, settings.ZULIP_API_KEY),
@@ -36,7 +29,7 @@ async def get_zulip_topics(stream_id: int) -> list[dict]:

 async def get_zulip_streams() -> list[dict]:
     try:
-        async with _zulip_client() as client:
+        async with httpx.AsyncClient() as client:
             response = await client.get(
                 f"https://{settings.ZULIP_REALM}/api/v1/streams",
                 auth=(settings.ZULIP_BOT_EMAIL, settings.ZULIP_API_KEY),
@@ -51,7 +44,7 @@ async def get_zulip_streams() -> list[dict]:

 async def send_message_to_zulip(stream: str, topic: str, content: str):
     try:
-        async with _zulip_client() as client:
+        async with httpx.AsyncClient() as client:
             response = await client.post(
                 f"https://{settings.ZULIP_REALM}/api/v1/messages",
                 data={
@@ -73,7 +66,7 @@ async def send_message_to_zulip(stream: str, topic: str, content: str):

 async def update_zulip_message(message_id: int, stream: str, topic: str, content: str):
     try:
-        async with _zulip_client() as client:
+        async with httpx.AsyncClient() as client:
             response = await client.patch(
                 f"https://{settings.ZULIP_REALM}/api/v1/messages/{message_id}",
                 data={
@@ -97,27 +90,6 @@ async def update_zulip_message(message_id: int, stream: str, topic: str, content
         raise Exception(f"Failed to update Zulip message: {error}")


-async def delete_zulip_message(message_id: int):
-    try:
-        async with _zulip_client() as client:
-            response = await client.delete(
-                f"https://{settings.ZULIP_REALM}/api/v1/messages/{message_id}",
-                auth=(settings.ZULIP_BOT_EMAIL, settings.ZULIP_API_KEY),
-            )
-
-            if (
-                response.status_code == 400
-                and response.json()["msg"] == "Invalid message(s)"
-            ):
-                raise InvalidMessageError(f"There is no message with id: {message_id}")
-
-            response.raise_for_status()
-
-            return response.json()
-    except httpx.RequestError as error:
-        raise Exception(f"Failed to delete Zulip message: {error}")
-
-
 def get_zulip_message(transcript: Transcript, include_topics: bool):
     transcript_url = f"{settings.UI_BASE_URL}/transcripts/{transcript.id}"
tests for reflector.hatchet.dag_zulip (file deleted; listing truncated):

@@ -1,536 +0,0 @@
"""
Tests for Hatchet DAG Status -> Zulip Live Updates.

Tests cover:
- _dag_zulip_enabled() guard logic
- create_dag_zulip_message: sends + stores message ID
- update_dag_zulip_message: updates existing; noop when no message_id
- delete_dag_zulip_message: deletes + clears; handles InvalidMessageError
- delete_zulip_message: sends HTTP DELETE; raises on 400
- with_error_handling integration: calls update after success + failure
"""

from unittest.mock import AsyncMock, MagicMock, patch

import pytest

from reflector.db.transcripts import Transcript


@pytest.fixture
def dag_settings():
    """Patch settings for DAG Zulip tests."""
    with patch("reflector.hatchet.dag_zulip.settings") as mock_settings:
        mock_settings.ZULIP_REALM = "zulip.example.com"
        mock_settings.ZULIP_DAG_STREAM = "dag-stream"
        mock_settings.ZULIP_DAG_TOPIC = "dag-topic"
        mock_settings.ZULIP_BOT_EMAIL = "bot@example.com"
        mock_settings.ZULIP_API_KEY = "fake-key"
        yield mock_settings


@pytest.fixture
def dag_settings_disabled():
    """Patch settings with DAG Zulip disabled."""
    with patch("reflector.hatchet.dag_zulip.settings") as mock_settings:
        mock_settings.ZULIP_REALM = "zulip.example.com"
        mock_settings.ZULIP_DAG_STREAM = None
        mock_settings.ZULIP_DAG_TOPIC = None
        yield mock_settings


@pytest.fixture
def mock_transcript():
    return Transcript(
        id="test-transcript-id",
        name="Test",
        status="processing",
        source_kind="room",
        zulip_message_id=None,
    )


@pytest.fixture
def mock_transcript_with_zulip_id():
    return Transcript(
        id="test-transcript-id",
        name="Test",
        status="processing",
        source_kind="room",
        zulip_message_id=42,
    )


class TestDagZulipEnabled:
    def test_enabled_when_all_set(self, dag_settings):
        from reflector.hatchet.dag_zulip import _dag_zulip_enabled

        assert _dag_zulip_enabled() is True

    def test_disabled_when_realm_missing(self, dag_settings):
        dag_settings.ZULIP_REALM = None
        from reflector.hatchet.dag_zulip import _dag_zulip_enabled

        assert _dag_zulip_enabled() is False

    def test_disabled_when_stream_missing(self, dag_settings):
        dag_settings.ZULIP_DAG_STREAM = None
        from reflector.hatchet.dag_zulip import _dag_zulip_enabled

        assert _dag_zulip_enabled() is False

    def test_disabled_when_topic_missing(self, dag_settings):
        dag_settings.ZULIP_DAG_TOPIC = None
        from reflector.hatchet.dag_zulip import _dag_zulip_enabled

        assert _dag_zulip_enabled() is False


@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
class TestCreateDagZulipMessage:
    async def test_sends_and_stores_message_id(self, dag_settings, mock_transcript):
        mock_run_details = MagicMock()
        rendered_md = "**DAG** rendered"

        with (
            patch(
                "reflector.hatchet.client.HatchetClientManager.get_client"
            ) as mock_get_client,
            patch(
                "reflector.tools.render_hatchet_run.render_run_detail",
                return_value=rendered_md,
            ),
            patch(
                "reflector.zulip.send_message_to_zulip",
                new_callable=AsyncMock,
                return_value={"id": 99},
            ) as mock_send,
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript,
            ),
            patch(
                "reflector.db.transcripts.transcripts_controller.update",
                new_callable=AsyncMock,
            ) as mock_update,
        ):
            mock_client = MagicMock()
            mock_client.runs.aio_get = AsyncMock(return_value=mock_run_details)
            mock_get_client.return_value = mock_client

            from reflector.hatchet.dag_zulip import create_dag_zulip_message

            await create_dag_zulip_message("test-transcript-id", "workflow-run-123")

            mock_send.assert_called_once_with("dag-stream", "dag-topic", rendered_md)
            mock_update.assert_called_once_with(
                mock_transcript, {"zulip_message_id": 99}
            )

    async def test_noop_when_disabled(self, dag_settings_disabled):
        with patch(
            "reflector.zulip.send_message_to_zulip",
            new_callable=AsyncMock,
        ) as mock_send:
            from reflector.hatchet.dag_zulip import create_dag_zulip_message

            await create_dag_zulip_message("test-transcript-id", "workflow-run-123")
            mock_send.assert_not_called()

    async def test_logs_warning_on_failure(self, dag_settings, mock_transcript):
        with (
            patch(
                "reflector.hatchet.client.HatchetClientManager.get_client"
            ) as mock_get_client,
            patch(
                "reflector.tools.render_hatchet_run.render_run_detail",
                return_value="rendered",
            ),
            patch(
                "reflector.zulip.send_message_to_zulip",
                new_callable=AsyncMock,
                side_effect=Exception("Zulip down"),
            ),
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript,
            ),
            patch("reflector.hatchet.dag_zulip.logger") as mock_logger,
        ):
            mock_client = MagicMock()
            mock_client.runs.aio_get = AsyncMock(return_value=MagicMock())
            mock_get_client.return_value = mock_client

            from reflector.hatchet.dag_zulip import create_dag_zulip_message

            # Should not raise
            await create_dag_zulip_message("test-transcript-id", "workflow-run-123")
            mock_logger.warning.assert_called()


@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
class TestUpdateDagZulipMessage:
    async def test_updates_existing_message(
        self, dag_settings, mock_transcript_with_zulip_id
    ):
        mock_run_details = MagicMock()
        rendered_md = "**DAG** updated"

        with (
            patch(
                "reflector.hatchet.client.HatchetClientManager.get_client"
            ) as mock_get_client,
            patch(
                "reflector.tools.render_hatchet_run.render_run_detail",
                return_value=rendered_md,
            ),
            patch(
                "reflector.zulip.update_zulip_message",
                new_callable=AsyncMock,
            ) as mock_update,
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript_with_zulip_id,
            ),
            patch(
                "reflector.hatchet.workflows.daily_multitrack_pipeline.fresh_db_connection"
            ) as mock_fresh_db,
        ):
            mock_client = MagicMock()
            mock_client.runs.aio_get = AsyncMock(return_value=mock_run_details)
            mock_get_client.return_value = mock_client
            mock_fresh_db.return_value.__aenter__ = AsyncMock()
            mock_fresh_db.return_value.__aexit__ = AsyncMock(return_value=False)

            from reflector.hatchet.dag_zulip import update_dag_zulip_message

            await update_dag_zulip_message("test-transcript-id", "workflow-run-123")

            mock_update.assert_called_once_with(
                42, "dag-stream", "dag-topic", rendered_md
            )

    async def test_appends_error_banner(
        self, dag_settings, mock_transcript_with_zulip_id
    ):
        mock_run_details = MagicMock()
        rendered_md = "**DAG** updated"

        with (
            patch(
                "reflector.hatchet.client.HatchetClientManager.get_client"
            ) as mock_get_client,
            patch(
                "reflector.tools.render_hatchet_run.render_run_detail",
                return_value=rendered_md,
            ),
            patch(
                "reflector.zulip.update_zulip_message",
                new_callable=AsyncMock,
            ) as mock_update,
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript_with_zulip_id,
            ),
            patch(
                "reflector.hatchet.workflows.daily_multitrack_pipeline.fresh_db_connection"
            ) as mock_fresh_db,
        ):
            mock_client = MagicMock()
            mock_client.runs.aio_get = AsyncMock(return_value=mock_run_details)
            mock_get_client.return_value = mock_client
            mock_fresh_db.return_value.__aenter__ = AsyncMock()
            mock_fresh_db.return_value.__aexit__ = AsyncMock(return_value=False)

            from reflector.hatchet.dag_zulip import update_dag_zulip_message

            await update_dag_zulip_message(
                "test-transcript-id",
                "workflow-run-123",
                error_message="get_recording failed: connection timeout",
            )

            call_args = mock_update.call_args
            content = call_args[0][3]
            assert rendered_md in content
            assert "get_recording failed: connection timeout" in content

    async def test_noop_when_no_message_id(self, dag_settings, mock_transcript):
        with (
            patch(
                "reflector.zulip.update_zulip_message",
                new_callable=AsyncMock,
            ) as mock_update,
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript,
            ),
            patch(
                "reflector.hatchet.workflows.daily_multitrack_pipeline.fresh_db_connection"
            ) as mock_fresh_db,
        ):
            mock_fresh_db.return_value.__aenter__ = AsyncMock()
            mock_fresh_db.return_value.__aexit__ = AsyncMock(return_value=False)

            from reflector.hatchet.dag_zulip import update_dag_zulip_message

            await update_dag_zulip_message("test-transcript-id", "workflow-run-123")
            mock_update.assert_not_called()

    async def test_noop_when_disabled(self, dag_settings_disabled):
        with patch(
            "reflector.zulip.update_zulip_message",
            new_callable=AsyncMock,
        ) as mock_update:
            from reflector.hatchet.dag_zulip import update_dag_zulip_message

            await update_dag_zulip_message("test-transcript-id", "workflow-run-123")
            mock_update.assert_not_called()


@pytest.mark.usefixtures("setup_database")
@pytest.mark.asyncio
class TestDeleteDagZulipMessage:
    async def test_deletes_and_clears(
        self, dag_settings, mock_transcript_with_zulip_id
    ):
        with (
            patch(
                "reflector.zulip.delete_zulip_message",
                new_callable=AsyncMock,
            ) as mock_delete,
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript_with_zulip_id,
            ),
            patch(
                "reflector.db.transcripts.transcripts_controller.update",
                new_callable=AsyncMock,
            ) as mock_tc_update,
        ):
            from reflector.hatchet.dag_zulip import delete_dag_zulip_message

            await delete_dag_zulip_message("test-transcript-id")

            mock_delete.assert_called_once_with(42)
            mock_tc_update.assert_called_once_with(
                mock_transcript_with_zulip_id, {"zulip_message_id": None}
            )

    async def test_noop_when_no_message_id(self, dag_settings, mock_transcript):
        with (
            patch(
                "reflector.zulip.delete_zulip_message",
                new_callable=AsyncMock,
            ) as mock_delete,
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript,
            ),
        ):
            from reflector.hatchet.dag_zulip import delete_dag_zulip_message

            await delete_dag_zulip_message("test-transcript-id")
            mock_delete.assert_not_called()

    async def test_handles_invalid_message_error(
        self, dag_settings, mock_transcript_with_zulip_id
    ):
        from reflector.zulip import InvalidMessageError

        with (
            patch(
                "reflector.zulip.delete_zulip_message",
                new_callable=AsyncMock,
                side_effect=InvalidMessageError("gone"),
            ),
            patch(
                "reflector.db.transcripts.transcripts_controller.get_by_id",
                new_callable=AsyncMock,
                return_value=mock_transcript_with_zulip_id,
            ),
            patch(
                "reflector.db.transcripts.transcripts_controller.update",
                new_callable=AsyncMock,
            ) as mock_tc_update,
            patch("reflector.hatchet.dag_zulip.logger"),
        ):
            from reflector.hatchet.dag_zulip import delete_dag_zulip_message

            # Should not raise; should still clear the message_id
            await delete_dag_zulip_message("test-transcript-id")
            mock_tc_update.assert_called_once_with(
                mock_transcript_with_zulip_id, {"zulip_message_id": None}
            )

    async def test_noop_when_disabled(self, dag_settings_disabled):
        with patch(
            "reflector.zulip.delete_zulip_message",
            new_callable=AsyncMock,
        ) as mock_delete:
            from reflector.hatchet.dag_zulip import delete_dag_zulip_message

            await delete_dag_zulip_message("test-transcript-id")
            mock_delete.assert_not_called()


@pytest.mark.asyncio
class TestDeleteZulipMessage:
    async def test_sends_delete_request(self):
        mock_response = MagicMock()
        mock_response.status_code = 200
        mock_response.raise_for_status = MagicMock()
        mock_response.json.return_value = {"result": "success"}

        mock_client = AsyncMock()
        mock_client.delete = AsyncMock(return_value=mock_response)
        mock_client.__aenter__ = AsyncMock(return_value=mock_client)
        mock_client.__aexit__ = AsyncMock(return_value=False)

        with patch("reflector.zulip.httpx.AsyncClient", return_value=mock_client):
            with patch("reflector.zulip.settings") as mock_settings:
                mock_settings.ZULIP_REALM = "zulip.example.com"
                mock_settings.ZULIP_BOT_EMAIL = "bot@example.com"
                mock_settings.ZULIP_API_KEY = "fake-key"

                from reflector.zulip import delete_zulip_message

                result = await delete_zulip_message(123)
                assert result == {"result": "success"}

                mock_client.delete.assert_called_once()
                call_args = mock_client.delete.call_args
                assert "123" in call_args.args[0]

    async def test_raises_invalid_message_on_400(self):
        from reflector.zulip import InvalidMessageError

        mock_response = MagicMock()
        mock_response.status_code = 400
        mock_response.json.return_value = {"msg": "Invalid message(s)"}

        mock_client = AsyncMock()
        mock_client.delete = AsyncMock(return_value=mock_response)
        mock_client.__aenter__ = AsyncMock(return_value=mock_client)
        mock_client.__aexit__ = AsyncMock(return_value=False)

        with patch("reflector.zulip.httpx.AsyncClient", return_value=mock_client):
            with patch("reflector.zulip.settings") as mock_settings:
                mock_settings.ZULIP_REALM = "zulip.example.com"
                mock_settings.ZULIP_BOT_EMAIL = "bot@example.com"
                mock_settings.ZULIP_API_KEY = "fake-key"

                from reflector.zulip import delete_zulip_message

                with pytest.raises(InvalidMessageError):
                    await delete_zulip_message(999)


@pytest.mark.asyncio
class TestWithErrorHandlingDagUpdate:
    """Test that with_error_handling calls update_dag_zulip_message."""
|
|
||||||
async def test_calls_update_on_success(self):
|
|
||||||
from reflector.hatchet.constants import TaskName
|
|
||||||
from reflector.hatchet.workflows.daily_multitrack_pipeline import (
|
|
||||||
PipelineInput,
|
|
||||||
with_error_handling,
|
|
||||||
)
|
|
||||||
|
|
||||||
mock_ctx = MagicMock()
|
|
||||||
mock_ctx.workflow_run_id = "wfr-123"
|
|
||||||
|
|
||||||
input_data = PipelineInput(
|
|
||||||
recording_id="rec-1",
|
|
||||||
tracks=[{"s3_key": "k"}],
|
|
||||||
bucket_name="bucket",
|
|
||||||
transcript_id="tid-1",
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_error_handling(TaskName.GET_RECORDING)
|
|
||||||
async def fake_task(input: PipelineInput, ctx) -> str:
|
|
||||||
return "ok"
|
|
||||||
|
|
||||||
with patch(
|
|
||||||
"reflector.hatchet.workflows.daily_multitrack_pipeline.update_dag_zulip_message",
|
|
||||||
new_callable=AsyncMock,
|
|
||||||
) as mock_update:
|
|
||||||
result = await fake_task(input_data, mock_ctx)
|
|
||||||
assert result == "ok"
|
|
||||||
mock_update.assert_called_once_with("tid-1", "wfr-123")
|
|
||||||
|
|
||||||
async def test_calls_update_on_failure_with_error_message(self):
|
|
||||||
from reflector.hatchet.constants import TaskName
|
|
||||||
from reflector.hatchet.workflows.daily_multitrack_pipeline import (
|
|
||||||
PipelineInput,
|
|
||||||
with_error_handling,
|
|
||||||
)
|
|
||||||
|
|
||||||
mock_ctx = MagicMock()
|
|
||||||
mock_ctx.workflow_run_id = "wfr-123"
|
|
||||||
|
|
||||||
input_data = PipelineInput(
|
|
||||||
recording_id="rec-1",
|
|
||||||
tracks=[{"s3_key": "k"}],
|
|
||||||
bucket_name="bucket",
|
|
||||||
transcript_id="tid-1",
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_error_handling(TaskName.GET_RECORDING)
|
|
||||||
async def failing_task(input: PipelineInput, ctx) -> str:
|
|
||||||
raise ValueError("boom")
|
|
||||||
|
|
||||||
with (
|
|
||||||
patch(
|
|
||||||
"reflector.hatchet.workflows.daily_multitrack_pipeline.update_dag_zulip_message",
|
|
||||||
new_callable=AsyncMock,
|
|
||||||
) as mock_update,
|
|
||||||
patch(
|
|
||||||
"reflector.hatchet.workflows.daily_multitrack_pipeline.set_workflow_error_status",
|
|
||||||
new_callable=AsyncMock,
|
|
||||||
),
|
|
||||||
):
|
|
||||||
with pytest.raises(ValueError, match="boom"):
|
|
||||||
await failing_task(input_data, mock_ctx)
|
|
||||||
mock_update.assert_called_once_with(
|
|
||||||
"tid-1", "wfr-123", error_message="get_recording failed: boom"
|
|
||||||
)
|
|
||||||
|
|
||||||
async def test_dag_failure_doesnt_affect_task(self):
|
|
||||||
"""DAG update failure should not prevent task from succeeding."""
|
|
||||||
from reflector.hatchet.constants import TaskName
|
|
||||||
from reflector.hatchet.workflows.daily_multitrack_pipeline import (
|
|
||||||
PipelineInput,
|
|
||||||
with_error_handling,
|
|
||||||
)
|
|
||||||
|
|
||||||
mock_ctx = MagicMock()
|
|
||||||
mock_ctx.workflow_run_id = "wfr-123"
|
|
||||||
|
|
||||||
input_data = PipelineInput(
|
|
||||||
recording_id="rec-1",
|
|
||||||
tracks=[{"s3_key": "k"}],
|
|
||||||
bucket_name="bucket",
|
|
||||||
transcript_id="tid-1",
|
|
||||||
)
|
|
||||||
|
|
||||||
@with_error_handling(TaskName.GET_RECORDING)
|
|
||||||
async def ok_task(input: PipelineInput, ctx) -> str:
|
|
||||||
return "ok"
|
|
||||||
|
|
||||||
with patch(
|
|
||||||
"reflector.hatchet.workflows.daily_multitrack_pipeline.update_dag_zulip_message",
|
|
||||||
new_callable=AsyncMock,
|
|
||||||
side_effect=Exception("zulip exploded"),
|
|
||||||
):
|
|
||||||
result = await ok_task(input_data, mock_ctx)
|
|
||||||
assert result == "ok"
|
|
||||||
217
www/app/lib/__tests__/meetingStatusBatcher.test.tsx
Normal file
@@ -0,0 +1,217 @@
import "@testing-library/jest-dom";

// --- Module mocks (hoisted before imports) ---

jest.mock("../apiClient", () => ({
  client: {
    GET: jest.fn(),
    POST: jest.fn(),
    PUT: jest.fn(),
    PATCH: jest.fn(),
    DELETE: jest.fn(),
    use: jest.fn(),
  },
  $api: {
    useQuery: jest.fn(),
    useMutation: jest.fn(),
    queryOptions: (method: string, path: string, init?: unknown) =>
      init === undefined
        ? { queryKey: [method, path] }
        : { queryKey: [method, path, init] },
  },
  API_URL: "http://test",
  WEBSOCKET_URL: "ws://test",
  configureApiAuth: jest.fn(),
}));

jest.mock("../AuthProvider", () => ({
  useAuth: () => ({
    status: "authenticated" as const,
    accessToken: "test-token",
    accessTokenExpires: Date.now() + 3600000,
    user: { id: "user1", name: "Test User" },
    update: jest.fn(),
    signIn: jest.fn(),
    signOut: jest.fn(),
    lastUserId: "user1",
  }),
}));

// Recreate the batcher with a 0ms window. setTimeout(fn, 0) defers to the next
// macrotask boundary — after all synchronous React rendering completes. All
// useQuery queryFns fire within the same macrotask, so they all queue into one
// batch before the timer fires. This is deterministic and avoids fake timers.
jest.mock("../meetingStatusBatcher", () => {
  const actual = jest.requireActual("../meetingStatusBatcher");
  return {
    ...actual,
    meetingStatusBatcher: actual.createMeetingStatusBatcher(0),
  };
});

// --- Imports (after mocks) ---

import React from "react";
import { render, waitFor, screen } from "@testing-library/react";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { useRoomActiveMeetings, useRoomUpcomingMeetings } from "../apiHooks";
import { client } from "../apiClient";
import { ErrorProvider } from "../../(errors)/errorContext";

const mockClient = client as { POST: jest.Mock };

// --- Helpers ---

function mockBulkStatusEndpoint(
  roomData?: Record<
    string,
    { active_meetings: unknown[]; upcoming_events: unknown[] }
  >,
) {
  mockClient.POST.mockImplementation(
    async (_path: string, options: { body: { room_names: string[] } }) => {
      const roomNames: string[] = options.body.room_names;
      const src = roomData ?? {};
      const data = Object.fromEntries(
        roomNames.map((name) => [
          name,
          src[name] ?? { active_meetings: [], upcoming_events: [] },
        ]),
      );
      return { data, error: undefined, response: {} };
    },
  );
}

// --- Test component: renders N room cards, each using both hooks ---

function RoomCard({ roomName }: { roomName: string }) {
  const active = useRoomActiveMeetings(roomName);
  const upcoming = useRoomUpcomingMeetings(roomName);

  if (active.isLoading || upcoming.isLoading) {
    return <div data-testid={`room-${roomName}`}>loading</div>;
  }

  return (
    <div data-testid={`room-${roomName}`}>
      {active.data?.length ?? 0} active, {upcoming.data?.length ?? 0} upcoming
    </div>
  );
}

function RoomList({ roomNames }: { roomNames: string[] }) {
  return (
    <>
      {roomNames.map((name) => (
        <RoomCard key={name} roomName={name} />
      ))}
    </>
  );
}

function createWrapper() {
  const queryClient = new QueryClient({
    defaultOptions: {
      queries: { retry: false },
    },
  });
  return function Wrapper({ children }: { children: React.ReactNode }) {
    return (
      <QueryClientProvider client={queryClient}>
        <ErrorProvider>{children}</ErrorProvider>
      </QueryClientProvider>
    );
  };
}

// --- Tests ---

describe("meeting status batcher integration", () => {
  afterEach(() => jest.clearAllMocks());

  it("batches multiple room queries into a single POST request", async () => {
    const rooms = Array.from({ length: 10 }, (_, i) => `room-${i}`);

    mockBulkStatusEndpoint();

    render(<RoomList roomNames={rooms} />, { wrapper: createWrapper() });

    await waitFor(() => {
      for (const name of rooms) {
        expect(screen.getByTestId(`room-${name}`)).toHaveTextContent(
          "0 active, 0 upcoming",
        );
      }
    });

    const postCalls = mockClient.POST.mock.calls.filter(
      ([path]: [string]) => path === "/v1/rooms/meetings/bulk-status",
    );

    // Without batching this would be 20 calls (2 hooks x 10 rooms).
    expect(postCalls).toHaveLength(1);

    // The single call should contain all 10 rooms (deduplicated)
    const requestedRooms: string[] = postCalls[0][1].body.room_names;
    for (const name of rooms) {
      expect(requestedRooms).toContain(name);
    }
  });

  it("batcher fetcher returns room-specific data", async () => {
    const {
      meetingStatusBatcher: batcher,
    } = require("../meetingStatusBatcher");

    mockBulkStatusEndpoint({
      "room-a": {
        active_meetings: [{ id: "m1", room_name: "room-a" }],
        upcoming_events: [],
      },
      "room-b": {
        active_meetings: [],
        upcoming_events: [{ id: "e1", title: "Standup" }],
      },
    });

    const [resultA, resultB] = await Promise.all([
      batcher.fetch("room-a"),
      batcher.fetch("room-b"),
    ]);

    expect(mockClient.POST).toHaveBeenCalledTimes(1);
    expect(resultA.active_meetings).toEqual([
      { id: "m1", room_name: "room-a" },
    ]);
    expect(resultA.upcoming_events).toEqual([]);
    expect(resultB.active_meetings).toEqual([]);
    expect(resultB.upcoming_events).toEqual([{ id: "e1", title: "Standup" }]);
  });

  it("renders room-specific meeting data through hooks", async () => {
    mockBulkStatusEndpoint({
      "room-a": {
        active_meetings: [{ id: "m1", room_name: "room-a" }],
        upcoming_events: [],
      },
      "room-b": {
        active_meetings: [],
        upcoming_events: [{ id: "e1", title: "Standup" }],
      },
    });

    render(<RoomList roomNames={["room-a", "room-b"]} />, {
      wrapper: createWrapper(),
    });

    await waitFor(() => {
      expect(screen.getByTestId("room-room-a")).toHaveTextContent(
        "1 active, 0 upcoming",
      );
      expect(screen.getByTestId("room-room-b")).toHaveTextContent(
        "0 active, 1 upcoming",
      );
    });
  });
});
@@ -2,9 +2,10 @@
 import { $api } from "./apiClient";
 import { useError } from "../(errors)/errorContext";
-import { QueryClient, useQueryClient } from "@tanstack/react-query";
+import { QueryClient, useQuery, useQueryClient } from "@tanstack/react-query";
 import type { components } from "../reflector-api";
 import { useAuth } from "./AuthProvider";
+import { meetingStatusBatcher } from "./meetingStatusBatcher";
 import { MeetingId } from "./types";
 import { NonEmptyString } from "./utils";
 
@@ -697,15 +698,7 @@ export function useRoomsCreateMeeting() {
         queryKey: $api.queryOptions("get", "/v1/rooms").queryKey,
       }),
       queryClient.invalidateQueries({
-        queryKey: $api.queryOptions(
-          "get",
-          "/v1/rooms/{room_name}/meetings/active" satisfies `/v1/rooms/{room_name}/${typeof MEETINGS_ACTIVE_PATH_PARTIAL}`,
-          {
-            params: {
-              path: { room_name: roomName },
-            },
-          },
-        ).queryKey,
+        queryKey: meetingStatusKeys.active(roomName),
       }),
     ]);
   },
@@ -734,42 +727,39 @@ export function useRoomGetByName(roomName: string | null) {
 export function useRoomUpcomingMeetings(roomName: string | null) {
   const { isAuthenticated } = useAuthReady();
 
-  return $api.useQuery(
-    "get",
-    "/v1/rooms/{room_name}/meetings/upcoming" satisfies `/v1/rooms/{room_name}/${typeof MEETINGS_UPCOMING_PATH_PARTIAL}`,
-    {
-      params: {
-        path: { room_name: roomName! },
-      },
-    },
-    {
-      enabled: !!roomName && isAuthenticated,
-    },
-  );
+  return useQuery({
+    queryKey: meetingStatusKeys.upcoming(roomName!),
+    queryFn: async () => {
+      const result = await meetingStatusBatcher.fetch(roomName!);
+      return result.upcoming_events;
+    },
+    enabled: !!roomName && isAuthenticated,
+  });
 }
 
-const MEETINGS_PATH_PARTIAL = "meetings" as const;
-const MEETINGS_ACTIVE_PATH_PARTIAL = `${MEETINGS_PATH_PARTIAL}/active` as const;
-const MEETINGS_UPCOMING_PATH_PARTIAL =
-  `${MEETINGS_PATH_PARTIAL}/upcoming` as const;
-const MEETING_LIST_PATH_PARTIALS = [
-  MEETINGS_ACTIVE_PATH_PARTIAL,
-  MEETINGS_UPCOMING_PATH_PARTIAL,
-];
+// Query keys reuse $api.queryOptions so cache identity matches the original
+// per-room GET endpoints. The actual fetch goes through the batcher, but the
+// keys stay consistent with the rest of the codebase.
+const meetingStatusKeys = {
+  active: (roomName: string) =>
+    $api.queryOptions("get", "/v1/rooms/{room_name}/meetings/active", {
+      params: { path: { room_name: roomName } },
+    }).queryKey,
+  upcoming: (roomName: string) =>
+    $api.queryOptions("get", "/v1/rooms/{room_name}/meetings/upcoming", {
+      params: { path: { room_name: roomName } },
+    }).queryKey,
+};
 
 export function useRoomActiveMeetings(roomName: string | null) {
-  return $api.useQuery(
-    "get",
-    "/v1/rooms/{room_name}/meetings/active" satisfies `/v1/rooms/{room_name}/${typeof MEETINGS_ACTIVE_PATH_PARTIAL}`,
-    {
-      params: {
-        path: { room_name: roomName! },
-      },
-    },
-    {
-      enabled: !!roomName,
-    },
-  );
+  return useQuery({
+    queryKey: meetingStatusKeys.active(roomName!),
+    queryFn: async () => {
+      const result = await meetingStatusBatcher.fetch(roomName!);
+      return result.active_meetings;
+    },
+    enabled: !!roomName,
+  });
 }
 
 export function useRoomGetMeeting(
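The comment above the key helpers is the load-bearing detail of this refactor: because meetingStatusKeys derives its keys from $api.queryOptions, any existing invalidateQueries call aimed at the old per-room GET endpoints still lands on the batched hooks' cache entries. A minimal sketch of that interaction, assuming a hypothetical useRefreshRoomStatus helper (not in the diff) living in apiHooks.ts next to meetingStatusKeys:

import { useQueryClient } from "@tanstack/react-query";

// Hypothetical helper, for illustration only: invalidating with the derived
// key refetches useRoomActiveMeetings(roomName), even though its data now
// arrives via the batcher rather than the per-room GET endpoint.
function useRefreshRoomStatus(roomName: string) {
  const queryClient = useQueryClient();
  return () =>
    queryClient.invalidateQueries({
      queryKey: meetingStatusKeys.active(roomName),
    });
}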
37
www/app/lib/meetingStatusBatcher.ts
Normal file
@@ -0,0 +1,37 @@
import { create, keyResolver, windowScheduler } from "@yornaath/batshit";
import { client } from "./apiClient";
import type { components } from "../reflector-api";

type MeetingStatusResult = {
  roomName: string;
  active_meetings: components["schemas"]["Meeting"][];
  upcoming_events: components["schemas"]["CalendarEventResponse"][];
};

const BATCH_WINDOW_MS = 10;

export function createMeetingStatusBatcher(windowMs: number = BATCH_WINDOW_MS) {
  return create({
    fetcher: async (roomNames: string[]): Promise<MeetingStatusResult[]> => {
      const unique = [...new Set(roomNames)];
      const { data, error } = await client.POST(
        "/v1/rooms/meetings/bulk-status",
        { body: { room_names: unique } },
      );
      if (error || !data) {
        throw new Error(
          `bulk-status fetch failed: ${JSON.stringify(error ?? "no data")}`,
        );
      }
      return roomNames.map((name) => ({
        roomName: name,
        active_meetings: data[name]?.active_meetings ?? [],
        upcoming_events: data[name]?.upcoming_events ?? [],
      }));
    },
    resolver: keyResolver("roomName"),
    scheduler: windowScheduler(windowMs),
  });
}

export const meetingStatusBatcher = createMeetingStatusBatcher();
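For readers unfamiliar with @yornaath/batshit: create returns a batcher whose fetch(key) calls are queued for windowMs, collapsed into a single fetcher invocation, and fanned back out per caller by the resolver. A short usage sketch under those semantics (the demo function is illustrative, not part of the codebase):

import { meetingStatusBatcher } from "./meetingStatusBatcher";

// Illustrative only: both fetches land in the same 10ms window, so a single
// POST /v1/rooms/meetings/bulk-status is issued with both room names, and
// keyResolver("roomName") routes each room's status back to its caller.
async function demo() {
  const [a, b] = await Promise.all([
    meetingStatusBatcher.fetch("room-a"),
    meetingStatusBatcher.fetch("room-b"),
  ]);
  console.log(a.active_meetings.length, b.upcoming_events.length);
}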
64
www/app/reflector-api.d.ts
vendored
@@ -118,6 +118,23 @@ export interface paths {
     patch?: never;
     trace?: never;
   };
+  "/v1/rooms/meetings/bulk-status": {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    get?: never;
+    put?: never;
+    /** Rooms Bulk Meeting Status */
+    post: operations["v1_rooms_bulk_meeting_status"];
+    delete?: never;
+    options?: never;
+    head?: never;
+    patch?: never;
+    trace?: never;
+  };
   "/v1/rooms/{room_id}": {
     parameters: {
       query?: never;
@@ -799,6 +816,11 @@ export interface components {
      */
     chunk: string;
   };
+  /** BulkStatusRequest */
+  BulkStatusRequest: {
+    /** Room Names */
+    room_names: string[];
+  };
   /** CalendarEventResponse */
   CalendarEventResponse: {
     /** Id */
@@ -1735,6 +1757,13 @@ export interface components {
     /** Webhook Secret */
     webhook_secret: string | null;
   };
+  /** RoomMeetingStatus */
+  RoomMeetingStatus: {
+    /** Active Meetings */
+    active_meetings: components["schemas"]["Meeting"][];
+    /** Upcoming Events */
+    upcoming_events: components["schemas"]["CalendarEventResponse"][];
+  };
   /** RtcOffer */
   RtcOffer: {
     /** Sdp */
@@ -2272,6 +2301,41 @@ export interface operations {
       };
     };
   };
+  v1_rooms_bulk_meeting_status: {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    requestBody: {
+      content: {
+        "application/json": components["schemas"]["BulkStatusRequest"];
+      };
+    };
+    responses: {
+      /** @description Successful Response */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": {
+            [key: string]: components["schemas"]["RoomMeetingStatus"];
+          };
+        };
+      };
+      /** @description Validation Error */
+      422: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": components["schemas"]["HTTPValidationError"];
+        };
+      };
+    };
+  };
   v1_rooms_get: {
     parameters: {
       query?: never;
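With these generated types, the endpoint is callable directly through the openapi-fetch client with full inference: the body is checked against BulkStatusRequest and data comes back as a record of RoomMeetingStatus. A sketch of a direct call, mirroring what the batcher's fetcher does internally (fetchBulkStatus is a hypothetical name):

import { client } from "./apiClient";

// Hypothetical wrapper: `data` is typed as
// { [roomName: string]: RoomMeetingStatus } per the operation above.
async function fetchBulkStatus(roomNames: string[]) {
  const { data, error } = await client.POST("/v1/rooms/meetings/bulk-status", {
    body: { room_names: roomNames },
  });
  if (error || !data) {
    throw new Error(`bulk-status request failed: ${JSON.stringify(error)}`);
  }
  return data;
}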
@@ -1,8 +1,22 @@
 module.exports = {
-  preset: "ts-jest",
-  testEnvironment: "node",
+  testEnvironment: "jest-environment-jsdom",
   roots: ["<rootDir>/app"],
-  testMatch: ["**/__tests__/**/*.test.ts"],
-  collectCoverage: true,
-  collectCoverageFrom: ["app/**/*.ts", "!app/**/*.d.ts"],
+  testMatch: ["**/__tests__/**/*.test.ts", "**/__tests__/**/*.test.tsx"],
+  collectCoverage: false,
+  transform: {
+    "^.+\\.[jt]sx?$": [
+      "ts-jest",
+      {
+        tsconfig: {
+          jsx: "react-jsx",
+          module: "esnext",
+          moduleResolution: "bundler",
+          esModuleInterop: true,
+          strict: true,
+          downlevelIteration: true,
+          lib: ["dom", "dom.iterable", "esnext"],
+        },
+      },
+    ],
+  },
 };
|||||||
@@ -23,6 +23,7 @@
|
|||||||
"@tanstack/react-query": "^5.85.9",
|
"@tanstack/react-query": "^5.85.9",
|
||||||
"@types/ioredis": "^5.0.0",
|
"@types/ioredis": "^5.0.0",
|
||||||
"@whereby.com/browser-sdk": "^3.3.4",
|
"@whereby.com/browser-sdk": "^3.3.4",
|
||||||
|
"@yornaath/batshit": "^0.14.0",
|
||||||
"autoprefixer": "10.4.20",
|
"autoprefixer": "10.4.20",
|
||||||
"axios": "^1.8.2",
|
"axios": "^1.8.2",
|
||||||
"eslint": "^9.33.0",
|
"eslint": "^9.33.0",
|
||||||
@@ -61,9 +62,13 @@
|
|||||||
"author": "Andreas <andreas@monadical.com>",
|
"author": "Andreas <andreas@monadical.com>",
|
||||||
"license": "All Rights Reserved",
|
"license": "All Rights Reserved",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@testing-library/dom": "^10.4.1",
|
||||||
|
"@testing-library/jest-dom": "^6.9.1",
|
||||||
|
"@testing-library/react": "^16.3.2",
|
||||||
"@types/jest": "^30.0.0",
|
"@types/jest": "^30.0.0",
|
||||||
"@types/react": "18.2.20",
|
"@types/react": "18.2.20",
|
||||||
"jest": "^30.1.3",
|
"jest": "^30.1.3",
|
||||||
|
"jest-environment-jsdom": "^30.2.0",
|
||||||
"openapi-typescript": "^7.9.1",
|
"openapi-typescript": "^7.9.1",
|
||||||
"prettier": "^3.0.0",
|
"prettier": "^3.0.0",
|
||||||
"ts-jest": "^29.4.1"
|
"ts-jest": "^29.4.1"
|
||||||
|
|||||||
808
www/pnpm-lock.yaml
generated
File diff suppressed because it is too large