style: use ruff for linting and formatting (#524)

commit f5b82d44e3 (parent ad56165b54)
committed by GitHub, 2025-07-31 17:57:43 -06:00
88 changed files with 263 additions and 197 deletions

View File

@@ -15,25 +15,16 @@ repos:
     hooks:
       - id: debug-statements
       - id: trailing-whitespace
+        exclude: ^server/trials
       - id: detect-private-key
-  - repo: https://github.com/psf/black
-    rev: 24.1.1
-    hooks:
-      - id: black
-        files: ^server/(reflector|tests)/
-  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-        name: isort (python)
-        files: ^server/(gpu|evaluate|reflector)/
-        args: [ "--profile", "black", "--filter-files" ]
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.6.5
+    rev: v0.8.2
     hooks:
       - id: ruff
-        files: ^server/(reflector|tests)/
+        args:
+          - --fix
+          - --select
+          - I,F401
+        files: ^server/
+      - id: ruff-format
+        files: ^server/
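The single ruff hook now covers what black and isort did before. As a rough local equivalent of the hooks above (a sketch, assuming ruff is installed and invoked from the repository root with the same arguments as the hook):

    ruff check --fix --select I,F401 server/
    ruff format server/

Rule group I handles import sorting (replacing isort) and F401 removes unused imports; ruff format takes over formatting from black. This is why the file diffs below are almost entirely import reordering, quote normalization, and line re-wrapping.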

View File

@@ -1,9 +1,10 @@
 from logging.config import fileConfig
 from alembic import context
+from sqlalchemy import engine_from_config, pool
 from reflector.db import metadata
 from reflector.settings import settings
-from sqlalchemy import engine_from_config, pool
 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.

View File

@@ -8,7 +8,6 @@ Create Date: 2024-09-24 16:12:56.944133
 from typing import Sequence, Union
-import sqlalchemy as sa
 from alembic import op
 # revision identifiers, used by Alembic.

View File

@@ -5,11 +5,11 @@ Revises: f819277e5169
 Create Date: 2023-11-07 11:12:21.614198
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "0fea6d96b096"

View File

@@ -5,26 +5,26 @@ Revises: 0fea6d96b096
 Create Date: 2023-11-30 15:56:03.341466
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
-revision: str = '125031f7cb78'
-down_revision: Union[str, None] = '0fea6d96b096'
+revision: str = "125031f7cb78"
+down_revision: Union[str, None] = "0fea6d96b096"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('transcript', sa.Column('participants', sa.JSON(), nullable=True))
+    op.add_column("transcript", sa.Column("participants", sa.JSON(), nullable=True))
     # ### end Alembic commands ###
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('transcript', 'participants')
+    op.drop_column("transcript", "participants")
     # ### end Alembic commands ###

View File

@@ -5,6 +5,7 @@ Revises: f819277e5169
 Create Date: 2025-06-17 14:00:03.000000
 """
 from typing import Sequence, Union
 import sqlalchemy as sa
@@ -19,16 +20,16 @@ depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     op.create_table(
-        'meeting_consent',
-        sa.Column('id', sa.String(), nullable=False),
-        sa.Column('meeting_id', sa.String(), nullable=False),
-        sa.Column('user_id', sa.String(), nullable=True),
-        sa.Column('consent_given', sa.Boolean(), nullable=False),
-        sa.Column('consent_timestamp', sa.DateTime(), nullable=False),
-        sa.PrimaryKeyConstraint('id'),
-        sa.ForeignKeyConstraint(['meeting_id'], ['meeting.id']),
+        "meeting_consent",
+        sa.Column("id", sa.String(), nullable=False),
+        sa.Column("meeting_id", sa.String(), nullable=False),
+        sa.Column("user_id", sa.String(), nullable=True),
+        sa.Column("consent_given", sa.Boolean(), nullable=False),
+        sa.Column("consent_timestamp", sa.DateTime(), nullable=False),
+        sa.PrimaryKeyConstraint("id"),
+        sa.ForeignKeyConstraint(["meeting_id"], ["meeting.id"]),
     )
 def downgrade() -> None:
-    op.drop_table('meeting_consent')
+    op.drop_table("meeting_consent")

View File

@@ -5,6 +5,7 @@ Revises: 20250617140003
 Create Date: 2025-06-18 14:00:00.000000
 """
 from typing import Sequence, Union
 import sqlalchemy as sa
@@ -22,4 +23,4 @@ def upgrade() -> None:
 def downgrade() -> None:
     op.drop_column("transcript", "audio_deleted")

View File

@@ -5,36 +5,40 @@ Revises: ccd68dc784ff
 Create Date: 2025-07-15 16:53:40.397394
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
-revision: str = '2cf0b60a9d34'
-down_revision: Union[str, None] = 'ccd68dc784ff'
+revision: str = "2cf0b60a9d34"
+down_revision: Union[str, None] = "ccd68dc784ff"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('transcript', schema=None) as batch_op:
-        batch_op.alter_column('duration',
-               existing_type=sa.INTEGER(),
-               type_=sa.Float(),
-               existing_nullable=True)
+    with op.batch_alter_table("transcript", schema=None) as batch_op:
+        batch_op.alter_column(
+            "duration",
+            existing_type=sa.INTEGER(),
+            type_=sa.Float(),
+            existing_nullable=True,
+        )
     # ### end Alembic commands ###
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('transcript', schema=None) as batch_op:
-        batch_op.alter_column('duration',
-               existing_type=sa.Float(),
-               type_=sa.INTEGER(),
-               existing_nullable=True)
+    with op.batch_alter_table("transcript", schema=None) as batch_op:
+        batch_op.alter_column(
+            "duration",
+            existing_type=sa.Float(),
+            type_=sa.INTEGER(),
+            existing_nullable=True,
+        )
     # ### end Alembic commands ###

View File

@@ -5,17 +5,17 @@ Revises: 9920ecfe2735
 Create Date: 2023-11-02 19:53:09.116240
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.sql import table, column
+from alembic import op
 from sqlalchemy import select
+from sqlalchemy.sql import column, table
 # revision identifiers, used by Alembic.
-revision: str = '38a927dcb099'
-down_revision: Union[str, None] = '9920ecfe2735'
+revision: str = "38a927dcb099"
+down_revision: Union[str, None] = "9920ecfe2735"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None

View File

@@ -5,13 +5,13 @@ Revises: 38a927dcb099
 Create Date: 2023-11-10 18:12:17.886522
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.sql import table, column
+from alembic import op
 from sqlalchemy import select
+from sqlalchemy.sql import column, table
 # revision identifiers, used by Alembic.
 revision: str = "4814901632bc"
@@ -24,9 +24,11 @@ def upgrade() -> None:
     # for all the transcripts, calculate the duration from the mp3
     # and update the duration column
     from pathlib import Path
-    from reflector.settings import settings
     import av
+    from reflector.settings import settings
     bind = op.get_bind()
     transcript = table(
         "transcript", column("id", sa.String), column("duration", sa.Float)

View File

@@ -5,14 +5,11 @@ Revises:
 Create Date: 2023-08-29 10:54:45.142974
 """
 from typing import Sequence, Union
-from alembic import op
-import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision: str = '543ed284d69a'
+revision: str = "543ed284d69a"
 down_revision: Union[str, None] = None
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None

View File

@@ -8,9 +8,8 @@ Create Date: 2025-06-27 09:04:21.006823
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "62dea3db63a5"

View File

@@ -5,26 +5,28 @@ Revises: 62dea3db63a5
 Create Date: 2024-09-06 14:02:06.649665
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
-revision: str = '764ce6db4388'
-down_revision: Union[str, None] = '62dea3db63a5'
+revision: str = "764ce6db4388"
+down_revision: Union[str, None] = "62dea3db63a5"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('transcript', sa.Column('zulip_message_id', sa.Integer(), nullable=True))
+    op.add_column(
+        "transcript", sa.Column("zulip_message_id", sa.Integer(), nullable=True)
+    )
     # ### end Alembic commands ###
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('transcript', 'zulip_message_id')
+    op.drop_column("transcript", "zulip_message_id")
     # ### end Alembic commands ###

View File

@@ -9,8 +9,6 @@ Create Date: 2025-07-15 19:30:19.876332
 from typing import Sequence, Union
 from alembic import op
-import sqlalchemy as sa
 # revision identifiers, used by Alembic.
 revision: str = "88d292678ba2"
@@ -21,7 +19,7 @@ depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     import json
-    import re
     from sqlalchemy import text
     # Get database connection
@@ -58,7 +56,9 @@ def upgrade() -> None:
             fixed_events = json.dumps(jevents)
             assert "NaN" not in fixed_events
         except (json.JSONDecodeError, AssertionError) as e:
-            print(f"Warning: Invalid JSON for transcript {transcript_id}, skipping: {e}")
+            print(
+                f"Warning: Invalid JSON for transcript {transcript_id}, skipping: {e}"
+            )
             continue
     # Update the record with fixed JSON

View File

@@ -5,13 +5,13 @@ Revises: 99365b0cd87b
 Create Date: 2023-11-02 18:55:17.019498
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.sql import table, column
+from alembic import op
 from sqlalchemy import select
+from sqlalchemy.sql import column, table
 # revision identifiers, used by Alembic.
 revision: str = "9920ecfe2735"

View File

@@ -8,8 +8,8 @@ Create Date: 2023-09-01 20:19:47.216334
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "99365b0cd87b"

View File

@@ -9,8 +9,6 @@ Create Date: 2025-07-15 20:09:40.253018
 from typing import Sequence, Union
 from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
 # revision identifiers, used by Alembic.
 revision: str = "a9c9c229ee36"

View File

@@ -5,30 +5,34 @@ Revises: 6ea59639f30e
 Create Date: 2025-01-28 10:06:50.446233
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
-revision: str = 'b0e5f7876032'
-down_revision: Union[str, None] = '6ea59639f30e'
+revision: str = "b0e5f7876032"
+down_revision: Union[str, None] = "6ea59639f30e"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('meeting', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('is_active', sa.Boolean(), server_default=sa.text('1'), nullable=False))
+    with op.batch_alter_table("meeting", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "is_active", sa.Boolean(), server_default=sa.text("1"), nullable=False
+            )
+        )
     # ### end Alembic commands ###
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('meeting', schema=None) as batch_op:
-        batch_op.drop_column('is_active')
+    with op.batch_alter_table("meeting", schema=None) as batch_op:
+        batch_op.drop_column("is_active")
     # ### end Alembic commands ###

View File

@@ -8,9 +8,8 @@ Create Date: 2025-06-27 08:57:16.306940
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "b3df9681cae9"

View File

@@ -8,9 +8,8 @@ Create Date: 2024-10-11 13:45:28.914902
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "b469348df210"

View File

@@ -5,15 +5,15 @@ Revises: d7fbb74b673b
 Create Date: 2025-07-25 16:27:06.959868
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
-revision: str = 'b7df9609542c'
-down_revision: Union[str, None] = 'd7fbb74b673b'
+revision: str = "b7df9609542c"
+down_revision: Union[str, None] = "d7fbb74b673b"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
@@ -22,14 +22,14 @@ def upgrade() -> None:
     # Create a partial unique index that ensures only one active meeting per room
     # This works for both PostgreSQL and SQLite
     op.create_index(
-        'idx_one_active_meeting_per_room',
-        'meeting',
-        ['room_id'],
+        "idx_one_active_meeting_per_room",
+        "meeting",
+        ["room_id"],
         unique=True,
-        postgresql_where=sa.text('is_active = true'),
-        sqlite_where=sa.text('is_active = 1')
+        postgresql_where=sa.text("is_active = true"),
+        sqlite_where=sa.text("is_active = 1"),
     )
 def downgrade() -> None:
-    op.drop_index('idx_one_active_meeting_per_room', table_name='meeting')
+    op.drop_index("idx_one_active_meeting_per_room", table_name="meeting")

View File

@@ -5,25 +5,31 @@ Revises: 125031f7cb78
 Create Date: 2023-12-13 15:37:51.303970
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
-revision: str = 'b9348748bbbc'
-down_revision: Union[str, None] = '125031f7cb78'
+revision: str = "b9348748bbbc"
+down_revision: Union[str, None] = "125031f7cb78"
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('transcript', sa.Column('reviewed', sa.Boolean(), server_default=sa.text('0'), nullable=False))
+    op.add_column(
+        "transcript",
+        sa.Column(
+            "reviewed", sa.Boolean(), server_default=sa.text("0"), nullable=False
+        ),
+    )
     # ### end Alembic commands ###
 def downgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('transcript', 'reviewed')
+    op.drop_column("transcript", "reviewed")
     # ### end Alembic commands ###

View File

@@ -9,8 +9,6 @@ Create Date: 2025-07-15 11:48:42.854741
 from typing import Sequence, Union
 from alembic import op
-import sqlalchemy as sa
 # revision identifiers, used by Alembic.
 revision: str = "ccd68dc784ff"

View File

@@ -8,9 +8,8 @@ Create Date: 2025-06-27 09:27:25.302152
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "d3ff3a39297f"

View File

@@ -56,4 +56,4 @@ def downgrade() -> None:
op.drop_index("idx_transcript_room_id", "transcript") op.drop_index("idx_transcript_room_id", "transcript")
# Drop the room_id column # Drop the room_id column
op.drop_column("transcript", "room_id") op.drop_column("transcript", "room_id")

View File

@@ -5,11 +5,11 @@ Revises: 4814901632bc
 Create Date: 2023-11-16 10:29:09.351664
 """
 from typing import Sequence, Union
-from alembic import op
 import sqlalchemy as sa
+from alembic import op
 # revision identifiers, used by Alembic.
 revision: str = "f819277e5169"

View File

@@ -1,12 +1,13 @@
 from contextlib import asynccontextmanager
-import reflector.auth  # noqa
-import reflector.db  # noqa
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.routing import APIRoute
 from fastapi_pagination import add_pagination
 from prometheus_fastapi_instrumentator import Instrumentator
+import reflector.auth  # noqa
+import reflector.db  # noqa
 from reflector.events import subscribers_shutdown, subscribers_startup
 from reflector.logger import logger
 from reflector.metrics import metrics_init

View File

@@ -1,7 +1,8 @@
-from reflector.settings import settings
-from reflector.logger import logger
 import importlib
+from reflector.logger import logger
+from reflector.settings import settings
 logger.info(f"User authentication using {settings.AUTH_BACKEND}")
 module_name = f"reflector.auth.auth_{settings.AUTH_BACKEND}"
 auth_module = importlib.import_module(module_name)

View File

@@ -4,6 +4,7 @@ from fastapi import Depends, HTTPException
 from fastapi.security import OAuth2PasswordBearer
 from jose import JWTError, jwt
 from pydantic import BaseModel
 from reflector.logger import logger
 from reflector.settings import settings

View File

@@ -1,7 +1,8 @@
-from pydantic import BaseModel
 from typing import Annotated
 from fastapi import Depends
 from fastapi.security import OAuth2PasswordBearer
+from pydantic import BaseModel
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token", auto_error=False)

View File

@@ -1,12 +1,12 @@
 import argparse
 import asyncio
 import signal
+from typing import NoReturn
 from aiortc.contrib.signaling import add_signaling_arguments, create_signaling
 from reflector.logger import logger
 from reflector.stream_client import StreamClient
-from typing import NoReturn
 async def main() -> NoReturn:

View File

@@ -1,5 +1,6 @@
 import databases
 import sqlalchemy
 from reflector.events import subscribers_shutdown, subscribers_startup
 from reflector.settings import settings

View File

@@ -4,6 +4,7 @@ from typing import Literal
 import sqlalchemy as sa
 from fastapi import HTTPException
 from pydantic import BaseModel, Field
 from reflector.db import database, metadata
 from reflector.db.rooms import Room
 from reflector.utils import generate_uuid4

View File

@@ -3,6 +3,7 @@ from typing import Literal
 import sqlalchemy as sa
 from pydantic import BaseModel, Field
 from reflector.db import database, metadata
 from reflector.utils import generate_uuid4

View File

@@ -5,9 +5,10 @@ from typing import Literal
 import sqlalchemy
 from fastapi import HTTPException
 from pydantic import BaseModel, Field
+from sqlalchemy.sql import false, or_
 from reflector.db import database, metadata
 from reflector.utils import generate_uuid4
-from sqlalchemy.sql import false, or_
 rooms = sqlalchemy.Table(
     "room",

View File

@@ -10,13 +10,14 @@ from typing import Any, Literal
 import sqlalchemy
 from fastapi import HTTPException
 from pydantic import BaseModel, ConfigDict, Field, field_serializer
+from sqlalchemy import Enum
+from sqlalchemy.sql import false, or_
 from reflector.db import database, metadata
 from reflector.processors.types import Word as ProcessorWord
 from reflector.settings import settings
 from reflector.storage import get_transcripts_storage
 from reflector.utils import generate_uuid4
-from sqlalchemy import Enum
-from sqlalchemy.sql import false, or_
 class SourceKind(enum.StrEnum):

View File

@@ -5,11 +5,12 @@ from typing import TypeVar
 import nltk
 from prometheus_client import Counter, Histogram
+from transformers import GenerationConfig
 from reflector.llm.llm_params import TaskParams
 from reflector.logger import logger as reflector_logger
 from reflector.settings import settings
 from reflector.utils.retry import retry
-from transformers import GenerationConfig
 T = TypeVar("T", bound="LLM")

View File

@@ -1,9 +1,10 @@
 import httpx
+from transformers import AutoTokenizer, GenerationConfig
 from reflector.llm.base import LLM
 from reflector.logger import logger as reflector_logger
 from reflector.settings import settings
 from reflector.utils.retry import retry
-from transformers import AutoTokenizer, GenerationConfig
 class ModalLLM(LLM):

View File

@@ -1,5 +1,6 @@
 import httpx
 from transformers import AutoTokenizer
 from reflector.logger import logger

View File

@@ -16,8 +16,10 @@ import functools
 from contextlib import asynccontextmanager
 import boto3
-from celery import chord, group, shared_task, current_task
+from celery import chord, current_task, group, shared_task
 from pydantic import BaseModel
+from structlog import BoundLogger as Logger
 from reflector.db.meetings import meeting_consent_controller, meetings_controller
 from reflector.db.recordings import recordings_controller
 from reflector.db.rooms import rooms_controller
@@ -61,7 +63,6 @@ from reflector.zulip import (
     send_message_to_zulip,
     update_zulip_message,
 )
-from structlog import BoundLogger as Logger
 def asynctask(f):

View File

@@ -18,6 +18,7 @@ During its lifecycle, it will emit the following status:
 import asyncio
 from pydantic import BaseModel, ConfigDict
 from reflector.logger import logger
 from reflector.processors import Pipeline

View File

@@ -1,6 +1,7 @@
-from reflector.processors.base import Processor
 import av
+from reflector.processors.base import Processor
 class AudioChunkerProcessor(Processor):
     """

View File

@@ -1,4 +1,5 @@
 import httpx
 from reflector.processors.audio_diarization import AudioDiarizationProcessor
 from reflector.processors.audio_diarization_auto import AudioDiarizationAutoProcessor
 from reflector.processors.types import AudioDiarizationInput, TitleSummary

View File

@@ -1,6 +1,7 @@
 from pathlib import Path
 import av
 from reflector.processors.base import Processor

View File

@@ -1,10 +1,12 @@
-from reflector.processors.base import Processor
-from reflector.processors.types import AudioFile
+import io
 from time import monotonic_ns
 from uuid import uuid4
-import io
 import av
+from reflector.processors.base import Processor
+from reflector.processors.types import AudioFile
 class AudioMergeProcessor(Processor):
     """

View File

@@ -1,4 +1,5 @@
 from prometheus_client import Counter, Histogram
 from reflector.processors.base import Processor
 from reflector.processors.types import AudioFile, Transcript

View File

@@ -13,6 +13,7 @@ API will be a POST request to TRANSCRIPT_URL:
""" """
from openai import AsyncOpenAI from openai import AsyncOpenAI
from reflector.processors.audio_transcript import AudioTranscriptProcessor from reflector.processors.audio_transcript import AudioTranscriptProcessor
from reflector.processors.audio_transcript_auto import AudioTranscriptAutoProcessor from reflector.processors.audio_transcript_auto import AudioTranscriptAutoProcessor
from reflector.processors.types import AudioFile, Transcript, Word from reflector.processors.types import AudioFile, Transcript, Word

View File

@@ -1,4 +1,5 @@
 from faster_whisper import WhisperModel
 from reflector.processors.audio_transcript import AudioTranscriptProcessor
 from reflector.processors.audio_transcript_auto import AudioTranscriptAutoProcessor
 from reflector.processors.types import AudioFile, Transcript, Word

View File

@@ -5,6 +5,7 @@ from uuid import uuid4
 from prometheus_client import Counter, Gauge, Histogram
 from pydantic import BaseModel
 from reflector.logger import logger

View File

@@ -18,6 +18,7 @@ from llama_index.core.program import LLMTextCompletionProgram
 from llama_index.core.response_synthesizers import TreeSummarize
 from llama_index.llms.openai_like import OpenAILike
 from pydantic import BaseModel, Field
 from reflector.llm.base import LLM
 from reflector.llm.openai_llm import OpenAILLM
 from reflector.settings import settings

View File

@@ -1,4 +1,5 @@
 import httpx
 from reflector.processors.base import Processor
 from reflector.processors.types import Transcript, TranslationLanguages
 from reflector.settings import settings

View File

@@ -5,6 +5,7 @@ from pathlib import Path
 from profanityfilter import ProfanityFilter
 from pydantic import BaseModel, PrivateAttr
 from reflector.redis_cache import redis_cache
 PUNC_RE = re.compile(r"[.;:?!…]")

View File

@@ -2,6 +2,7 @@ import functools
 import json
 import redis
 from reflector.settings import settings
 redis_clients = {}

View File

@@ -1,6 +1,7 @@
 import importlib
 from pydantic import BaseModel
 from reflector.settings import settings

View File

@@ -1,4 +1,5 @@
 import aioboto3
 from reflector.logger import logger
 from reflector.storage.base import FileResult, Storage

View File

@@ -7,6 +7,7 @@ import httpx
 import stamina
 from aiortc import RTCPeerConnection, RTCSessionDescription
 from aiortc.contrib.media import MediaPlayer, MediaRelay
 from reflector.logger import logger

View File

@@ -1,6 +1,7 @@
 import asyncio
 import av
 from reflector.logger import logger
 from reflector.processors import (
     AudioChunkerProcessor,

View File

@@ -9,17 +9,18 @@ This tool processes audio files locally without requiring the full server infras
 import asyncio
 import tempfile
+import uuid
 from pathlib import Path
 from typing import List
-import uuid
 import av
 from reflector.logger import logger
 from reflector.processors import (
     AudioChunkerProcessor,
+    AudioFileWriterProcessor,
     AudioMergeProcessor,
     AudioTranscriptAutoProcessor,
-    AudioFileWriterProcessor,
     Pipeline,
     PipelineEvent,
     TranscriptFinalSummaryProcessor,
@@ -155,9 +156,10 @@ async def process_audio_file_with_diarization(
     # For Modal backend, we need to upload the file to S3 first
     if diarization_backend == "modal":
+        from datetime import datetime
         from reflector.storage import get_transcripts_storage
         from reflector.utils.s3_temp_file import S3TemporaryFile
-        from datetime import datetime
         storage = get_transcripts_storage()

View File

@@ -8,7 +8,6 @@ This script helps test the diarization functionality with sample audio files.
""" """
import asyncio import asyncio
import json
import sys import sys
from pathlib import Path from pathlib import Path
@@ -17,23 +16,20 @@ from reflector.logger import logger
async def test_diarization(audio_file: str): async def test_diarization(audio_file: str):
"""Test the diarization functionality""" """Test the diarization functionality"""
# Import the processing function # Import the processing function
from process_with_diarization import process_audio_file_with_diarization from process_with_diarization import process_audio_file_with_diarization
# Collect events # Collect events
events = [] events = []
async def event_callback(event): async def event_callback(event):
events.append({ events.append({"processor": event.processor, "data": event.data})
"processor": event.processor,
"data": event.data
})
logger.info(f"Event from {event.processor}") logger.info(f"Event from {event.processor}")
# Process the audio file # Process the audio file
logger.info(f"Processing audio file: {audio_file}") logger.info(f"Processing audio file: {audio_file}")
try: try:
await process_audio_file_with_diarization( await process_audio_file_with_diarization(
audio_file, audio_file,
@@ -44,10 +40,10 @@ async def test_diarization(audio_file: str):
enable_diarization=True, enable_diarization=True,
diarization_backend="modal", diarization_backend="modal",
) )
# Analyze results # Analyze results
logger.info(f"Processing complete. Received {len(events)} events") logger.info(f"Processing complete. Received {len(events)} events")
# Look for diarization results # Look for diarization results
diarized_topics = [] diarized_topics = []
for event in events: for event in events:
@@ -56,13 +52,17 @@ async def test_diarization(audio_file: str):
if hasattr(event["data"], "transcript") and event["data"].transcript: if hasattr(event["data"], "transcript") and event["data"].transcript:
words = event["data"].transcript.words words = event["data"].transcript.words
if words and hasattr(words[0], "speaker"): if words and hasattr(words[0], "speaker"):
speakers = set(w.speaker for w in words if hasattr(w, "speaker")) speakers = set(
logger.info(f"Found {len(speakers)} speakers in topic: {event['data'].title}") w.speaker for w in words if hasattr(w, "speaker")
)
logger.info(
f"Found {len(speakers)} speakers in topic: {event['data'].title}"
)
diarized_topics.append(event["data"]) diarized_topics.append(event["data"])
if diarized_topics: if diarized_topics:
logger.info(f"Successfully diarized {len(diarized_topics)} topics") logger.info(f"Successfully diarized {len(diarized_topics)} topics")
# Print sample output # Print sample output
sample_topic = diarized_topics[0] sample_topic = diarized_topics[0]
logger.info("Sample diarized output:") logger.info("Sample diarized output:")
@@ -70,9 +70,9 @@ async def test_diarization(audio_file: str):
logger.info(f" Word {i}: '{word.text}' - Speaker {word.speaker}") logger.info(f" Word {i}: '{word.text}' - Speaker {word.speaker}")
else: else:
logger.warning("No diarization results found in output") logger.warning("No diarization results found in output")
return events return events
except Exception as e: except Exception as e:
logger.error(f"Error during processing: {e}") logger.error(f"Error during processing: {e}")
raise raise
@@ -82,15 +82,15 @@ def main():
if len(sys.argv) < 2: if len(sys.argv) < 2:
print("Usage: python test_diarization.py <audio_file>") print("Usage: python test_diarization.py <audio_file>")
sys.exit(1) sys.exit(1)
audio_file = sys.argv[1] audio_file = sys.argv[1]
if not Path(audio_file).exists(): if not Path(audio_file).exists():
print(f"Error: Audio file '{audio_file}' not found") print(f"Error: Audio file '{audio_file}' not found")
sys.exit(1) sys.exit(1)
# Run the test # Run the test
asyncio.run(test_diarization(audio_file)) asyncio.run(test_diarization(audio_file))
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -1,8 +1,10 @@
-from reflector.logger import logger
-from time import monotonic
-from httpx import HTTPStatusError, Response
-from random import random
 import asyncio
+from random import random
+from time import monotonic
+from httpx import HTTPStatusError, Response
+from reflector.logger import logger
 class RetryException(Exception):

View File

@@ -6,8 +6,9 @@ Provides automatic cleanup of S3 files with retry logic and proper error handlin
""" """
from typing import Optional from typing import Optional
from reflector.storage.base import Storage
from reflector.logger import logger from reflector.logger import logger
from reflector.storage.base import Storage
from reflector.utils.retry import retry from reflector.utils.retry import retry

View File

@@ -1,10 +1,10 @@
 from datetime import datetime
 from typing import Annotated, Optional
-import reflector.auth as auth
-from fastapi import APIRouter, HTTPException, Request, Depends
+from fastapi import APIRouter, Depends, HTTPException, Request
 from pydantic import BaseModel
+import reflector.auth as auth
 from reflector.db.meetings import (
     MeetingConsent,
     meeting_consent_controller,

View File

@@ -1,19 +1,20 @@
-from datetime import datetime, timedelta
-from typing import Annotated, Optional, Literal
 import logging
+import sqlite3
+from datetime import datetime, timedelta
+from typing import Annotated, Literal, Optional
-import reflector.auth as auth
+import asyncpg.exceptions
 from fastapi import APIRouter, Depends, HTTPException
 from fastapi_pagination import Page
 from fastapi_pagination.ext.databases import paginate
 from pydantic import BaseModel
+import reflector.auth as auth
 from reflector.db import database
 from reflector.db.meetings import meetings_controller
 from reflector.db.rooms import rooms_controller
 from reflector.settings import settings
 from reflector.whereby import create_meeting, upload_logo
-import asyncpg.exceptions
-import sqlite3
 logger = logging.getLogger(__name__)

View File

@@ -6,6 +6,7 @@ from aiortc import MediaStreamTrack, RTCPeerConnection, RTCSessionDescription
 from fastapi import APIRouter, Request
 from prometheus_client import Gauge
 from pydantic import BaseModel
 from reflector.events import subscribers_shutdown
 from reflector.logger import logger
 from reflector.pipelines.runner import PipelineRunner

View File

@@ -1,12 +1,13 @@
 from datetime import datetime, timedelta, timezone
 from typing import Annotated, Literal, Optional
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException
 from fastapi_pagination import Page
 from fastapi_pagination.ext.databases import paginate
 from jose import jwt
 from pydantic import BaseModel, Field, field_serializer
+import reflector.auth as auth
 from reflector.db.meetings import meetings_controller
 from reflector.db.rooms import rooms_controller
 from reflector.db.transcripts import (

View File

@@ -7,9 +7,10 @@ Transcripts audio related endpoints
 from typing import Annotated, Optional
 import httpx
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException, Request, Response, status
 from jose import jwt
+import reflector.auth as auth
 from reflector.db.transcripts import AudioWaveform, transcripts_controller
 from reflector.settings import settings
 from reflector.views.transcripts import ALGORITHM

View File

@@ -6,9 +6,10 @@ Transcript participants API endpoints
 from typing import Annotated, Optional
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException
 from pydantic import BaseModel, ConfigDict, Field
+import reflector.auth as auth
 from reflector.db.transcripts import TranscriptParticipant, transcripts_controller
 from reflector.views.types import DeletionStatus

View File

@@ -1,9 +1,10 @@
 from typing import Annotated, Optional
 import celery
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException
 from pydantic import BaseModel
+import reflector.auth as auth
 from reflector.db.transcripts import transcripts_controller
 from reflector.pipelines.main_live_pipeline import task_pipeline_process

View File

@@ -6,9 +6,10 @@ Reassign speakers in a transcript
 from typing import Annotated, Optional
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException
 from pydantic import BaseModel, Field
+import reflector.auth as auth
 from reflector.db.transcripts import transcripts_controller
 router = APIRouter()

View File

@@ -1,9 +1,10 @@
 from typing import Annotated, Optional
 import av
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException, UploadFile
 from pydantic import BaseModel
+import reflector.auth as auth
 from reflector.db.transcripts import transcripts_controller
 from reflector.pipelines.main_live_pipeline import task_pipeline_process

View File

@@ -1,7 +1,8 @@
 from typing import Annotated, Optional
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException, Request
+import reflector.auth as auth
 from reflector.db.transcripts import transcripts_controller
 from .rtc_offer import RtcOffer, rtc_offer_base

View File

@@ -5,6 +5,7 @@ Transcripts websocket API
""" """
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect from fastapi import APIRouter, HTTPException, WebSocket, WebSocketDisconnect
from reflector.db.transcripts import transcripts_controller from reflector.db.transcripts import transcripts_controller
from reflector.ws_manager import get_ws_manager from reflector.ws_manager import get_ws_manager

View File

@@ -1,9 +1,10 @@
 from typing import Annotated, Optional
-import reflector.auth as auth
 from fastapi import APIRouter, Depends
 from pydantic import BaseModel
+import reflector.auth as auth
 router = APIRouter()

View File

@@ -7,6 +7,7 @@ from hashlib import sha256
 from fastapi import APIRouter, HTTPException, Request
 from pydantic import BaseModel
 from reflector.db.meetings import meetings_controller
 from reflector.settings import settings

View File

@@ -1,8 +1,9 @@
 from typing import Annotated, Optional
-import reflector.auth as auth
 from fastapi import APIRouter, Depends, HTTPException
 from pydantic import BaseModel
+import reflector.auth as auth
 from reflector.zulip import get_zulip_streams, get_zulip_topics
 router = APIRouter()

View File

@@ -1,6 +1,7 @@
 from datetime import datetime
 import httpx
 from reflector.db.rooms import Room
 from reflector.settings import settings

View File

@@ -2,6 +2,7 @@ import celery
 import structlog
 from celery import Celery
 from celery.schedules import crontab
 from reflector.settings import settings
 logger = structlog.get_logger(__name__)

View File

@@ -1,6 +1,7 @@
 import httpx
 import structlog
 from celery import shared_task
 from reflector.settings import settings
 logger = structlog.get_logger(__name__)

View File

@@ -9,6 +9,7 @@ import structlog
 from celery import shared_task
 from celery.utils.log import get_task_logger
 from pydantic import ValidationError
 from reflector.db.meetings import meetings_controller
 from reflector.db.recordings import Recording, recordings_controller
 from reflector.db.rooms import rooms_controller

View File

@@ -15,6 +15,7 @@ import threading
 import redis.asyncio as redis
 from fastapi import WebSocket
 from reflector.settings import settings

View File

@@ -2,6 +2,7 @@ from datetime import timedelta
 from urllib.parse import urlparse
 import httpx
 from reflector.db.transcripts import Transcript
 from reflector.settings import settings

View File

@@ -1,5 +1,5 @@
-from unittest.mock import patch
 from tempfile import NamedTemporaryFile
+from unittest.mock import patch
 import pytest
@@ -178,14 +178,16 @@ def fake_mp3_upload():
 @pytest.fixture
 async def fake_transcript_with_topics(tmpdir):
-    from reflector.settings import settings
+    import shutil
+    from pathlib import Path
+    from httpx import AsyncClient
     from reflector.app import app
-    from reflector.views.transcripts import transcripts_controller
     from reflector.db.transcripts import TranscriptTopic
     from reflector.processors.types import Word
-    from pathlib import Path
-    from httpx import AsyncClient
-    import shutil
+    from reflector.settings import settings
+    from reflector.views.transcripts import transcripts_controller
     settings.DATA_DIR = Path(tmpdir)

View File

@@ -1,6 +1,7 @@
-import pytest
 from unittest import mock
+import pytest
 @pytest.mark.parametrize(
     "name,diarization,expected",
@@ -87,10 +88,10 @@ from unittest import mock
 async def test_processors_audio_diarization(name, diarization, expected):
     from reflector.processors.audio_diarization import AudioDiarizationProcessor
     from reflector.processors.types import (
+        AudioDiarizationInput,
         TitleSummaryWithId,
         Transcript,
         Word,
-        AudioDiarizationInput,
     )
     # create fake topic

View File

@@ -3,7 +3,7 @@ import pytest
 @pytest.mark.asyncio
 async def test_processor_broadcast(nltk):
-    from reflector.processors.base import Processor, BroadcastProcessor, Pipeline
+    from reflector.processors.base import BroadcastProcessor, Pipeline, Processor
     class TestProcessor(Processor):
         INPUT_TYPE = str

View File

@@ -11,10 +11,11 @@ async def test_basic_process(
 ):
     # goal is to start the server, and send rtc audio to it
     # validate the events received
-    from reflector.tools.process import process_audio_file
-    from reflector.settings import settings
     from pathlib import Path
+    from reflector.settings import settings
+    from reflector.tools.process import process_audio_file
     # use an LLM test backend
     settings.LLM_BACKEND = "test"
     settings.TRANSCRIPT_BACKEND = "whisper"

View File

@@ -3,8 +3,10 @@
 Tests for S3 temporary file context manager.
 """
+from unittest.mock import AsyncMock, Mock
 import pytest
-from unittest.mock import Mock, AsyncMock
 from reflector.utils.s3_temp_file import S3TemporaryFile

View File

@@ -1,13 +1,14 @@
-import pytest
 import shutil
-from httpx import AsyncClient
 from pathlib import Path
+import pytest
+from httpx import AsyncClient
 @pytest.fixture
 async def fake_transcript(tmpdir):
-    from reflector.settings import settings
     from reflector.app import app
+    from reflector.settings import settings
     from reflector.views.transcripts import transcripts_controller
     settings.DATA_DIR = Path(tmpdir)

View File

@@ -33,8 +33,8 @@ class ThreadedUvicorn:
 @pytest.fixture
 async def appserver(tmpdir, setup_database, celery_session_app, celery_session_worker):
-    from reflector.settings import settings
     from reflector.app import app
+    from reflector.settings import settings
     DATA_DIR = settings.DATA_DIR
     settings.DATA_DIR = Path(tmpdir)
@@ -110,9 +110,11 @@ async def test_transcript_rtc_and_websocket(
     # create stream client
     import argparse
-    from reflector.stream_client import StreamClient
     from aiortc.contrib.signaling import add_signaling_arguments, create_signaling
+    from reflector.stream_client import StreamClient
     parser = argparse.ArgumentParser()
     add_signaling_arguments(parser)
     args = parser.parse_args(["-s", "tcp-socket"])
@@ -270,9 +272,11 @@ async def test_transcript_rtc_and_websocket_and_fr(
     # create stream client
     import argparse
-    from reflector.stream_client import StreamClient
     from aiortc.contrib.signaling import add_signaling_arguments, create_signaling
+    from reflector.stream_client import StreamClient
     parser = argparse.ArgumentParser()
     add_signaling_arguments(parser)
     args = parser.parse_args(["-s", "tcp-socket"])