feat: full backend (untested)
backend/.dockerignore (Normal file, 12 lines)
@@ -0,0 +1,12 @@
__pycache__
*.py[cod]
.git
.gitignore
.env
.venv
venv/
.uv/
.pytest_cache/
.ruff_cache/
.mypy_cache/
tests/
backend/.gitignore (Normal file, vendored, 13 lines)
@@ -0,0 +1,13 @@
__pycache__/
*.py[cod]
*$py.class
.Python
*.so
.env
.venv
venv/
.uv/
*.egg-info/
.pytest_cache/
.ruff_cache/
.mypy_cache/
backend/Dockerfile (Normal file, 18 lines)
@@ -0,0 +1,18 @@
FROM python:3.12-slim

WORKDIR /app

RUN pip install uv

COPY pyproject.toml .
RUN uv sync --frozen --no-dev 2>/dev/null || uv sync --no-dev

COPY alembic.ini .
COPY alembic/ alembic/
COPY src/ src/

ENV PYTHONPATH=/app/src

EXPOSE 8000

CMD ["uv", "run", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
backend/alembic.ini (Normal file, 38 lines)
@@ -0,0 +1,38 @@
[alembic]
script_location = alembic
prepend_sys_path = src
sqlalchemy.url = postgresql://postgres:postgres@db:5432/availability

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
backend/alembic/env.py (Normal file, 43 lines)
@@ -0,0 +1,43 @@
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from app.models import Base

config = context.config
fileConfig(config.config_file_name)
target_metadata = Base.metadata


def run_migrations_offline():
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
backend/alembic/script.py.mako (Normal file, 25 lines)
@@ -0,0 +1,25 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
backend/alembic/versions/001_initial.py (Normal file, 48 lines)
@@ -0,0 +1,48 @@
"""Initial migration

Revision ID: 001
Revises:
Create Date: 2024-01-08

"""
from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

revision: str = "001"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "participants",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("name", sa.String(255), nullable=False),
        sa.Column("email", sa.String(255), nullable=False),
        sa.Column("ics_url", sa.Text(), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
    )

    op.create_table(
        "busy_blocks",
        sa.Column("id", sa.UUID(), nullable=False),
        sa.Column("participant_id", sa.UUID(), nullable=False),
        sa.Column("start_time", sa.DateTime(timezone=True), nullable=False),
        sa.Column("end_time", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "ix_busy_blocks_participant_id", "busy_blocks", ["participant_id"]
    )


def downgrade() -> None:
    op.drop_index("ix_busy_blocks_participant_id", table_name="busy_blocks")
    op.drop_table("busy_blocks")
    op.drop_table("participants")
backend/pyproject.toml (Normal file, 36 lines)
@@ -0,0 +1,36 @@
[project]
name = "common-availability"
version = "0.1.0"
description = "Calendar availability coordination service"
requires-python = ">=3.12"
dependencies = [
    "fastapi>=0.115.0",
    "uvicorn[standard]>=0.32.0",
    "sqlalchemy>=2.0.0",
    "alembic>=1.14.0",
    "asyncpg>=0.30.0",
    "psycopg2-binary>=2.9.0",
    "httpx>=0.28.0",
    "icalendar>=6.0.0",
    "python-dateutil>=2.9.0",
    "pydantic[email]>=2.10.0",
    "pydantic-settings>=2.6.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.3.0",
    "pytest-asyncio>=0.24.0",
    "ruff>=0.8.0",
]

[tool.ruff]
line-length = 88
target-version = "py312"

[tool.ruff.lint]
select = ["E", "F", "I", "N", "W"]

[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
backend/src/app/__init__.py (Normal file, 0 lines)
backend/src/app/availability_service.py (Normal file, 105 lines)
@@ -0,0 +1,105 @@
from datetime import datetime, timedelta, timezone
from uuid import UUID

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import BusyBlock, Participant


def get_week_boundaries(reference_date: datetime | None = None) -> tuple[datetime, datetime]:
    if reference_date is None:
        reference_date = datetime.now(timezone.utc)

    days_since_monday = reference_date.weekday()
    monday = reference_date - timedelta(days=days_since_monday)
    monday = monday.replace(hour=0, minute=0, second=0, microsecond=0)

    friday = monday + timedelta(days=4)
    friday = friday.replace(hour=23, minute=59, second=59, microsecond=999999)

    return monday, friday


async def get_busy_blocks_for_participants(
    db: AsyncSession,
    participant_ids: list[UUID],
    start_time: datetime,
    end_time: datetime,
) -> dict[UUID, list[tuple[datetime, datetime]]]:
    stmt = select(BusyBlock).where(
        BusyBlock.participant_id.in_(participant_ids),
        BusyBlock.start_time < end_time,
        BusyBlock.end_time > start_time,
    )
    result = await db.execute(stmt)
    blocks = result.scalars().all()

    busy_map: dict[UUID, list[tuple[datetime, datetime]]] = {
        pid: [] for pid in participant_ids
    }
    for block in blocks:
        busy_map[block.participant_id].append((block.start_time, block.end_time))

    return busy_map


def is_participant_free(
    busy_blocks: list[tuple[datetime, datetime]],
    slot_start: datetime,
    slot_end: datetime,
) -> bool:
    for block_start, block_end in busy_blocks:
        if block_start < slot_end and block_end > slot_start:
            return False
    return True


async def calculate_availability(
    db: AsyncSession,
    participant_ids: list[UUID],
    reference_date: datetime | None = None,
) -> list[dict]:
    week_start, week_end = get_week_boundaries(reference_date)
    busy_map = await get_busy_blocks_for_participants(
        db, participant_ids, week_start, week_end
    )

    participants_stmt = select(Participant).where(Participant.id.in_(participant_ids))
    participants_result = await db.execute(participants_stmt)
    participants = {p.id: p for p in participants_result.scalars().all()}

    days = ["Mon", "Tue", "Wed", "Thu", "Fri"]
    hours = list(range(9, 18))
    slots = []

    for day_offset, day_name in enumerate(days):
        for hour in hours:
            slot_start = week_start + timedelta(days=day_offset, hours=hour)
            slot_end = slot_start + timedelta(hours=1)

            available_participants = []
            for pid in participant_ids:
                if is_participant_free(busy_map.get(pid, []), slot_start, slot_end):
                    participant = participants.get(pid)
                    if participant:
                        available_participants.append(participant.name)

            total = len(participant_ids)
            available_count = len(available_participants)

            if available_count == total:
                availability = "full"
            elif available_count > 0:
                availability = "partial"
            else:
                availability = "none"

            slots.append({
                "day": day_name,
                "hour": hour,
                "availability": availability,
                "availableParticipants": available_participants,
            })

    return slots
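
A quick sketch (not part of the commit) of how the strict-inequality overlap test behaves: a block that merely touches a slot boundary does not conflict, while any real overlap marks the participant busy. The dates below are made up for illustration.

from datetime import datetime, timedelta, timezone

from app.availability_service import get_week_boundaries, is_participant_free

# Week of Mon 2026-01-05; the first Monday slot runs 09:00-10:00 UTC.
monday, _ = get_week_boundaries(datetime(2026, 1, 7, tzinfo=timezone.utc))
slot_start = monday + timedelta(hours=9)
slot_end = slot_start + timedelta(hours=1)

# Back-to-back block starting exactly at slot_end: still counted as free.
assert is_participant_free([(slot_end, slot_end + timedelta(hours=1))], slot_start, slot_end)
# Block covering the second half of the slot: counted as busy.
assert not is_participant_free(
    [(slot_start + timedelta(minutes=30), slot_end)], slot_start, slot_end
)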
backend/src/app/config.py (Normal file, 13 lines)
@@ -0,0 +1,13 @@
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    database_url: str = "postgresql+asyncpg://postgres:postgres@db:5432/availability"
    sync_database_url: str = "postgresql://postgres:postgres@db:5432/availability"
    ics_refresh_interval_minutes: int = 15

    class Config:
        env_file = ".env"


settings = Settings()
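
Because Settings is a pydantic-settings BaseSettings, each field can also be supplied through the environment (matching is case-insensitive by default) or the .env file, which is how a deployment would point the app at a different database. A minimal sketch with a made-up URL:

import os

from app.config import Settings

# The override must be in place before a Settings instance is constructed.
os.environ["DATABASE_URL"] = "postgresql+asyncpg://user:secret@localhost:5432/availability"

assert Settings().database_url.endswith("localhost:5432/availability")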
backend/src/app/database.py (Normal file, 13 lines)
@@ -0,0 +1,13 @@
from collections.abc import AsyncGenerator

from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

from app.config import settings

engine = create_async_engine(settings.database_url, echo=False)
async_session_maker = async_sessionmaker(engine, expire_on_commit=False)


async def get_db() -> AsyncGenerator[AsyncSession, None]:
    async with async_session_maker() as session:
        yield session
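
The same session factory works outside FastAPI's Depends wiring, for example in a one-off script; a sketch that assumes the configured database is reachable:

import asyncio

from sqlalchemy import select

from app.database import async_session_maker
from app.models import Participant


async def main() -> None:
    # Open a session directly from the factory and list registered participants.
    async with async_session_maker() as session:
        result = await session.execute(select(Participant))
        for participant in result.scalars():
            print(participant.name, participant.email)


asyncio.run(main())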
backend/src/app/ics_service.py (Normal file, 88 lines)
@@ -0,0 +1,88 @@
import logging
from datetime import datetime, timezone

import httpx
from icalendar import Calendar
from sqlalchemy import delete
from sqlalchemy.ext.asyncio import AsyncSession

from app.models import BusyBlock, Participant

logger = logging.getLogger(__name__)


async def fetch_ics_content(url: str) -> str:
    async with httpx.AsyncClient(timeout=30.0) as client:
        response = await client.get(url)
        response.raise_for_status()
        return response.text


def parse_ics_to_busy_blocks(
    ics_content: str, participant_id: str
) -> list[BusyBlock]:
    calendar = Calendar.from_ical(ics_content)
    blocks = []

    for component in calendar.walk():
        if component.name == "VEVENT":
            dtstart = component.get("dtstart")
            dtend = component.get("dtend")

            if dtstart is None or dtend is None:
                continue

            start_dt = dtstart.dt
            end_dt = dtend.dt

            if not isinstance(start_dt, datetime):
                start_dt = datetime.combine(start_dt, datetime.min.time())
            if not isinstance(end_dt, datetime):
                end_dt = datetime.combine(end_dt, datetime.min.time())

            if start_dt.tzinfo is None:
                start_dt = start_dt.replace(tzinfo=timezone.utc)
            if end_dt.tzinfo is None:
                end_dt = end_dt.replace(tzinfo=timezone.utc)

            blocks.append(
                BusyBlock(
                    participant_id=participant_id,
                    start_time=start_dt,
                    end_time=end_dt,
                )
            )

    return blocks


async def sync_participant_calendar(
    db: AsyncSession, participant: Participant
) -> int:
    logger.info(f"Syncing calendar for {participant.email}")

    ics_content = await fetch_ics_content(participant.ics_url)
    blocks = parse_ics_to_busy_blocks(ics_content, str(participant.id))

    await db.execute(
        delete(BusyBlock).where(BusyBlock.participant_id == participant.id)
    )

    for block in blocks:
        db.add(block)

    await db.commit()
    logger.info(f"Synced {len(blocks)} busy blocks for {participant.email}")
    return len(blocks)


async def sync_all_calendars(db: AsyncSession, participants: list[Participant]) -> dict:
    results = {}
    for participant in participants:
        try:
            count = await sync_participant_calendar(db, participant)
            results[str(participant.id)] = {"status": "success", "blocks": count}
        except Exception as e:
            logger.error(f"Failed to sync {participant.email}: {e}")
            results[str(participant.id)] = {"status": "error", "error": str(e)}
    return results
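
The sync path is fetch, parse, replace: download the feed, turn each VEVENT into a BusyBlock, delete the participant's old blocks, insert the new ones. A standalone sketch of the first two steps; the feed URL and the participant id below are placeholders, not values from the commit:

import asyncio
import uuid

from app.ics_service import fetch_ics_content, parse_ics_to_busy_blocks


async def main() -> None:
    # Placeholder feed address; any published iCalendar URL would do here.
    ics_text = await fetch_ics_content("https://example.com/team/ada.ics")
    blocks = parse_ics_to_busy_blocks(ics_text, participant_id=str(uuid.uuid4()))
    for block in blocks:
        print(block.start_time, "->", block.end_time)


asyncio.run(main())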
backend/src/app/main.py (Normal file, 127 lines)
@@ -0,0 +1,127 @@
import logging
from uuid import UUID

from fastapi import Depends, FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.availability_service import calculate_availability
from app.database import get_db
from app.ics_service import sync_all_calendars, sync_participant_calendar
from app.models import Participant
from app.schemas import (
    AvailabilityRequest,
    AvailabilityResponse,
    ParticipantCreate,
    ParticipantResponse,
    SyncResponse,
)

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title="Common Availability API")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/health")
async def health_check():
    return {"status": "healthy"}


@app.post("/api/participants", response_model=ParticipantResponse)
async def create_participant(
    data: ParticipantCreate, db: AsyncSession = Depends(get_db)
):
    existing = await db.execute(
        select(Participant).where(Participant.email == data.email)
    )
    if existing.scalar_one_or_none():
        raise HTTPException(status_code=400, detail="Email already registered")

    participant = Participant(
        name=data.name,
        email=data.email,
        ics_url=data.ics_url,
    )
    db.add(participant)
    await db.commit()
    await db.refresh(participant)

    try:
        await sync_participant_calendar(db, participant)
    except Exception as e:
        logger.warning(f"Initial sync failed for {participant.email}: {e}")

    return participant


@app.get("/api/participants", response_model=list[ParticipantResponse])
async def list_participants(db: AsyncSession = Depends(get_db)):
    result = await db.execute(select(Participant))
    return result.scalars().all()


@app.get("/api/participants/{participant_id}", response_model=ParticipantResponse)
async def get_participant(participant_id: UUID, db: AsyncSession = Depends(get_db)):
    result = await db.execute(
        select(Participant).where(Participant.id == participant_id)
    )
    participant = result.scalar_one_or_none()
    if not participant:
        raise HTTPException(status_code=404, detail="Participant not found")
    return participant


@app.delete("/api/participants/{participant_id}")
async def delete_participant(participant_id: UUID, db: AsyncSession = Depends(get_db)):
    result = await db.execute(
        select(Participant).where(Participant.id == participant_id)
    )
    participant = result.scalar_one_or_none()
    if not participant:
        raise HTTPException(status_code=404, detail="Participant not found")

    await db.delete(participant)
    await db.commit()
    return {"status": "deleted"}


@app.post("/api/availability", response_model=AvailabilityResponse)
async def get_availability(
    request: AvailabilityRequest, db: AsyncSession = Depends(get_db)
):
    slots = await calculate_availability(db, request.participant_ids)
    return {"slots": slots}


@app.post("/api/sync", response_model=SyncResponse)
async def sync_calendars(db: AsyncSession = Depends(get_db)):
    result = await db.execute(select(Participant))
    participants = result.scalars().all()
    results = await sync_all_calendars(db, list(participants))
    return {"results": results}


@app.post("/api/sync/{participant_id}")
async def sync_participant(participant_id: UUID, db: AsyncSession = Depends(get_db)):
    result = await db.execute(
        select(Participant).where(Participant.id == participant_id)
    )
    participant = result.scalar_one_or_none()
    if not participant:
        raise HTTPException(status_code=404, detail="Participant not found")

    try:
        count = await sync_participant_calendar(db, participant)
        return {"status": "success", "blocks_synced": count}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
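
A sketch of exercising the endpoints with httpx once the service is running; the base URL and the ICS address are assumptions, not part of the commit:

import httpx

BASE_URL = "http://localhost:8000"  # assumed local deployment

with httpx.Client(base_url=BASE_URL) as client:
    # Register a participant; the email and calendar URL are illustrative.
    created = client.post(
        "/api/participants",
        json={
            "name": "Ada",
            "email": "ada@example.com",
            "ics_url": "https://example.com/team/ada.ics",
        },
    ).json()

    # Ask for the weekly grid covering just that participant.
    availability = client.post(
        "/api/availability", json={"participant_ids": [created["id"]]}
    ).json()
    print(availability["slots"][0])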
backend/src/app/models.py (Normal file, 40 lines)
@@ -0,0 +1,40 @@
import uuid
from datetime import datetime

from sqlalchemy import DateTime, String, Text
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Participant(Base):
    __tablename__ = "participants"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4
    )
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    email: Mapped[str] = mapped_column(String(255), nullable=False, unique=True)
    ics_url: Mapped[str] = mapped_column(Text, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime, default=datetime.utcnow, nullable=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )


class BusyBlock(Base):
    __tablename__ = "busy_blocks"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4
    )
    participant_id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), nullable=False, index=True
    )
    start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    end_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
backend/src/app/schemas.py (Normal file, 41 lines)
@@ -0,0 +1,41 @@
from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, EmailStr


class ParticipantCreate(BaseModel):
    name: str
    email: EmailStr
    ics_url: str


class ParticipantResponse(BaseModel):
    id: UUID
    name: str
    email: str
    ics_url: str
    created_at: datetime
    updated_at: datetime

    class Config:
        from_attributes = True


class TimeSlot(BaseModel):
    day: str
    hour: int
    availability: str
    availableParticipants: list[str]


class AvailabilityRequest(BaseModel):
    participant_ids: list[UUID]


class AvailabilityResponse(BaseModel):
    slots: list[TimeSlot]


class SyncResponse(BaseModel):
    results: dict[str, dict]
backend/tests/__init__.py (Normal file, 0 lines)
backend/tests/conftest.py (Normal file, 16 lines)
@@ -0,0 +1,16 @@
import pytest


@pytest.fixture
def sample_ics():
    return """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Test//Test//EN
BEGIN:VEVENT
DTSTART:20260106T150000Z
DTEND:20260106T160000Z
SUMMARY:Meeting
UID:test-1
DTSTAMP:20260101T000000Z
END:VEVENT
END:VCALENDAR"""
backend/tests/test_availability.py (Normal file, 73 lines)
@@ -0,0 +1,73 @@
from datetime import datetime, timedelta, timezone

from app.availability_service import (
    get_week_boundaries,
    is_participant_free,
)


def test_get_week_boundaries_returns_monday_to_friday():
    wednesday = datetime(2026, 1, 7, 12, 0, 0, tzinfo=timezone.utc)
    monday, friday = get_week_boundaries(wednesday)

    assert monday.weekday() == 0
    assert friday.weekday() == 4
    assert monday.hour == 0
    assert friday.hour == 23


def test_get_week_boundaries_monday_input():
    monday_input = datetime(2026, 1, 5, 12, 0, 0, tzinfo=timezone.utc)
    monday, friday = get_week_boundaries(monday_input)

    assert monday.day == 5
    assert friday.day == 9


def test_is_participant_free_no_blocks():
    slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc)
    slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc)

    assert is_participant_free([], slot_start, slot_end) is True


def test_is_participant_free_with_non_overlapping_block():
    slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc)
    slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc)

    busy_blocks = [
        (
            datetime(2026, 1, 6, 14, 0, 0, tzinfo=timezone.utc),
            datetime(2026, 1, 6, 15, 0, 0, tzinfo=timezone.utc),
        )
    ]

    assert is_participant_free(busy_blocks, slot_start, slot_end) is True


def test_is_participant_busy_with_overlapping_block():
    slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc)
    slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc)

    busy_blocks = [
        (
            datetime(2026, 1, 6, 10, 30, 0, tzinfo=timezone.utc),
            datetime(2026, 1, 6, 11, 30, 0, tzinfo=timezone.utc),
        )
    ]

    assert is_participant_free(busy_blocks, slot_start, slot_end) is False


def test_is_participant_busy_with_containing_block():
    slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc)
    slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc)

    busy_blocks = [
        (
            datetime(2026, 1, 6, 9, 0, 0, tzinfo=timezone.utc),
            datetime(2026, 1, 6, 12, 0, 0, tzinfo=timezone.utc),
        )
    ]

    assert is_participant_free(busy_blocks, slot_start, slot_end) is False
backend/tests/test_ics_parsing.py (Normal file, 58 lines)
@@ -0,0 +1,58 @@
import uuid

import pytest

from app.ics_service import parse_ics_to_busy_blocks

SAMPLE_ICS = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Test//Test//EN
BEGIN:VEVENT
DTSTART:20260106T150000Z
DTEND:20260106T160000Z
SUMMARY:Meeting
UID:test-1
DTSTAMP:20260101T000000Z
END:VEVENT
BEGIN:VEVENT
DTSTART:20260107T140000Z
DTEND:20260107T153000Z
SUMMARY:Another Meeting
UID:test-2
DTSTAMP:20260101T000000Z
END:VEVENT
END:VCALENDAR"""


def test_parse_ics_extracts_busy_blocks():
    participant_id = str(uuid.uuid4())
    blocks = parse_ics_to_busy_blocks(SAMPLE_ICS, participant_id)

    assert len(blocks) == 2
    assert all(str(b.participant_id) == participant_id for b in blocks)


def test_parse_ics_extracts_correct_times():
    participant_id = str(uuid.uuid4())
    blocks = parse_ics_to_busy_blocks(SAMPLE_ICS, participant_id)

    sorted_blocks = sorted(blocks, key=lambda b: b.start_time)

    assert sorted_blocks[0].start_time.hour == 15
    assert sorted_blocks[0].end_time.hour == 16

    assert sorted_blocks[1].start_time.hour == 14
    assert sorted_blocks[1].end_time.hour == 15
    assert sorted_blocks[1].end_time.minute == 30


def test_parse_empty_ics():
    empty_ics = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Test//Test//EN
END:VCALENDAR"""

    participant_id = str(uuid.uuid4())
    blocks = parse_ics_to_busy_blocks(empty_ics, participant_id)

    assert len(blocks) == 0