diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..0f2282f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+.env
+.DS_Store
+*.log
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 0000000..73107b2
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,86 @@
+# LLM Development Guide
+
+## Code Style Guidelines
+- **Package Management**: ALWAYS use `uv` for all Python package management (install, run, sync, etc.). Never use pip, poetry, or other package managers.
+- **Imports**: Group standard library, third-party, and local imports with a blank line between groups
+- **Formatting**: Use Ruff with 88 character line length
+- **Types**: Use type annotations everywhere; import types from the typing module
+- **Naming**: Use snake_case for variables/functions, PascalCase for classes, UPPER_CASE for constants
+- **Error Handling**: Use specific exceptions with meaningful error messages
+- **Documentation**: Use docstrings for all public functions, classes, and methods
+- **Logging**: Use the structured logging module; avoid print statements
+- **Async**: Use async/await for non-blocking operations, especially in FastAPI endpoints
+- **Configuration**: Use environment variables with YAML for configuration
+- **Requirements**: Use the most up-to-date versions of dependencies unless specifically instructed not to
+
+## Process Guide
+
+Always start by taking a look at the project. Any .md file in the top-level directory is important context.
+
+Generally, we will use `PLAN.md` as our "current state" scratchpad. We'll keep notes for ourselves there that include any context we'll need for future steps, including:
+
+ * Any design principles or core ideas we landed on that might be relevant later on
+ * A summary of where we are in our overall development cycle
+ * What we imagine our next steps will be
+
+In total, PLAN.md should never exceed ~100 lines. A human being should be able to read & understand it in under 5 minutes.
+
+### Updating PLAN.md
+
+* If we need to update a PLAN, it often makes sense to ask the user clarifying questions before starting. Explain how you intend to update the PLAN document before actually doing so. Ask the user for feedback!
+
+* PLANs should be as test-driven as possible. We will focus on Component/Acceptance tests, at the interface boundary between components. No unit tests! And we'll generally use the Red-Green-Refactor method of Test-Driven Development. More on this in the next section.
+
+* Sometimes a goal within a PLAN cannot be encapsulated within an automated test. In those cases, we'll still want to have some kind of validation step after a unit of work. This may be running a command and seeing that the result is as expected. Or asking the user to confirm that a frontend looks the way it should. Etc.
+
+* A given goal within a plan should be a logical unit of work that would take a senior developer 4-8 hours to implement.
+
+* The final output of a goal in our PLAN should typically be under a thousand lines of code and make sense as a single PR.
+
+* Always stop and ask clarifying questions after updating `PLAN.md`. You should not move on to implementing the plan until you have gotten an explicit go-ahead instruction from the user.
+
+### Testing
+
+Coming up with a good test is **always the first step** of any plan. Tests should validate that the behaviour of a module is correct by interacting with its interface. If the module's internals change, the tests should still pass!
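To make this concrete, a component test for this repo's availability API could look something like the sketch below. It is only an illustration of the pattern: pytest + httpx are assumed, the participant data and ICS URL are placeholders, and a backend started via `just up` is assumed to be listening on localhost:8000.

```python
import httpx

BASE_URL = "http://localhost:8000"  # assumed local backend from `just up`


def test_added_participant_is_reflected_in_availability():
    # Setup: create a participant through the public API (no mocks; placeholder data).
    participant = httpx.post(
        f"{BASE_URL}/api/participants",
        json={
            "name": "Ada Example",
            "email": "ada@example.com",
            "ics_url": "https://example.com/ada.ics",  # placeholder ICS feed
        },
        timeout=30,
    ).json()

    # Call the interface under test.
    response = httpx.post(
        f"{BASE_URL}/api/availability",
        json={"participant_ids": [participant["id"]]},
        timeout=30,
    )

    # Assert a property of the output, not of the implementation.
    assert response.status_code == 200
    slots = response.json()["slots"]
    assert slots, "expected a week of hourly slots"
    assert all(slot["availability"] in {"full", "partial", "none"} for slot in slots)
```

Because the test only touches the public HTTP interface, it keeps passing even if the sync or calculation internals are rewritten.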
+
+Tests should generally follow this pattern (pseudocode):
+
+self.set_up()
+example_input = {'example': 'input'}
+self.assert_property(self.get('/v1/api/endpoint', example_input))
+
+Not every interface will be a REST API, and not every test will check a directly measurable property of the output like this.
+
+But in all cases, the test should include a setup, a call to an interface, and a test of some assumption we will make about calling that interface.
+
+We should NEVER USE MOCKS.
+
+Interfaces should be as pure-functional as possible. Use dependency injection. If a function has a side-effect, then *the side-effect should be in the name of the function, and be its main purpose* if at all possible.
+
+**At the beginning of any new goal, ask the user questions until you understand what tests you should write.** Then write the tests and ask the user if they match their expectations.
+
+### Code style
+
+**CRITICAL RULES - NEVER VIOLATE THESE:**
+
+* **KISS (Keep It Simple, Stupid)**: Always choose the simplest solution that solves the problem. Avoid clever tricks, complex abstractions, and over-engineering.
+
+* **Readable & Production-Ready**: All code must be immediately readable by any developer and production-ready. No shortcuts, no "TODO" comments, no placeholder implementations.
+
+* **NO BOILERPLATE**: Do not write any boilerplate code. Do not create classes, functions, or structures "for future use." Write only what is needed right now for the current goal.
+
+* **NO COMMENTS**: Code must be self-documenting through clear naming and simple structure. Comments are forbidden (except docstrings for public APIs). If you think you need a comment, refactor the code to be clearer instead.
+
+**Additional Guidelines:**
+
+Write the minimum code needed to accomplish a task.
+
+Do not build for untested contingencies. If you believe there is an edge case or contingency that needs to be accommodated in your code, that case should be explicitly tested.
+
+Functions should clearly identify their purpose through their names and signatures.
+
+Simple is better than complex, explicit is better than implicit, and boring code is better than clever code.
+
+No magic; don't abuse metaprogramming. When I read code, its behaviour should be obvious to me.
+
+Tests should effectively serve as documentation. By reading the tests in a pull request, I should be able to infer the PR's primary purpose.
\ No newline at end of file
diff --git a/PLAN.md b/PLAN.md
new file mode 100644
index 0000000..cd83287
--- /dev/null
+++ b/PLAN.md
@@ -0,0 +1,55 @@
+# Common Availability - Implementation Plan
+
+## Project Overview
+ICS-based calendar availability coordination system. Company members submit ICS URLs, and the system continuously syncs and calculates common availability for scheduling meetings.
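For orientation, the heart of that calculation is a plain interval-overlap check: a participant is free in a slot exactly when none of their busy blocks overlaps it. The sketch below uses made-up times and mirrors `is_participant_free` in `backend/src/app/availability_service.py`.

```python
from datetime import datetime, timezone


def overlaps(block_start: datetime, block_end: datetime,
             slot_start: datetime, slot_end: datetime) -> bool:
    # Two intervals overlap when each one starts before the other ends.
    return block_start < slot_end and block_end > slot_start


busy_blocks = [
    # One busy block, 15:00-16:00 UTC (made-up example data).
    (datetime(2026, 1, 6, 15, tzinfo=timezone.utc),
     datetime(2026, 1, 6, 16, tzinfo=timezone.utc)),
]
slot = (datetime(2026, 1, 6, 10, tzinfo=timezone.utc),
        datetime(2026, 1, 6, 11, tzinfo=timezone.utc))

is_free = not any(overlaps(start, end, *slot) for start, end in busy_blocks)
print(is_free)  # True: the 15:00 meeting does not touch the 10:00-11:00 slot
```

Common availability for a slot is then just the set of selected participants for whom this check passes: "full" means everyone is free, "partial" means only some are.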
+ +## Architecture + +### Backend (FastAPI + PostgreSQL) +- **Models**: `Participant` (id, name, email, ics_url) and `BusyBlock` (participant_id, start_time, end_time) +- **Services**: ICS fetching/parsing via `icalendar` library, availability calculation +- **API Endpoints**: + - `POST /api/participants` - Add participant with ICS URL (auto-syncs) + - `GET /api/participants` - List all participants + - `DELETE /api/participants/{id}` - Remove participant + - `POST /api/availability` - Calculate availability for given participants + - `POST /api/sync` - Sync all calendars + - `POST /api/sync/{id}` - Sync specific participant + +### Frontend (React + TypeScript) +- Existing UI preserved (React, Vite, shadcn-ui) +- API client in `src/api/client.ts` +- Real-time availability from backend (replaces mock data) + +## Current State +Implementation complete and tested. All systems operational. + +## How to Run +```bash +just fresh # Build, start containers, run migrations +just up # Start services +just logs # View logs +just migrate # Run Alembic migrations +just sync-calendars # Sync all ICS feeds +``` + +Access: +- Frontend: http://localhost:5173 +- Backend API: http://localhost:8000 +- API docs: http://localhost:8000/docs + +## Test Flow +1. Add participant with ICS URL (e.g., https://user.fm/freebusy/v1-.../Calendar.ics) +2. Select participants in Schedule tab +3. View availability heatmap (green = all free, yellow = partial, red = busy) +4. Click sync button to refresh calendars + +## Verified Working +- [x] ICS parsing and sync (321 busy blocks from sample ICS) +- [x] Availability calculation +- [x] Participant CRUD operations +- [x] Frontend integration + +## Next Steps +- [ ] Add background scheduler for periodic ICS sync +- [ ] Add calendar revocation mechanism diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 0000000..e95e187 --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,12 @@ +__pycache__ +*.py[cod] +.git +.gitignore +.env +.venv +venv/ +.uv/ +.pytest_cache/ +.ruff_cache/ +.mypy_cache/ +tests/ diff --git a/backend/.gitignore b/backend/.gitignore new file mode 100644 index 0000000..8e32fff --- /dev/null +++ b/backend/.gitignore @@ -0,0 +1,13 @@ +__pycache__/ +*.py[cod] +*$py.class +.Python +*.so +.env +.venv +venv/ +.uv/ +*.egg-info/ +.pytest_cache/ +.ruff_cache/ +.mypy_cache/ diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 0000000..ebfb770 --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.12-slim + +WORKDIR /app + +RUN pip install uv + +COPY pyproject.toml . +RUN uv sync --frozen --no-dev 2>/dev/null || uv sync --no-dev + +COPY alembic.ini . 
+COPY alembic/ alembic/ +COPY src/ src/ + +ENV PYTHONPATH=/app/src + +EXPOSE 8000 + +CMD ["uv", "run", "uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..f98f79e --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,38 @@ +[alembic] +script_location = alembic +prepend_sys_path = src +sqlalchemy.url = postgresql://postgres:postgres@db:5432/availability + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000..f762caa --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,43 @@ +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool + +from app.models import Base + +config = context.config +fileConfig(config.config_file_name) +target_metadata = Base.metadata + + +def run_migrations_offline(): + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000..17dcba0 --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,25 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/alembic/versions/001_initial.py b/backend/alembic/versions/001_initial.py new file mode 100644 index 0000000..c0f685c --- /dev/null +++ b/backend/alembic/versions/001_initial.py @@ -0,0 +1,48 @@ +"""Initial migration + +Revision ID: 001 +Revises: +Create Date: 2024-01-08 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + +revision: str = "001" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + "participants", + 
sa.Column("id", sa.UUID(), nullable=False), + sa.Column("name", sa.String(255), nullable=False), + sa.Column("email", sa.String(255), nullable=False), + sa.Column("ics_url", sa.Text(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("email"), + ) + + op.create_table( + "busy_blocks", + sa.Column("id", sa.UUID(), nullable=False), + sa.Column("participant_id", sa.UUID(), nullable=False), + sa.Column("start_time", sa.DateTime(timezone=True), nullable=False), + sa.Column("end_time", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "ix_busy_blocks_participant_id", "busy_blocks", ["participant_id"] + ) + + +def downgrade() -> None: + op.drop_index("ix_busy_blocks_participant_id", table_name="busy_blocks") + op.drop_table("busy_blocks") + op.drop_table("participants") diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 0000000..8722f96 --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,36 @@ +[project] +name = "common-availability" +version = "0.1.0" +description = "Calendar availability coordination service" +requires-python = ">=3.12" +dependencies = [ + "fastapi>=0.115.0", + "uvicorn[standard]>=0.32.0", + "sqlalchemy>=2.0.0", + "alembic>=1.14.0", + "asyncpg>=0.30.0", + "psycopg2-binary>=2.9.0", + "httpx>=0.28.0", + "icalendar>=6.0.0", + "python-dateutil>=2.9.0", + "pydantic[email]>=2.10.0", + "pydantic-settings>=2.6.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.3.0", + "pytest-asyncio>=0.24.0", + "ruff>=0.8.0", +] + +[tool.ruff] +line-length = 88 +target-version = "py312" + +[tool.ruff.lint] +select = ["E", "F", "I", "N", "W"] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" diff --git a/backend/src/app/__init__.py b/backend/src/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/src/app/availability_service.py b/backend/src/app/availability_service.py new file mode 100644 index 0000000..38f8d7b --- /dev/null +++ b/backend/src/app/availability_service.py @@ -0,0 +1,105 @@ +from datetime import datetime, timedelta, timezone +from uuid import UUID + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models import BusyBlock, Participant + + +def get_week_boundaries(reference_date: datetime | None = None) -> tuple[datetime, datetime]: + if reference_date is None: + reference_date = datetime.now(timezone.utc) + + days_since_monday = reference_date.weekday() + monday = reference_date - timedelta(days=days_since_monday) + monday = monday.replace(hour=0, minute=0, second=0, microsecond=0) + + friday = monday + timedelta(days=4) + friday = friday.replace(hour=23, minute=59, second=59, microsecond=999999) + + return monday, friday + + +async def get_busy_blocks_for_participants( + db: AsyncSession, + participant_ids: list[UUID], + start_time: datetime, + end_time: datetime, +) -> dict[UUID, list[tuple[datetime, datetime]]]: + stmt = select(BusyBlock).where( + BusyBlock.participant_id.in_(participant_ids), + BusyBlock.start_time < end_time, + BusyBlock.end_time > start_time, + ) + result = await db.execute(stmt) + blocks = result.scalars().all() + + busy_map: dict[UUID, list[tuple[datetime, datetime]]] = { + pid: [] for pid in participant_ids + } + for block in blocks: + busy_map[block.participant_id].append((block.start_time, 
block.end_time)) + + return busy_map + + +def is_participant_free( + busy_blocks: list[tuple[datetime, datetime]], + slot_start: datetime, + slot_end: datetime, +) -> bool: + for block_start, block_end in busy_blocks: + if block_start < slot_end and block_end > slot_start: + return False + return True + + +async def calculate_availability( + db: AsyncSession, + participant_ids: list[UUID], + reference_date: datetime | None = None, +) -> list[dict]: + week_start, week_end = get_week_boundaries(reference_date) + busy_map = await get_busy_blocks_for_participants( + db, participant_ids, week_start, week_end + ) + + participants_stmt = select(Participant).where(Participant.id.in_(participant_ids)) + participants_result = await db.execute(participants_stmt) + participants = {p.id: p for p in participants_result.scalars().all()} + + days = ["Mon", "Tue", "Wed", "Thu", "Fri"] + hours = list(range(9, 18)) + slots = [] + + for day_offset, day_name in enumerate(days): + for hour in hours: + slot_start = week_start + timedelta(days=day_offset, hours=hour) + slot_end = slot_start + timedelta(hours=1) + + available_participants = [] + for pid in participant_ids: + if is_participant_free(busy_map.get(pid, []), slot_start, slot_end): + participant = participants.get(pid) + if participant: + available_participants.append(participant.name) + + total = len(participant_ids) + available_count = len(available_participants) + + if available_count == total: + availability = "full" + elif available_count > 0: + availability = "partial" + else: + availability = "none" + + slots.append({ + "day": day_name, + "hour": hour, + "availability": availability, + "availableParticipants": available_participants, + }) + + return slots diff --git a/backend/src/app/config.py b/backend/src/app/config.py new file mode 100644 index 0000000..8286c28 --- /dev/null +++ b/backend/src/app/config.py @@ -0,0 +1,13 @@ +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + database_url: str = "postgresql+asyncpg://postgres:postgres@db:5432/availability" + sync_database_url: str = "postgresql://postgres:postgres@db:5432/availability" + ics_refresh_interval_minutes: int = 15 + + class Config: + env_file = ".env" + + +settings = Settings() diff --git a/backend/src/app/database.py b/backend/src/app/database.py new file mode 100644 index 0000000..7b01b5f --- /dev/null +++ b/backend/src/app/database.py @@ -0,0 +1,13 @@ +from collections.abc import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine + +from app.config import settings + +engine = create_async_engine(settings.database_url, echo=False) +async_session_maker = async_sessionmaker(engine, expire_on_commit=False) + + +async def get_db() -> AsyncGenerator[AsyncSession, None]: + async with async_session_maker() as session: + yield session diff --git a/backend/src/app/ics_service.py b/backend/src/app/ics_service.py new file mode 100644 index 0000000..02a3aab --- /dev/null +++ b/backend/src/app/ics_service.py @@ -0,0 +1,88 @@ +import logging +from datetime import datetime, timezone + +import httpx +from icalendar import Calendar +from sqlalchemy import delete +from sqlalchemy.ext.asyncio import AsyncSession + +from app.models import BusyBlock, Participant + +logger = logging.getLogger(__name__) + + +async def fetch_ics_content(url: str) -> str: + async with httpx.AsyncClient(timeout=30.0) as client: + response = await client.get(url) + response.raise_for_status() + return response.text + + +def 
parse_ics_to_busy_blocks( + ics_content: str, participant_id: str +) -> list[BusyBlock]: + calendar = Calendar.from_ical(ics_content) + blocks = [] + + for component in calendar.walk(): + if component.name == "VEVENT": + dtstart = component.get("dtstart") + dtend = component.get("dtend") + + if dtstart is None or dtend is None: + continue + + start_dt = dtstart.dt + end_dt = dtend.dt + + if not isinstance(start_dt, datetime): + start_dt = datetime.combine(start_dt, datetime.min.time()) + if not isinstance(end_dt, datetime): + end_dt = datetime.combine(end_dt, datetime.min.time()) + + if start_dt.tzinfo is None: + start_dt = start_dt.replace(tzinfo=timezone.utc) + if end_dt.tzinfo is None: + end_dt = end_dt.replace(tzinfo=timezone.utc) + + blocks.append( + BusyBlock( + participant_id=participant_id, + start_time=start_dt, + end_time=end_dt, + ) + ) + + return blocks + + +async def sync_participant_calendar( + db: AsyncSession, participant: Participant +) -> int: + logger.info(f"Syncing calendar for {participant.email}") + + ics_content = await fetch_ics_content(participant.ics_url) + blocks = parse_ics_to_busy_blocks(ics_content, str(participant.id)) + + await db.execute( + delete(BusyBlock).where(BusyBlock.participant_id == participant.id) + ) + + for block in blocks: + db.add(block) + + await db.commit() + logger.info(f"Synced {len(blocks)} busy blocks for {participant.email}") + return len(blocks) + + +async def sync_all_calendars(db: AsyncSession, participants: list[Participant]) -> dict: + results = {} + for participant in participants: + try: + count = await sync_participant_calendar(db, participant) + results[str(participant.id)] = {"status": "success", "blocks": count} + except Exception as e: + logger.error(f"Failed to sync {participant.email}: {e}") + results[str(participant.id)] = {"status": "error", "error": str(e)} + return results diff --git a/backend/src/app/main.py b/backend/src/app/main.py new file mode 100644 index 0000000..bd18001 --- /dev/null +++ b/backend/src/app/main.py @@ -0,0 +1,127 @@ +import logging +from uuid import UUID + +from fastapi import Depends, FastAPI, HTTPException +from fastapi.middleware.cors import CORSMiddleware +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.availability_service import calculate_availability +from app.database import get_db +from app.ics_service import sync_all_calendars, sync_participant_calendar +from app.models import Participant +from app.schemas import ( + AvailabilityRequest, + AvailabilityResponse, + ParticipantCreate, + ParticipantResponse, + SyncResponse, +) + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +app = FastAPI(title="Common Availability API") + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +@app.get("/health") +async def health_check(): + return {"status": "healthy"} + + +@app.post("/api/participants", response_model=ParticipantResponse) +async def create_participant( + data: ParticipantCreate, db: AsyncSession = Depends(get_db) +): + existing = await db.execute( + select(Participant).where(Participant.email == data.email) + ) + if existing.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="Email already registered") + + participant = Participant( + name=data.name, + email=data.email, + ics_url=data.ics_url, + ) + db.add(participant) + await db.commit() + await db.refresh(participant) + + try: + await 
sync_participant_calendar(db, participant) + except Exception as e: + logger.warning(f"Initial sync failed for {participant.email}: {e}") + + return participant + + +@app.get("/api/participants", response_model=list[ParticipantResponse]) +async def list_participants(db: AsyncSession = Depends(get_db)): + result = await db.execute(select(Participant)) + return result.scalars().all() + + +@app.get("/api/participants/{participant_id}", response_model=ParticipantResponse) +async def get_participant(participant_id: UUID, db: AsyncSession = Depends(get_db)): + result = await db.execute( + select(Participant).where(Participant.id == participant_id) + ) + participant = result.scalar_one_or_none() + if not participant: + raise HTTPException(status_code=404, detail="Participant not found") + return participant + + +@app.delete("/api/participants/{participant_id}") +async def delete_participant(participant_id: UUID, db: AsyncSession = Depends(get_db)): + result = await db.execute( + select(Participant).where(Participant.id == participant_id) + ) + participant = result.scalar_one_or_none() + if not participant: + raise HTTPException(status_code=404, detail="Participant not found") + + await db.delete(participant) + await db.commit() + return {"status": "deleted"} + + +@app.post("/api/availability", response_model=AvailabilityResponse) +async def get_availability( + request: AvailabilityRequest, db: AsyncSession = Depends(get_db) +): + slots = await calculate_availability(db, request.participant_ids) + return {"slots": slots} + + +@app.post("/api/sync", response_model=SyncResponse) +async def sync_calendars(db: AsyncSession = Depends(get_db)): + result = await db.execute(select(Participant)) + participants = result.scalars().all() + results = await sync_all_calendars(db, list(participants)) + return {"results": results} + + +@app.post("/api/sync/{participant_id}") +async def sync_participant(participant_id: UUID, db: AsyncSession = Depends(get_db)): + result = await db.execute( + select(Participant).where(Participant.id == participant_id) + ) + participant = result.scalar_one_or_none() + if not participant: + raise HTTPException(status_code=404, detail="Participant not found") + + try: + count = await sync_participant_calendar(db, participant) + return {"status": "success", "blocks_synced": count} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) diff --git a/backend/src/app/models.py b/backend/src/app/models.py new file mode 100644 index 0000000..8ca4106 --- /dev/null +++ b/backend/src/app/models.py @@ -0,0 +1,40 @@ +import uuid +from datetime import datetime + +from sqlalchemy import DateTime, String, Text +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + + +class Base(DeclarativeBase): + pass + + +class Participant(Base): + __tablename__ = "participants" + + id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), primary_key=True, default=uuid.uuid4 + ) + name: Mapped[str] = mapped_column(String(255), nullable=False) + email: Mapped[str] = mapped_column(String(255), nullable=False, unique=True) + ics_url: Mapped[str] = mapped_column(Text, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, nullable=False + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False + ) + + +class BusyBlock(Base): + __tablename__ = "busy_blocks" + + id: Mapped[uuid.UUID] = mapped_column( + 
UUID(as_uuid=True), primary_key=True, default=uuid.uuid4 + ) + participant_id: Mapped[uuid.UUID] = mapped_column( + UUID(as_uuid=True), nullable=False, index=True + ) + start_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) + end_time: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False) diff --git a/backend/src/app/schemas.py b/backend/src/app/schemas.py new file mode 100644 index 0000000..6d47a0d --- /dev/null +++ b/backend/src/app/schemas.py @@ -0,0 +1,41 @@ +from datetime import datetime +from uuid import UUID + +from pydantic import BaseModel, EmailStr + + +class ParticipantCreate(BaseModel): + name: str + email: EmailStr + ics_url: str + + +class ParticipantResponse(BaseModel): + id: UUID + name: str + email: str + ics_url: str + created_at: datetime + updated_at: datetime + + class Config: + from_attributes = True + + +class TimeSlot(BaseModel): + day: str + hour: int + availability: str + availableParticipants: list[str] + + +class AvailabilityRequest(BaseModel): + participant_ids: list[UUID] + + +class AvailabilityResponse(BaseModel): + slots: list[TimeSlot] + + +class SyncResponse(BaseModel): + results: dict[str, dict] diff --git a/backend/tests/__init__.py b/backend/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py new file mode 100644 index 0000000..617af14 --- /dev/null +++ b/backend/tests/conftest.py @@ -0,0 +1,16 @@ +import pytest + + +@pytest.fixture +def sample_ics(): + return """BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Test//Test//EN +BEGIN:VEVENT +DTSTART:20260106T150000Z +DTEND:20260106T160000Z +SUMMARY:Meeting +UID:test-1 +DTSTAMP:20260101T000000Z +END:VEVENT +END:VCALENDAR""" diff --git a/backend/tests/test_availability.py b/backend/tests/test_availability.py new file mode 100644 index 0000000..417bb26 --- /dev/null +++ b/backend/tests/test_availability.py @@ -0,0 +1,73 @@ +from datetime import datetime, timedelta, timezone + +from app.availability_service import ( + get_week_boundaries, + is_participant_free, +) + + +def test_get_week_boundaries_returns_monday_to_friday(): + wednesday = datetime(2026, 1, 7, 12, 0, 0, tzinfo=timezone.utc) + monday, friday = get_week_boundaries(wednesday) + + assert monday.weekday() == 0 + assert friday.weekday() == 4 + assert monday.hour == 0 + assert friday.hour == 23 + + +def test_get_week_boundaries_monday_input(): + monday_input = datetime(2026, 1, 5, 12, 0, 0, tzinfo=timezone.utc) + monday, friday = get_week_boundaries(monday_input) + + assert monday.day == 5 + assert friday.day == 9 + + +def test_is_participant_free_no_blocks(): + slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc) + slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc) + + assert is_participant_free([], slot_start, slot_end) is True + + +def test_is_participant_free_with_non_overlapping_block(): + slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc) + slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc) + + busy_blocks = [ + ( + datetime(2026, 1, 6, 14, 0, 0, tzinfo=timezone.utc), + datetime(2026, 1, 6, 15, 0, 0, tzinfo=timezone.utc), + ) + ] + + assert is_participant_free(busy_blocks, slot_start, slot_end) is True + + +def test_is_participant_busy_with_overlapping_block(): + slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc) + slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc) + + busy_blocks = [ + ( + datetime(2026, 1, 6, 10, 30, 0, tzinfo=timezone.utc), + 
datetime(2026, 1, 6, 11, 30, 0, tzinfo=timezone.utc), + ) + ] + + assert is_participant_free(busy_blocks, slot_start, slot_end) is False + + +def test_is_participant_busy_with_containing_block(): + slot_start = datetime(2026, 1, 6, 10, 0, 0, tzinfo=timezone.utc) + slot_end = datetime(2026, 1, 6, 11, 0, 0, tzinfo=timezone.utc) + + busy_blocks = [ + ( + datetime(2026, 1, 6, 9, 0, 0, tzinfo=timezone.utc), + datetime(2026, 1, 6, 12, 0, 0, tzinfo=timezone.utc), + ) + ] + + assert is_participant_free(busy_blocks, slot_start, slot_end) is False diff --git a/backend/tests/test_ics_parsing.py b/backend/tests/test_ics_parsing.py new file mode 100644 index 0000000..959764f --- /dev/null +++ b/backend/tests/test_ics_parsing.py @@ -0,0 +1,58 @@ +import uuid + +import pytest + +from app.ics_service import parse_ics_to_busy_blocks + +SAMPLE_ICS = """BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Test//Test//EN +BEGIN:VEVENT +DTSTART:20260106T150000Z +DTEND:20260106T160000Z +SUMMARY:Meeting +UID:test-1 +DTSTAMP:20260101T000000Z +END:VEVENT +BEGIN:VEVENT +DTSTART:20260107T140000Z +DTEND:20260107T153000Z +SUMMARY:Another Meeting +UID:test-2 +DTSTAMP:20260101T000000Z +END:VEVENT +END:VCALENDAR""" + + +def test_parse_ics_extracts_busy_blocks(): + participant_id = str(uuid.uuid4()) + blocks = parse_ics_to_busy_blocks(SAMPLE_ICS, participant_id) + + assert len(blocks) == 2 + assert all(str(b.participant_id) == participant_id for b in blocks) + + +def test_parse_ics_extracts_correct_times(): + participant_id = str(uuid.uuid4()) + blocks = parse_ics_to_busy_blocks(SAMPLE_ICS, participant_id) + + sorted_blocks = sorted(blocks, key=lambda b: b.start_time) + + assert sorted_blocks[0].start_time.hour == 15 + assert sorted_blocks[0].end_time.hour == 16 + + assert sorted_blocks[1].start_time.hour == 14 + assert sorted_blocks[1].end_time.hour == 15 + assert sorted_blocks[1].end_time.minute == 30 + + +def test_parse_empty_ics(): + empty_ics = """BEGIN:VCALENDAR +VERSION:2.0 +PRODID:-//Test//Test//EN +END:VCALENDAR""" + + participant_id = str(uuid.uuid4()) + blocks = parse_ics_to_busy_blocks(empty_ics, participant_id) + + assert len(blocks) == 0 diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..d42b741 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,48 @@ +services: + db: + image: postgres:16-alpine + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: availability + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 5s + timeout: 5s + retries: 5 + + backend: + build: + context: ./backend + dockerfile: Dockerfile + environment: + DATABASE_URL: postgresql+asyncpg://postgres:postgres@db:5432/availability + SYNC_DATABASE_URL: postgresql://postgres:postgres@db:5432/availability + ports: + - "8000:8000" + depends_on: + db: + condition: service_healthy + volumes: + - ./backend/src:/app/src + - ./backend/alembic:/app/alembic + + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + ports: + - "5173:8080" + environment: + VITE_API_URL: http://localhost:8000 + depends_on: + - backend + volumes: + - ./frontend/src:/app/src + +volumes: + postgres_data: diff --git a/frontend/.dockerignore b/frontend/.dockerignore new file mode 100644 index 0000000..325b561 --- /dev/null +++ b/frontend/.dockerignore @@ -0,0 +1,4 @@ +node_modules +.git +.gitignore +dist diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..198fe66 --- 
/dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,12 @@ +FROM node:20-alpine + +WORKDIR /app + +COPY package*.json ./ +RUN npm ci + +COPY . . + +EXPOSE 8080 + +CMD ["npm", "run", "dev", "--", "--host", "0.0.0.0"] diff --git a/frontend/package-lock.json b/frontend/package-lock.json index e1e8e54..d706b79 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -2859,6 +2859,7 @@ "integrity": "sha512-bJFoMATwIGaxxx8VJPeM8TonI8t579oRvgAuT8zFugJsJZgzqv0Fu8Mhp68iecjzG7cnN3mO2dJQ5uUM2EFrgQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "undici-types": "~6.21.0" } @@ -2876,6 +2877,7 @@ "integrity": "sha512-/LDXMQh55EzZQ0uVAZmKKhfENivEvWz6E+EYzh+/MCjMhNsotd+ZHhBGIjFDTi6+fz0OhQQQLbTgdQIxxCsC0w==", "devOptional": true, "license": "MIT", + "peer": true, "dependencies": { "@types/prop-types": "*", "csstype": "^3.0.2" @@ -2887,6 +2889,7 @@ "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", "devOptional": true, "license": "MIT", + "peer": true, "peerDependencies": { "@types/react": "^18.0.0" } @@ -2937,6 +2940,7 @@ "integrity": "sha512-Zhy8HCvBUEfBECzIl1PKqF4p11+d0aUJS1GeUiuqK9WmOug8YCmC4h4bjyBvMyAMI9sbRczmrYL5lKg/YMbrcQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "8.38.0", "@typescript-eslint/types": "8.38.0", @@ -3169,6 +3173,7 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -3373,6 +3378,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "caniuse-lite": "^1.0.30001726", "electron-to-chromium": "^1.5.173", @@ -3706,6 +3712,7 @@ "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-3.6.0.tgz", "integrity": "sha512-fRHTG8g/Gif+kSh50gaGEdToemgfj74aRX3swtiouboip5JDLAyDE9F11nHMIcvOaXeOC6D7SpNhi7uFyB7Uww==", "license": "MIT", + "peer": true, "funding": { "type": "github", "url": "https://github.com/sponsors/kossnocorp" @@ -3787,7 +3794,8 @@ "version": "8.6.0", "resolved": "https://registry.npmjs.org/embla-carousel/-/embla-carousel-8.6.0.tgz", "integrity": "sha512-SjWyZBHJPbqxHOzckOfo8lHisEaJWmwd23XppYFYVh10bU66/Pn5tkVkbkCMZVdbUE5eTCI2nD8OyIP4Z+uwkA==", - "license": "MIT" + "license": "MIT", + "peer": true }, "node_modules/embla-carousel-react": { "version": "8.6.0", @@ -3885,6 +3893,7 @@ "integrity": "sha512-LSehfdpgMeWcTZkWZVIJl+tkZ2nuSkyyB9C27MZqFWXuph7DvaowgcTvKqxvpLW1JZIk8PN7hFY3Rj9LQ7m7lg==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", @@ -5406,6 +5415,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -5592,6 +5602,7 @@ "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0" }, @@ -5618,6 +5629,7 @@ "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", "license": "MIT", + "peer": true, "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" @@ -5631,6 +5643,7 @@ "resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.61.1.tgz", "integrity": 
"sha512-2vbXUFDYgqEgM2RcXcAT2PwDW/80QARi+PKmHy5q2KhuKvOlG8iIYgf7eIlIANR5trW9fJbP4r5aub3a4egsew==", "license": "MIT", + "peer": true, "engines": { "node": ">=18.0.0" }, @@ -6184,6 +6197,7 @@ "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.17.tgz", "integrity": "sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==", "license": "MIT", + "peer": true, "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", @@ -6308,6 +6322,7 @@ "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -6487,6 +6502,7 @@ "integrity": "sha512-qO3aKv3HoQC8QKiNSTuUM1l9o/XX3+c+VTgLHbJWHZGeTPVAg2XwazI9UWzoxjIJCGCV2zU60uqMzjeLZuULqA==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "esbuild": "^0.21.3", "postcss": "^8.4.43", diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts new file mode 100644 index 0000000..f45bbee --- /dev/null +++ b/frontend/src/api/client.ts @@ -0,0 +1,78 @@ +const API_URL = import.meta.env.VITE_API_URL || 'http://localhost:8000'; + +export interface ParticipantAPI { + id: string; + name: string; + email: string; + ics_url: string; + created_at: string; + updated_at: string; +} + +export interface TimeSlotAPI { + day: string; + hour: number; + availability: 'full' | 'partial' | 'none'; + availableParticipants: string[]; +} + +export interface CreateParticipantRequest { + name: string; + email: string; + ics_url: string; +} + +async function handleResponse(response: Response): Promise { + if (!response.ok) { + const error = await response.json().catch(() => ({ detail: 'Request failed' })); + throw new Error(error.detail || 'Request failed'); + } + return response.json(); +} + +export async function fetchParticipants(): Promise { + const response = await fetch(`${API_URL}/api/participants`); + return handleResponse(response); +} + +export async function createParticipant(data: CreateParticipantRequest): Promise { + const response = await fetch(`${API_URL}/api/participants`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(data), + }); + return handleResponse(response); +} + +export async function deleteParticipant(id: string): Promise { + const response = await fetch(`${API_URL}/api/participants/${id}`, { + method: 'DELETE', + }); + if (!response.ok) { + throw new Error('Failed to delete participant'); + } +} + +export async function fetchAvailability(participantIds: string[]): Promise { + const response = await fetch(`${API_URL}/api/availability`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ participant_ids: participantIds }), + }); + const data = await handleResponse<{ slots: TimeSlotAPI[] }>(response); + return data.slots; +} + +export async function syncCalendars(): Promise { + const response = await fetch(`${API_URL}/api/sync`, { method: 'POST' }); + if (!response.ok) { + throw new Error('Failed to sync calendars'); + } +} + +export async function syncParticipant(id: string): Promise { + const response = await fetch(`${API_URL}/api/sync/${id}`, { method: 'POST' }); + if (!response.ok) { + throw new Error('Failed to sync participant calendar'); + } +} diff --git a/frontend/src/components/AvailabilityHeatmap.tsx b/frontend/src/components/AvailabilityHeatmap.tsx index 5075113..9a7dfad 100644 --- 
a/frontend/src/components/AvailabilityHeatmap.tsx
+++ b/frontend/src/components/AvailabilityHeatmap.tsx
@@ -1,5 +1,4 @@
 import { TimeSlot, Participant } from '@/types/calendar';
-import { days, hours } from '@/data/mockData';
 import { cn } from '@/lib/utils';
 import {
   Popover,
@@ -7,13 +6,17 @@ import {
   PopoverTrigger,
 } from '@/components/ui/popover';
 import { Button } from '@/components/ui/button';
-import { Check, X } from 'lucide-react';
+import { Check, X, Loader2 } from 'lucide-react';
+
+const days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri'];
+const hours = [9, 10, 11, 12, 13, 14, 15, 16, 17];
 
 interface AvailabilityHeatmapProps {
   slots: TimeSlot[];
   selectedParticipants: Participant[];
   onSlotSelect: (slot: TimeSlot) => void;
   showPartialAvailability?: boolean;
+  isLoading?: boolean;
 }
 
 export const AvailabilityHeatmap = ({
@@ -21,6 +24,7 @@ export const AvailabilityHeatmap = ({
   selectedParticipants,
   onSlotSelect,
   showPartialAvailability = false,
+  isLoading = false,
 }: AvailabilityHeatmapProps) => {
   const getSlot = (day: string, hour: number) => {
     return slots.find((s) => s.day === day && s.hour === hour);
@@ -59,6 +63,15 @@ export const AvailabilityHeatmap = ({
     );
   }
 
+  if (isLoading) {
+    return (
+      <div>
+        <Loader2 />
+        <span>Loading availability...</span>
+      </div>
+    );
+  }
+
   return (
@@ -72,7 +85,6 @@ export const AvailabilityHeatmap = ({
-      {/* Header */}
       {days.map((day) => (
@@ -85,7 +97,6 @@ export const AvailabilityHeatmap = ({
       ))}
-      {/* Grid */}
       {hours.map((hour) => (
@@ -157,7 +168,6 @@ export const AvailabilityHeatmap = ({
-      {/* Legend */}
diff --git a/frontend/src/pages/Index.tsx b/frontend/src/pages/Index.tsx index 59d1854..016ab87 100644 --- a/frontend/src/pages/Index.tsx +++ b/frontend/src/pages/Index.tsx @@ -1,10 +1,9 @@ -import { useState, useEffect, useMemo } from 'react'; +import { useState, useEffect } from 'react'; import { Header } from '@/components/Header'; import { ParticipantSelector } from '@/components/ParticipantSelector'; import { ParticipantManager } from '@/components/ParticipantManager'; import { AvailabilityHeatmap } from '@/components/AvailabilityHeatmap'; import { ScheduleModal } from '@/components/ScheduleModal'; -import { generateMockAvailability } from '@/data/mockData'; import { Participant, TimeSlot } from '@/types/calendar'; import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'; import { Switch } from '@/components/ui/switch'; @@ -15,39 +14,48 @@ import { PopoverTrigger, } from '@/components/ui/popover'; import { Button } from '@/components/ui/button'; -import { Users, CalendarDays, Settings } from 'lucide-react'; +import { Users, CalendarDays, Settings, RefreshCw } from 'lucide-react'; +import { useToast } from '@/hooks/use-toast'; +import { + fetchParticipants, + createParticipant, + deleteParticipant, + fetchAvailability, + syncCalendars, + ParticipantAPI, +} from '@/api/client'; -const STORAGE_KEY = 'calendar-participants'; const SETTINGS_KEY = 'calendar-settings'; -interface Settings { +interface SettingsState { showPartialAvailability: boolean; } -const defaultSettings: Settings = { +const defaultSettings: SettingsState = { showPartialAvailability: false, }; +function apiToParticipant(p: ParticipantAPI): Participant { + return { + id: p.id, + name: p.name, + email: p.email, + icsLink: p.ics_url, + connected: true, + }; +} + const Index = () => { const [participants, setParticipants] = useState([]); const [selectedParticipants, setSelectedParticipants] = useState([]); + const [availabilitySlots, setAvailabilitySlots] = useState([]); const [selectedSlot, setSelectedSlot] = useState(null); const [isModalOpen, setIsModalOpen] = useState(false); - const [settings, setSettings] = useState(defaultSettings); + const [settings, setSettings] = useState(defaultSettings); + const [isLoading, setIsLoading] = useState(false); + const [isSyncing, setIsSyncing] = useState(false); + const { toast } = useToast(); - // Load participants from localStorage on mount - useEffect(() => { - const stored = localStorage.getItem(STORAGE_KEY); - if (stored) { - try { - setParticipants(JSON.parse(stored)); - } catch (e) { - console.error('Failed to parse stored participants'); - } - } - }, []); - - // Load settings from localStorage on mount useEffect(() => { const stored = localStorage.getItem(SETTINGS_KEY); if (stored) { @@ -59,37 +67,112 @@ const Index = () => { } }, []); - // Save participants to localStorage when changed - useEffect(() => { - localStorage.setItem(STORAGE_KEY, JSON.stringify(participants)); - }, [participants]); - - // Save settings to localStorage when changed useEffect(() => { localStorage.setItem(SETTINGS_KEY, JSON.stringify(settings)); }, [settings]); - const handleAddParticipant = (data: { name: string; email: string; icsLink: string }) => { - const newParticipant: Participant = { - id: crypto.randomUUID(), - name: data.name, - email: data.email, - icsLink: data.icsLink, - connected: true, - }; - setParticipants((prev) => [...prev, newParticipant]); - }; + useEffect(() => { + loadParticipants(); + }, []); - const handleRemoveParticipant = (id: string) => { - 
setParticipants((prev) => prev.filter((p) => p.id !== id)); - setSelectedParticipants((prev) => prev.filter((p) => p.id !== id)); - }; - - // Generate availability when participants change - const availabilitySlots = useMemo(() => { - return generateMockAvailability(selectedParticipants); + useEffect(() => { + if (selectedParticipants.length > 0) { + loadAvailability(); + } else { + setAvailabilitySlots([]); + } }, [selectedParticipants]); + const loadParticipants = async () => { + try { + const data = await fetchParticipants(); + setParticipants(data.map(apiToParticipant)); + } catch (error) { + toast({ + title: 'Error loading participants', + description: error instanceof Error ? error.message : 'Unknown error', + variant: 'destructive', + }); + } + }; + + const loadAvailability = async () => { + setIsLoading(true); + try { + const ids = selectedParticipants.map((p) => p.id); + const slots = await fetchAvailability(ids); + setAvailabilitySlots(slots); + } catch (error) { + toast({ + title: 'Error loading availability', + description: error instanceof Error ? error.message : 'Unknown error', + variant: 'destructive', + }); + } finally { + setIsLoading(false); + } + }; + + const handleAddParticipant = async (data: { name: string; email: string; icsLink: string }) => { + try { + const created = await createParticipant({ + name: data.name, + email: data.email, + ics_url: data.icsLink, + }); + setParticipants((prev) => [...prev, apiToParticipant(created)]); + toast({ + title: 'Participant added', + description: `${data.name} has been added and calendar synced`, + }); + } catch (error) { + toast({ + title: 'Error adding participant', + description: error instanceof Error ? error.message : 'Unknown error', + variant: 'destructive', + }); + } + }; + + const handleRemoveParticipant = async (id: string) => { + try { + await deleteParticipant(id); + setParticipants((prev) => prev.filter((p) => p.id !== id)); + setSelectedParticipants((prev) => prev.filter((p) => p.id !== id)); + toast({ + title: 'Participant removed', + }); + } catch (error) { + toast({ + title: 'Error removing participant', + description: error instanceof Error ? error.message : 'Unknown error', + variant: 'destructive', + }); + } + }; + + const handleSyncCalendars = async () => { + setIsSyncing(true); + try { + await syncCalendars(); + if (selectedParticipants.length > 0) { + await loadAvailability(); + } + toast({ + title: 'Calendars synced', + description: 'All calendars have been refreshed', + }); + } catch (error) { + toast({ + title: 'Error syncing calendars', + description: error instanceof Error ? error.message : 'Unknown error', + variant: 'destructive', + }); + } finally { + setIsSyncing(false); + } + }; + const handleSlotSelect = (slot: TimeSlot) => { setSelectedSlot(slot); setIsModalOpen(true); @@ -132,7 +215,15 @@ const Index = () => {