diff --git a/.env.prod.example b/.env.prod.example new file mode 100644 index 0000000..752b944 --- /dev/null +++ b/.env.prod.example @@ -0,0 +1,133 @@ +# TaskFlow Production Environment (Managed Services) +# Copy to .env.prod and fill in your values +# Usage: ./scripts/deploy-prod.sh + +# ============================================================================= +# NEON POSTGRESQL (Required) +# ============================================================================= +# Create 4 databases in Neon Console: https://console.neon.tech/ +# +# Steps: +# 1. Create a Neon project +# 2. Create 4 databases: sso_db, taskflow_db, chatkit_db, notification_db +# 3. Copy the connection strings below +# +# Architecture: +# - sso_db: Auth (users, sessions) - consumed by SSO Platform +# - taskflow_db: Core business (tasks, projects, workers) - consumed by API, MCP +# - chatkit_db: Chat (threads, messages) - consumed by API ChatKit routes +# - notification_db: Notifications - consumed by Notification Service + +# SSO Database (Better Auth) +NEON_SSO_DATABASE_URL=postgresql://neondb_owner:xxx@ep-xxx.us-east-1.aws.neon.tech/sso_db?sslmode=require + +# API Database (Tasks, Projects, Workers, Audit) +NEON_API_DATABASE_URL=postgresql://neondb_owner:xxx@ep-xxx.us-east-1.aws.neon.tech/taskflow_db?sslmode=require + +# ChatKit Database (Chat threads, messages) +NEON_CHATKIT_DATABASE_URL=postgresql://neondb_owner:xxx@ep-xxx.us-east-1.aws.neon.tech/chatkit_db?sslmode=require + +# Notification Database (Notification Service) +NEON_NOTIFICATION_DATABASE_URL=postgresql://neondb_owner:xxx@ep-xxx.us-east-1.aws.neon.tech/notification_db?sslmode=require + +# ============================================================================= +# UPSTASH REDIS (Required) +# ============================================================================= +# Create ONE Redis database: https://console.upstash.com/ +# You'll use it for both Dapr pub/sub AND SSO rate limiting +# +# From Upstash Console, copy these values: + +# --- For Dapr Pub/Sub (standard Redis protocol) --- +# Found under "Connect" > "Redis" tab +UPSTASH_REDIS_HOST=your-redis-xxx.upstash.io:6379 +UPSTASH_REDIS_PASSWORD=your-upstash-password + +# --- For SSO Rate Limiting (Upstash REST API) --- +# Found under "Connect" > "REST API" tab +REDIS_URL=https://your-redis-xxx.upstash.io +REDIS_TOKEN=AXxxxxxxxxxxxxxxxxxxxxxxxxxxxx + +# ============================================================================= +# APPLICATION SECRETS (Required) +# ============================================================================= + +# Better Auth secret for JWT signing (SSO uses this to sign JWTs) +# Generate with: openssl rand -hex 32 +BETTER_AUTH_SECRET=your-32-char-secret-here + +# ============================================================================= +# OPENAI (Required for AI features) +# ============================================================================= + +# OpenAI API key (for ChatKit AI responses) +# Get from: https://platform.openai.com/api-keys +OPENAI_API_KEY=sk-proj-xxx + +# ChatKit domain key (for frontend authentication) +# Use default for local dev, generate unique for production +NEXT_PUBLIC_CHATKIT_DOMAIN_KEY=domain_pk_local_dev + +# ============================================================================= +# SMTP EMAIL (Optional - for email verification) +# ============================================================================= +# Required if you want email verification in SSO +# Works with Gmail, SendGrid, Mailgun, etc. 
+ +# Gmail SMTP (use App Password, not regular password) +# SMTP_HOST=smtp.gmail.com +# SMTP_PORT=587 +# SMTP_USER=your-email@gmail.com +# SMTP_PASS=your-app-password +# SMTP_SECURE=false +# EMAIL_FROM=no-reply@yourdomain.com + +# SendGrid SMTP +# SMTP_HOST=smtp.sendgrid.net +# SMTP_PORT=587 +# SMTP_USER=apikey +# SMTP_PASS=your-sendgrid-api-key +# SMTP_SECURE=false +# EMAIL_FROM=no-reply@yourdomain.com + +# Leave empty to disable email features +SMTP_PASS= + +# ============================================================================= +# PRODUCTION URLS (Optional - for custom domains) +# ============================================================================= +# Uncomment and set these if deploying to a real domain + +# BETTER_AUTH_URL=https://sso.yourdomain.com +# ALLOWED_ORIGINS=https://yourdomain.com,https://sso.yourdomain.com +# CORS_ORIGINS=https://yourdomain.com,https://sso.yourdomain.com +# NEXT_PUBLIC_API_URL=https://api.yourdomain.com +# NEXT_PUBLIC_SSO_URL=https://sso.yourdomain.com + +# ============================================================================= +# QUICK START +# ============================================================================= +# +# 1. Copy this file: +# cp .env.prod.example .env.prod +# +# 2. Create Neon databases: +# - Go to https://console.neon.tech/ +# - Create project, then 4 databases (sso_db, taskflow_db, chatkit_db, notification_db) +# - Copy connection strings +# +# 3. Create Upstash Redis: +# - Go to https://console.upstash.com/ +# - Create Redis database +# - Copy host and password +# +# 4. Generate secrets: +# openssl rand -hex 32 # Run twice for BETTER_AUTH_SECRET and JWT_SECRET +# +# 5. Get OpenAI key: +# - Go to https://platform.openai.com/api-keys +# +# 6. Deploy: +# ./scripts/deploy-prod.sh --port-forward +# +# ============================================================================= diff --git a/.gitignore b/.gitignore index 32a245f..bc4a720 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,7 @@ .env.development.local .env.test.local .env.production.local +.env.prod npm-debug.log* yarn-debug.log* diff --git a/docs/dapr-setup.md b/docs/dapr-setup.md new file mode 100644 index 0000000..cdadc95 --- /dev/null +++ b/docs/dapr-setup.md @@ -0,0 +1,232 @@ +# Dapr Setup Guide for TaskFlow + +This guide covers setting up Dapr for TaskFlow on Minikube/Kubernetes. 
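+
+Once the sidecars are running, application code never talks to Redis or Kafka directly; it calls its local Dapr sidecar over HTTP on port 3500. The sketch below is illustrative only: the `task-reminders` topic, job name, and payloads are made-up examples (the pub/sub component name comes from `dapr.pubsub.name` in values.yaml), not calls copied from the TaskFlow code.
+
+```bash
+# Publish an event through the sidecar (Dapr pub/sub HTTP API)
+curl -X POST http://localhost:3500/v1.0/publish/taskflow-pubsub/task-reminders \
+  -H "Content-Type: application/json" \
+  -d '{"task_id": 42, "event": "reminder.due"}'
+
+# Schedule a one-shot job via the Jobs API (alpha endpoint, Dapr 1.15+)
+# dueTime and data fields are illustrative placeholders
+curl -X POST http://localhost:3500/v1.0-alpha1/jobs/task-42-reminder \
+  -H "Content-Type: application/json" \
+  -d '{"dueTime": "2025-12-12T10:00:00Z", "data": {"task_id": 42}}'
+```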
+ +## Architecture Overview + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ KUBERNETES CLUSTER │ +│ │ +│ ┌─────────────────────────────────────────────────────────────────┐ │ +│ │ DAPR CONTROL PLANE │ │ +│ │ (dapr-system namespace) │ │ +│ │ - dapr-operator - dapr-sentry │ │ +│ │ - dapr-placement - dapr-sidecar-injector │ │ +│ │ - dapr-scheduler (3x) │ │ +│ └─────────────────────────────────────────────────────────────────┘ │ +│ │ +│ ┌─────────────────────┐ ┌─────────────────────┐ │ +│ │ TaskFlow API Pod │ │ Notification Svc │ │ +│ │ ┌───────┐ ┌──────┐ │ │ ┌───────┐ ┌──────┐ │ │ +│ │ │ API │ │ Dapr │ │ │ │ Notif │ │ Dapr │ │ │ +│ │ │ │◀┼▶Sidecar│─────▶│ │◀┼▶Sidecar│ │ │ +│ │ │ :8000 │ │ :3500│ │ │ │ :8001 │ │ :3500│ │ │ +│ │ └───────┘ └──────┘ │ │ └───────┘ └──────┘ │ │ +│ └─────────────────────┘ └─────────────────────┘ │ +│ │ ▲ │ +│ │ Dapr Pub/Sub │ │ +│ └───────────┬───────────────┘ │ +│ ▼ │ +│ ┌─────────────────┐ │ +│ │ Redis │ (local) or Kafka (production) │ +│ └─────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +## Prerequisites + +- Minikube installed and running +- kubectl configured +- Helm 3.x installed + +## Step 1: Start Minikube + +```bash +# Start minikube with enough resources +minikube start --cpus=4 --memory=8192 + +# Verify +kubectl cluster-info +``` + +## Step 2: Install Dapr on Kubernetes via Helm + +```bash +# Add Dapr Helm repo +helm repo add dapr https://dapr.github.io/helm-charts/ +helm repo update + +# Install Dapr control plane (v1.15 for stability) +helm upgrade --install dapr dapr/dapr \ + --version=1.15 \ + --namespace dapr-system \ + --create-namespace \ + --wait + +# Verify installation (all pods should be Running 1/1) +kubectl get pods -n dapr-system +``` + +Expected output: +``` +NAME READY STATUS RESTARTS AGE +dapr-operator-xxx 1/1 Running 0 1m +dapr-placement-server-0 1/1 Running 0 1m +dapr-scheduler-server-0 1/1 Running 0 1m +dapr-scheduler-server-1 1/1 Running 0 1m +dapr-scheduler-server-2 1/1 Running 0 1m +dapr-sentry-xxx 1/1 Running 0 1m +dapr-sidecar-injector-xxx 1/1 Running 0 1m +``` + +## Step 3: Install Dapr Dashboard (Optional) + +```bash +helm install dapr-dashboard dapr/dapr-dashboard --namespace dapr-system + +# Access dashboard +kubectl port-forward service/dapr-dashboard 8080:8080 -n dapr-system +# Open http://localhost:8080 +``` + +## Step 4: Deploy TaskFlow with Dapr + +```bash +cd helm/taskflow + +# Install TaskFlow (includes Redis, Dapr components) +helm upgrade --install taskflow . \ + --namespace taskflow \ + --create-namespace \ + --set dapr.enabled=true \ + --wait + +# Verify pods have 2/2 containers (app + Dapr sidecar) +kubectl get pods -n taskflow +``` + +Expected output: +``` +NAME READY STATUS RESTARTS AGE +taskflow-api-xxx 2/2 Running 0 1m +notification-service-xxx 2/2 Running 0 1m +redis-xxx 1/1 Running 0 1m +... 
+``` + +## Step 5: Verify Dapr Components + +```bash +# List Dapr components +kubectl get components -n taskflow + +# Should show: +# taskflow-pubsub pubsub.redis +# taskflow-scheduler scheduler.cron +``` + +## Step 6: Test Pub/Sub Flow + +```bash +# Port forward the API +kubectl port-forward svc/taskflow-api 8000:8000 -n taskflow & + +# Create a task with due_date (triggers reminder scheduling) +curl -X POST http://localhost:8000/api/projects/1/tasks \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{ + "title": "Test Dapr Integration", + "due_date": "2025-12-12T10:00:00Z", + "is_recurring": true, + "recurrence_pattern": "daily", + "recurrence_trigger": "on_due_date" + }' + +# Check Dapr sidecar logs for pub/sub activity +kubectl logs -l app.kubernetes.io/component=api -c daprd -n taskflow --tail=50 +``` + +## Step 7: Test Job Scheduling + +The API schedules Dapr Jobs for: +1. **Recurring task spawn** - at exact `due_date` +2. **Reminders** - 24 hours before `due_date` + +Check scheduled jobs: +```bash +# Check API logs for job scheduling +kubectl logs -l app.kubernetes.io/component=api -c api -n taskflow | grep DAPR-JOB +``` + +## Troubleshooting + +### Pods stuck at 1/2 Ready +Dapr sidecar not injecting. Check: +```bash +# Verify Dapr is installed +kubectl get pods -n dapr-system + +# Check sidecar injector logs +kubectl logs -l app=dapr-sidecar-injector -n dapr-system +``` + +### Pub/Sub not working +```bash +# Check component is loaded +kubectl get components -n taskflow + +# Check Dapr logs +kubectl logs -c daprd -n taskflow +``` + +### Jobs not firing +```bash +# Check scheduler pods +kubectl get pods -n dapr-system | grep scheduler + +# Check scheduler logs +kubectl logs -l app=dapr-scheduler-server -n dapr-system +``` + +## Production: Switch to Kafka + +For production (Redpanda Cloud/Kafka): + +```yaml +# values-production.yaml +dapr: + enabled: true + pubsub: + name: taskflow-pubsub + type: pubsub.kafka + brokers: "your-kafka-broker:9092" + consumerGroup: taskflow-group + authType: password + secretName: kafka-credentials +``` + +Then: +```bash +# Create Kafka credentials secret +kubectl create secret generic kafka-credentials \ + --from-literal=username=your-username \ + --from-literal=password=your-password \ + -n taskflow + +# Deploy with production values +helm upgrade --install taskflow . \ + -f values-production.yaml \ + --namespace taskflow +``` + +## Architecture Benefits + +| Without Dapr | With Dapr | +|-------------|-----------| +| `pip install kafka-python` | HTTP to sidecar | +| Manual connection management | Automatic retries | +| Cron polling every 60s | Exact-time Jobs API | +| Hardcoded broker config | YAML component swap | +| Tight service coupling | Event-driven decoupling | diff --git a/helm/taskflow/templates/api/deployment.yaml b/helm/taskflow/templates/api/deployment.yaml index 78798e0..7d60b7a 100644 --- a/helm/taskflow/templates/api/deployment.yaml +++ b/helm/taskflow/templates/api/deployment.yaml @@ -15,8 +15,18 @@ spec: metadata: labels: {{- include "taskflow.componentSelectorLabels" (dict "root" . 
"component" "api") | nindent 8 }} + {{- if .Values.dapr.enabled }} + annotations: + dapr.io/enabled: "true" + dapr.io/app-id: {{ .Values.api.daprAppId | quote }} + dapr.io/app-port: {{ .Values.api.service.targetPort | quote }} + dapr.io/enable-api-logging: "true" + dapr.io/log-level: "info" + {{- end }} spec: initContainers: + {{- if not .Values.managedServices.neon.enabled }} + # Wait for in-cluster PostgreSQL (skip when using Neon - always available) - name: wait-for-db image: busybox:1.36 command: @@ -28,6 +38,7 @@ spec: sleep 2 done echo "PostgreSQL is ready!" + {{- end }} - name: wait-for-sso image: busybox:1.36 command: @@ -51,11 +62,13 @@ spec: - configMapRef: name: {{ .Values.api.name }}-config env: + {{- if not .Values.managedServices.neon.enabled }} - name: DATABASE_PASSWORD valueFrom: secretKeyRef: name: {{ .Values.api.name }}-secret key: DATABASE_PASSWORD + {{- end }} - name: JWT_SECRET valueFrom: secretKeyRef: @@ -76,6 +89,12 @@ spec: secretKeyRef: name: {{ .Values.api.name }}-secret key: TASKFLOW_CHATKIT_DATABASE_URL + {{- if .Values.dapr.enabled }} + - name: DAPR_HTTP_ENDPOINT + value: "http://localhost:3500" + - name: DAPR_PUBSUB_NAME + value: {{ .Values.dapr.pubsub.name | quote }} + {{- end }} resources: {{- toYaml .Values.api.resources | nindent 10 }} livenessProbe: diff --git a/helm/taskflow/templates/api/postgres-service.yaml b/helm/taskflow/templates/api/postgres-service.yaml index 3b2c172..f58e17b 100644 --- a/helm/taskflow/templates/api/postgres-service.yaml +++ b/helm/taskflow/templates/api/postgres-service.yaml @@ -1,4 +1,4 @@ -{{- if and .Values.api.enabled .Values.api.postgresql.enabled }} +{{- if and .Values.api.enabled .Values.api.postgresql.enabled (not .Values.managedServices.neon.enabled) }} apiVersion: v1 kind: Service metadata: diff --git a/helm/taskflow/templates/api/postgres-statefulset.yaml b/helm/taskflow/templates/api/postgres-statefulset.yaml index fccb99f..a891ea1 100644 --- a/helm/taskflow/templates/api/postgres-statefulset.yaml +++ b/helm/taskflow/templates/api/postgres-statefulset.yaml @@ -1,4 +1,4 @@ -{{- if and .Values.api.enabled .Values.api.postgresql.enabled }} +{{- if and .Values.api.enabled .Values.api.postgresql.enabled (not .Values.managedServices.neon.enabled) }} apiVersion: apps/v1 kind: StatefulSet metadata: diff --git a/helm/taskflow/templates/configmap.yaml b/helm/taskflow/templates/configmap.yaml index 9f5c41b..b3906f0 100644 --- a/helm/taskflow/templates/configmap.yaml +++ b/helm/taskflow/templates/configmap.yaml @@ -85,4 +85,8 @@ data: # Server-side (Next.js API routes) - uses internal K8s service names SERVER_API_URL: "http://{{ .Values.api.name }}:{{ .Values.api.service.port }}" SERVER_SSO_URL: "http://{{ .Values.sso.name }}:{{ .Values.sso.service.port }}" + # Notification Service (separate microservice) + {{- if .Values.notificationService.enabled }} + SERVER_NOTIFICATION_URL: "http://{{ include "taskflow.fullname" . 
}}-notification:{{ .Values.notificationService.service.port }}" + {{- end }} {{- end }} diff --git a/helm/taskflow/templates/dapr/pubsub.yaml b/helm/taskflow/templates/dapr/pubsub.yaml new file mode 100644 index 0000000..24f8e3c --- /dev/null +++ b/helm/taskflow/templates/dapr/pubsub.yaml @@ -0,0 +1,52 @@ +{{- if .Values.dapr.enabled }} +# Dapr Pub/Sub Component - Kafka/Redis abstraction +# No Kafka client code needed in app - Dapr handles connection +# Supports: in-cluster Redis, Upstash Redis (managed), or Kafka +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: {{ .Values.dapr.pubsub.name }} + namespace: {{ .Release.Namespace }} +spec: + type: {{ .Values.dapr.pubsub.type }} + version: v1 + metadata: + {{- if eq .Values.dapr.pubsub.type "pubsub.kafka" }} + # Kafka configuration (production) + - name: brokers + value: {{ .Values.dapr.pubsub.brokers | quote }} + - name: consumerGroup + value: {{ .Values.dapr.pubsub.consumerGroup | default "taskflow-group" | quote }} + - name: authType + value: {{ .Values.dapr.pubsub.authType | default "none" | quote }} + {{- if .Values.dapr.pubsub.saslUsername }} + - name: saslUsername + secretKeyRef: + name: {{ .Values.dapr.pubsub.secretName | default "kafka-credentials" }} + key: username + - name: saslPassword + secretKeyRef: + name: {{ .Values.dapr.pubsub.secretName | default "kafka-credentials" }} + key: password + {{- end }} + {{- else if eq .Values.dapr.pubsub.type "pubsub.redis" }} + {{- if .Values.managedServices.upstash.enabled }} + # Upstash Redis configuration (managed - TLS enabled) + - name: redisHost + value: {{ .Values.managedServices.upstash.host | quote }} + - name: redisPassword + value: {{ .Values.managedServices.upstash.password | quote }} + - name: enableTLS + value: "true" + {{- else }} + # In-cluster Redis configuration (local development) + - name: redisHost + value: {{ .Values.dapr.pubsub.redisHost | default "redis:6379" | quote }} + - name: redisPassword + value: {{ .Values.dapr.pubsub.redisPassword | default "" | quote }} + {{- end }} + {{- end }} +scopes: + - {{ .Values.api.daprAppId }} + - {{ .Values.notificationService.daprAppId }} +{{- end }} diff --git a/helm/taskflow/templates/dapr/scheduler.yaml b/helm/taskflow/templates/dapr/scheduler.yaml new file mode 100644 index 0000000..c2e9cb9 --- /dev/null +++ b/helm/taskflow/templates/dapr/scheduler.yaml @@ -0,0 +1,10 @@ +{{- /* +Dapr Jobs API is built into Dapr 1.15+ - no separate scheduler component needed. +The Jobs API uses the Dapr Scheduler service which is deployed automatically +with the Dapr control plane. + +To schedule jobs, the API service uses HTTP POST to: + http://localhost:3500/v1.0-alpha1/jobs/{job-name} + +This file is intentionally empty. +*/ -}} diff --git a/helm/taskflow/templates/mcp-server/deployment.yaml b/helm/taskflow/templates/mcp-server/deployment.yaml index 63017c9..276aa61 100644 --- a/helm/taskflow/templates/mcp-server/deployment.yaml +++ b/helm/taskflow/templates/mcp-server/deployment.yaml @@ -17,6 +17,8 @@ spec: {{- include "taskflow.componentSelectorLabels" (dict "root" . "component" "mcp") | nindent 8 }} spec: initContainers: + {{- if not .Values.managedServices.neon.enabled }} + # Wait for in-cluster PostgreSQL (skip when using Neon - always available) - name: wait-for-db image: busybox:1.36 command: @@ -28,6 +30,7 @@ spec: sleep 2 done echo "PostgreSQL is ready!" 
+ {{- end }} - name: wait-for-api image: busybox:1.36 command: diff --git a/helm/taskflow/templates/notification-service/deployment.yaml b/helm/taskflow/templates/notification-service/deployment.yaml new file mode 100644 index 0000000..53001ce --- /dev/null +++ b/helm/taskflow/templates/notification-service/deployment.yaml @@ -0,0 +1,95 @@ +{{- if .Values.notificationService.enabled }} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "taskflow.fullname" . }}-notification + labels: + {{- include "taskflow.labels" . | nindent 4 }} + app.kubernetes.io/component: notification-service +spec: + replicas: {{ .Values.notificationService.replicas | default 1 }} + selector: + matchLabels: + {{- include "taskflow.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: notification-service + template: + metadata: + annotations: + {{- if .Values.dapr.enabled }} + dapr.io/enabled: "true" + dapr.io/app-id: {{ .Values.notificationService.daprAppId | quote }} + dapr.io/app-port: {{ .Values.notificationService.port | quote }} + dapr.io/enable-api-logging: "true" + {{- end }} + labels: + {{- include "taskflow.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: notification-service + spec: + initContainers: + {{- if not .Values.managedServices.neon.enabled }} + # Wait for in-cluster PostgreSQL (skip when using Neon - always available) + - name: wait-for-db + image: busybox:1.36 + command: + - sh + - -c + - | + until nc -z {{ .Values.notificationService.database.host }} {{ .Values.notificationService.database.port }}; do + echo "Waiting for PostgreSQL at {{ .Values.notificationService.database.host }}:{{ .Values.notificationService.database.port }}..." + sleep 2 + done + echo "PostgreSQL is ready!" + {{- end }} + - name: wait-for-sso + image: busybox:1.36 + command: + - sh + - -c + - | + until nc -z {{ .Values.sso.name }} {{ .Values.sso.service.port }}; do + echo "Waiting for SSO at {{ .Values.sso.name }}:{{ .Values.sso.service.port }}..." + sleep 2 + done + echo "SSO is ready!" + containers: + - name: notification-service + image: "{{ .Values.notificationService.image.repository }}:{{ .Values.notificationService.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.notificationService.image.pullPolicy | default "IfNotPresent" }} + ports: + - name: http + containerPort: {{ .Values.notificationService.port }} + protocol: TCP + env: + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: {{ include "taskflow.fullname" . 
}}-notification-secret + key: notification-database-url + # SSO URL for JWT verification (uses internal K8s service name) + - name: SSO_URL + value: "http://{{ .Values.sso.name }}:{{ .Values.sso.service.port }}" + - name: DAPR_HTTP_ENDPOINT + value: "http://localhost:3500" + - name: DAPR_PUBSUB_NAME + value: {{ .Values.dapr.pubsub.name | quote }} + - name: DAPR_APP_ID + value: {{ .Values.notificationService.daprAppId | quote }} + - name: LOG_LEVEL + value: {{ .Values.notificationService.logLevel | default "INFO" | quote }} + - name: ALLOWED_ORIGINS + value: {{ .Values.notificationService.allowedOrigins | default "*" | quote }} + livenessProbe: + httpGet: + path: /health + port: http + initialDelaySeconds: 10 + periodSeconds: 30 + readinessProbe: + httpGet: + path: /health + port: http + initialDelaySeconds: 5 + periodSeconds: 10 + resources: + {{- toYaml .Values.notificationService.resources | nindent 12 }} +{{- end }} diff --git a/helm/taskflow/templates/notification-service/postgresql.yaml b/helm/taskflow/templates/notification-service/postgresql.yaml new file mode 100644 index 0000000..4c312ac --- /dev/null +++ b/helm/taskflow/templates/notification-service/postgresql.yaml @@ -0,0 +1,96 @@ +{{- if and .Values.notificationService.enabled (not .Values.managedServices.neon.enabled) }} +# PostgreSQL for Notification Service (separate DB - microservice pattern) +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Values.notificationService.database.host }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "taskflow.labels" . | nindent 4 }} + app.kubernetes.io/component: notification-db +spec: + replicas: 1 + selector: + matchLabels: + {{- include "taskflow.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: notification-db + template: + metadata: + labels: + {{- include "taskflow.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: notification-db + spec: + containers: + - name: postgres + image: postgres:16-alpine + ports: + - containerPort: 5432 + name: postgres + env: + - name: POSTGRES_DB + value: {{ .Values.notificationService.database.name | quote }} + - name: POSTGRES_USER + value: {{ .Values.notificationService.database.user | quote }} + - name: POSTGRES_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "taskflow.fullname" . }}-notification-secret + key: POSTGRES_PASSWORD + - name: PGDATA + value: /var/lib/postgresql/data/pgdata + # Allow password auth without SSL (required for asyncpg which doesn't support sslmode URL param) + - name: POSTGRES_HOST_AUTH_METHOD + value: scram-sha-256 + volumeMounts: + - name: data + mountPath: /var/lib/postgresql/data + resources: + requests: + memory: "128Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "200m" + livenessProbe: + exec: + command: + - /bin/sh + - -c + - pg_isready -U {{ .Values.notificationService.database.user }} + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + readinessProbe: + exec: + command: + - /bin/sh + - -c + - pg_isready -U {{ .Values.notificationService.database.user }} + initialDelaySeconds: 5 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + volumes: + - name: data + emptyDir: {} # For demo; use PVC in production +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ .Values.notificationService.database.host }} + namespace: {{ .Release.Namespace }} + labels: + {{- include "taskflow.labels" . 
| nindent 4 }} + app.kubernetes.io/component: notification-db +spec: + type: ClusterIP + ports: + - port: {{ .Values.notificationService.database.port }} + targetPort: postgres + protocol: TCP + name: postgres + selector: + {{- include "taskflow.selectorLabels" . | nindent 4 }} + app.kubernetes.io/component: notification-db +{{- end }} diff --git a/helm/taskflow/templates/notification-service/service.yaml b/helm/taskflow/templates/notification-service/service.yaml new file mode 100644 index 0000000..99129df --- /dev/null +++ b/helm/taskflow/templates/notification-service/service.yaml @@ -0,0 +1,19 @@ +{{- if .Values.notificationService.enabled }} +apiVersion: v1 +kind: Service +metadata: + name: {{ include "taskflow.fullname" . }}-notification + labels: + {{- include "taskflow.labels" . | nindent 4 }} + app.kubernetes.io/component: notification-service +spec: + type: {{ .Values.notificationService.service.type | default "ClusterIP" }} + ports: + - port: {{ .Values.notificationService.service.port | default 8001 }} + targetPort: http + protocol: TCP + name: http + selector: + {{- include "taskflow.selectorLabels" . | nindent 4 }} + app.kubernetes.io/component: notification-service +{{- end }} diff --git a/helm/taskflow/templates/redis/deployment.yaml b/helm/taskflow/templates/redis/deployment.yaml new file mode 100644 index 0000000..144e8c4 --- /dev/null +++ b/helm/taskflow/templates/redis/deployment.yaml @@ -0,0 +1,56 @@ +{{- if and .Values.dapr.enabled (eq .Values.dapr.pubsub.type "pubsub.redis") (not .Values.managedServices.upstash.enabled) }} +# Redis for Dapr Pub/Sub (local development) +# In production, use Kafka via Redpanda Cloud +apiVersion: apps/v1 +kind: Deployment +metadata: + name: redis + namespace: {{ .Release.Namespace }} + labels: + {{- include "taskflow.labels" . | nindent 4 }} + app.kubernetes.io/component: redis +spec: + replicas: 1 + selector: + matchLabels: + {{- include "taskflow.selectorLabels" . | nindent 6 }} + app.kubernetes.io/component: redis + template: + metadata: + labels: + {{- include "taskflow.selectorLabels" . | nindent 8 }} + app.kubernetes.io/component: redis + spec: + containers: + - name: redis + image: redis:7-alpine + ports: + - containerPort: 6379 + name: redis + resources: + requests: + memory: "64Mi" + cpu: "50m" + limits: + memory: "128Mi" + cpu: "100m" +--- +apiVersion: v1 +kind: Service +metadata: + name: redis + namespace: {{ .Release.Namespace }} + labels: + {{- include "taskflow.labels" . | nindent 4 }} + app.kubernetes.io/component: redis +spec: + type: ClusterIP + ports: + - port: 6379 + targetPort: redis + protocol: TCP + name: redis + selector: + {{- include "taskflow.selectorLabels" . | nindent 4 }} + app.kubernetes.io/component: redis +{{- end }} diff --git a/helm/taskflow/templates/secrets.yaml b/helm/taskflow/templates/secrets.yaml index b870ed1..91504e2 100644 --- a/helm/taskflow/templates/secrets.yaml +++ b/helm/taskflow/templates/secrets.yaml @@ -1,6 +1,8 @@ --- # SSO PostgreSQL Secret -{{- if and .Values.sso.enabled .Values.sso.postgresql.enabled }} +# When using Neon: uses managed connection string +# When using in-cluster: builds URL from components +{{- if .Values.sso.enabled }} apiVersion: v1 kind: Secret metadata: @@ -10,9 +12,14 @@ metadata: {{- include "taskflow.componentLabels" (dict "root" . 
"component" "sso-db") | nindent 4 }} type: Opaque stringData: - # Password from values.yaml: sso.postgresql.password + {{- if .Values.managedServices.neon.enabled }} + # Neon managed PostgreSQL + DATABASE_URL: {{ .Values.managedServices.neon.ssoDatabase | quote }} + {{- else }} + # In-cluster PostgreSQL POSTGRES_PASSWORD: {{ .Values.sso.postgresql.password | quote }} DATABASE_URL: "postgresql://{{ .Values.sso.database.user }}:{{ .Values.sso.postgresql.password }}@{{ .Values.sso.database.host }}:{{ .Values.sso.database.port }}/{{ .Values.sso.database.name }}?sslmode=disable" + {{- end }} {{- end }} --- @@ -33,11 +40,18 @@ stringData: {{- if .Values.sso.smtp.enabled }} SMTP_PASS: {{ .Values.sso.smtp.password | quote }} {{- end }} + {{- if .Values.managedServices.upstash.enabled }} + # Upstash Redis REST API for rate limiting + REDIS_URL: {{ .Values.managedServices.upstash.restUrl | quote }} + REDIS_TOKEN: {{ .Values.managedServices.upstash.restToken | quote }} + {{- end }} {{- end }} --- # API PostgreSQL Secret -{{- if and .Values.api.enabled .Values.api.postgresql.enabled }} +# When using Neon: uses managed connection string +# When using in-cluster: builds URL from components +{{- if .Values.api.enabled }} apiVersion: v1 kind: Secret metadata: @@ -47,9 +61,14 @@ metadata: {{- include "taskflow.componentLabels" (dict "root" . "component" "api-db") | nindent 4 }} type: Opaque stringData: - # Password from values.yaml: api.postgresql.password + {{- if .Values.managedServices.neon.enabled }} + # Neon managed PostgreSQL + DATABASE_URL: {{ .Values.managedServices.neon.apiDatabase | quote }} + {{- else }} + # In-cluster PostgreSQL POSTGRES_PASSWORD: {{ .Values.api.postgresql.password | quote }} DATABASE_URL: "postgresql://{{ .Values.api.database.user }}:{{ .Values.api.postgresql.password }}@{{ .Values.api.database.host }}:{{ .Values.api.database.port }}/{{ .Values.api.database.name }}?sslmode=disable" + {{- end }} {{- end }} --- @@ -64,12 +83,19 @@ metadata: {{- include "taskflow.componentLabels" (dict "root" . "component" "api") | nindent 4 }} type: Opaque stringData: - # Password from values.yaml: api.postgresql.password + {{- if not .Values.managedServices.neon.enabled }} + # Password from values.yaml: api.postgresql.password (only needed for in-cluster) DATABASE_PASSWORD: {{ .Values.api.postgresql.password | quote }} + {{- end }} JWT_SECRET: {{ .Values.api.jwtSecret | default "changeme-jwt-secret" | quote }} OPENAI_API_KEY: {{ .Values.api.openai.apiKey | default "" | quote }} + {{- if .Values.managedServices.neon.enabled }} + # Neon managed PostgreSQL for ChatKit (separate DB for clean microservice boundaries) + TASKFLOW_CHATKIT_DATABASE_URL: {{ .Values.managedServices.neon.chatkitDatabase | quote }} + {{- else }} # ChatKit uses same DB as API - URL built from api.postgresql.password (SINGLE SOURCE) TASKFLOW_CHATKIT_DATABASE_URL: "postgresql://{{ .Values.api.database.user }}:{{ .Values.api.postgresql.password }}@{{ .Values.api.database.host }}:{{ .Values.api.database.port }}/{{ .Values.api.database.name }}?sslmode=disable" + {{- end }} {{- end }} --- @@ -86,3 +112,26 @@ type: Opaque stringData: MCP_API_KEY: {{ .Values.mcpServer.mcpApiKey | default "changeme-mcp-api-key" | quote }} {{- end }} + +--- +# Notification Service Secret (separate microservice with own DB) +{{- if .Values.notificationService.enabled }} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "taskflow.fullname" . 
}}-notification-secret + namespace: {{ .Release.Namespace }} + labels: + {{- include "taskflow.labels" . | nindent 4 }} + app.kubernetes.io/component: notification-service +type: Opaque +stringData: + {{- if .Values.managedServices.neon.enabled }} + # Neon managed PostgreSQL + notification-database-url: {{ .Values.managedServices.neon.notificationDatabase | quote }} + {{- else }} + # In-cluster PostgreSQL + POSTGRES_PASSWORD: {{ .Values.notificationService.database.password | quote }} + notification-database-url: "postgresql://{{ .Values.notificationService.database.user }}:{{ .Values.notificationService.database.password }}@{{ .Values.notificationService.database.host }}:{{ .Values.notificationService.database.port }}/{{ .Values.notificationService.database.name }}" + {{- end }} +{{- end }} diff --git a/helm/taskflow/templates/sso-platform/deployment.yaml b/helm/taskflow/templates/sso-platform/deployment.yaml index a714c8f..256556b 100644 --- a/helm/taskflow/templates/sso-platform/deployment.yaml +++ b/helm/taskflow/templates/sso-platform/deployment.yaml @@ -16,7 +16,10 @@ spec: labels: {{- include "taskflow.componentSelectorLabels" (dict "root" . "component" "sso") | nindent 8 }} spec: + {{- if or (not .Values.managedServices.neon.enabled) .Values.sso.migrations.enabled }} initContainers: + {{- if not .Values.managedServices.neon.enabled }} + # Wait for in-cluster PostgreSQL (skip when using Neon - always available) - name: wait-for-db image: busybox:1.36 command: @@ -28,6 +31,8 @@ spec: sleep 2 done echo "PostgreSQL is ready!" + {{- end }} + {{- if .Values.sso.migrations.enabled }} - name: run-migrations image: "{{ .Values.sso.migrationsImage.repository }}:{{ .Values.sso.migrationsImage.tag }}" imagePullPolicy: {{ .Values.sso.migrationsImage.pullPolicy }} @@ -69,6 +74,8 @@ spec: secretKeyRef: name: {{ .Values.sso.postgresql.name }}-secret key: DATABASE_URL + {{- end }} + {{- end }} containers: - name: sso-platform image: "{{ .Values.sso.image.repository }}:{{ .Values.sso.image.tag }}" @@ -103,6 +110,18 @@ spec: name: {{ .Values.sso.name }}-secret key: SMTP_PASS {{- end }} + {{- if .Values.managedServices.upstash.enabled }} + - name: REDIS_URL + valueFrom: + secretKeyRef: + name: {{ .Values.sso.name }}-secret + key: REDIS_URL + - name: REDIS_TOKEN + valueFrom: + secretKeyRef: + name: {{ .Values.sso.name }}-secret + key: REDIS_TOKEN + {{- end }} resources: {{- toYaml .Values.sso.resources | nindent 10 }} livenessProbe: diff --git a/helm/taskflow/templates/sso-platform/postgres-service.yaml b/helm/taskflow/templates/sso-platform/postgres-service.yaml index bbb1270..efedbb1 100644 --- a/helm/taskflow/templates/sso-platform/postgres-service.yaml +++ b/helm/taskflow/templates/sso-platform/postgres-service.yaml @@ -1,4 +1,4 @@ -{{- if and .Values.sso.enabled .Values.sso.postgresql.enabled }} +{{- if and .Values.sso.enabled .Values.sso.postgresql.enabled (not .Values.managedServices.neon.enabled) }} apiVersion: v1 kind: Service metadata: diff --git a/helm/taskflow/templates/sso-platform/postgres-statefulset.yaml b/helm/taskflow/templates/sso-platform/postgres-statefulset.yaml index d23cfa4..0582605 100644 --- a/helm/taskflow/templates/sso-platform/postgres-statefulset.yaml +++ b/helm/taskflow/templates/sso-platform/postgres-statefulset.yaml @@ -1,4 +1,4 @@ -{{- if and .Values.sso.enabled .Values.sso.postgresql.enabled }} +{{- if and .Values.sso.enabled .Values.sso.postgresql.enabled (not .Values.managedServices.neon.enabled) }} apiVersion: apps/v1 kind: StatefulSet metadata: diff --git 
a/helm/taskflow/values-cloud.yaml b/helm/taskflow/values-cloud.yaml new file mode 100644 index 0000000..a71fc64 --- /dev/null +++ b/helm/taskflow/values-cloud.yaml @@ -0,0 +1,262 @@ +# TaskFlow Platform - Cloud Production Values +# Used by GitHub Actions CD pipeline for cloud deployments +# +# Secrets are injected via --set flags from GitHub Secrets +# This file contains non-sensitive cloud-specific configuration + +global: + # Domain is set via vars.DOMAIN in GitHub + domain: "" + namespace: taskflow + imagePullPolicy: Always + # Image registry and tag set via --set in workflow + # imageRegistry: ghcr.io/owner/taskflow + # imageTag: abc123 + +# ============================================================================= +# MANAGED SERVICES (Neon + Upstash) +# Values injected from GitHub Secrets +# ============================================================================= +managedServices: + neon: + enabled: true + # Connection strings injected via --set from secrets + upstash: + enabled: true + # Credentials injected via --set from secrets + +# Disable in-cluster databases (using Neon) +pgadmin: + enabled: false + +# ============================================================================= +# SSO Platform +# ============================================================================= +sso: + enabled: true + name: sso-platform + replicaCount: 1 + + image: + # Repository will be: ${global.imageRegistry}/sso-platform + repository: sso-platform + pullPolicy: Always + + # Disable migrations - run locally for Neon + migrations: + enabled: false + + service: + type: ClusterIP + port: 3001 + targetPort: 3001 + + ingress: + enabled: true + className: nginx + host: "" # Will be sso.${domain} + tls: + enabled: true + secretName: sso-tls + + # Disable in-cluster postgres (using Neon) + postgresql: + enabled: false + + env: + NODE_ENV: production + # BETTER_AUTH_URL will be https://sso.${domain} + # ALLOWED_ORIGINS will be https://${domain},https://sso.${domain} + + smtp: + enabled: true + host: smtp.gmail.com + port: "587" + user: mr.junaid.ca@gmail.com + secure: "false" + emailFrom: no-reply@taskflow.org + + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + +# ============================================================================= +# API Service +# ============================================================================= +api: + enabled: true + name: taskflow-api + daprAppId: taskflow-api + replicaCount: 1 + + image: + repository: api + pullPolicy: Always + + service: + type: ClusterIP + port: 8000 + targetPort: 8000 + + ingress: + enabled: true + className: nginx + host: "" # Will be api.${domain} + tls: + enabled: true + secretName: api-tls + + # Disable in-cluster postgres (using Neon) + postgresql: + enabled: false + + env: + ENV: production + SSO_URL: http://sso-platform:3001 + # CORS_ORIGINS set via template + + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + +# ============================================================================= +# MCP Server +# ============================================================================= +mcpServer: + enabled: true + name: mcp-server + replicaCount: 1 + + image: + repository: mcp-server + pullPolicy: Always + + service: + type: ClusterIP + port: 8001 + targetPort: 8001 + + env: + ENV: production + SSO_URL: http://sso-platform:3001 + TASKFLOW_API_URL: http://taskflow-api:8000 + + resources: + requests: + memory: "256Mi" + cpu: "100m" 
+ limits: + memory: "512Mi" + cpu: "500m" + +# ============================================================================= +# Web Dashboard +# ============================================================================= +web: + enabled: true + name: web-dashboard + replicaCount: 1 + + image: + repository: web-dashboard + pullPolicy: Always + + service: + type: ClusterIP + port: 3000 + targetPort: 3000 + + ingress: + enabled: true + className: nginx + host: "" # Will be ${domain} + tls: + enabled: true + secretName: web-tls + + env: + NODE_ENV: production + # URLs set at build time or via template + # NEXT_PUBLIC_API_URL: https://api.${domain} + # NEXT_PUBLIC_SSO_URL: https://sso.${domain} + + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + +# ============================================================================= +# Notification Service +# ============================================================================= +notificationService: + enabled: true + name: taskflow-notification + daprAppId: notification-service + replicas: 1 + port: 8001 + logLevel: INFO + + image: + repository: notification-service + pullPolicy: Always + + service: + type: ClusterIP + port: 8001 + + # Disable in-cluster postgres (using Neon) + postgresql: + enabled: false + + resources: + requests: + memory: "128Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "250m" + +# ============================================================================= +# Dapr Configuration (using Upstash Redis) +# ============================================================================= +dapr: + enabled: true + + pubsub: + name: taskflow-pubsub + type: pubsub.redis + # redisHost and redisPassword injected from Upstash secrets + enableTLS: "true" # Upstash requires TLS + + scheduler: + enabled: true + name: taskflow-scheduler + +# ============================================================================= +# Ingress Controller +# ============================================================================= +ingress-nginx: + enabled: true + controller: + service: + type: LoadBalancer # Cloud provider will assign external IP + config: + proxy-body-size: "50m" + ssl-protocols: "TLSv1.2 TLSv1.3" + +# ============================================================================= +# Cert-Manager (for automatic TLS certificates) +# ============================================================================= +# Note: Cert-manager should be installed separately in the cluster +# kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml diff --git a/helm/taskflow/values-prod.yaml b/helm/taskflow/values-prod.yaml new file mode 100644 index 0000000..a333356 --- /dev/null +++ b/helm/taskflow/values-prod.yaml @@ -0,0 +1,118 @@ +# TaskFlow Platform - Production Values (Managed Services) +# Uses Neon PostgreSQL and Upstash Redis for reliability +# +# Usage: +# helm upgrade --install taskflow ./helm/taskflow -f helm/taskflow/values-prod.yaml +# +# Or merge with base values: +# helm upgrade --install taskflow ./helm/taskflow -f helm/taskflow/values.yaml -f helm/taskflow/values-prod.yaml + +# ============================================================================= +# MANAGED SERVICES - Enable for Production +# ============================================================================= +managedServices: + neon: + enabled: true + # Set these via environment variables or --set for security: + # --set 
managedServices.neon.ssoDatabase="postgresql://..." + # + # Create 3 databases in Neon: + # 1. sso_db - Better Auth SSO data + # 2. taskflow_db - API + ChatKit data + # 3. notification_db - Notification service data + # + # Connection string format: postgresql://user:pass@ep-xxx.neon.tech/db_name?sslmode=require + ssoDatabase: "" + apiDatabase: "" + notificationDatabase: "" + + upstash: + enabled: true + # Set these via environment variables or --set for security: + # --set managedServices.upstash.host="xxx.upstash.io:6379" + # --set managedServices.upstash.password="your-password" + # + # Get from Upstash Console: https://console.upstash.com/ + host: "" + password: "" + +# ============================================================================= +# DISABLE IN-CLUSTER RESOURCES (they'll be skipped when managed services enabled) +# ============================================================================= + +# pgAdmin not needed with Neon (use Neon Console instead) +pgadmin: + enabled: false + +# In-cluster PostgreSQL disabled (using Neon) +sso: + postgresql: + enabled: false + +api: + postgresql: + enabled: false + +# ============================================================================= +# PRODUCTION SETTINGS +# ============================================================================= + +# SSO Platform +sso: + env: + NODE_ENV: production + # Update these for your production domain + BETTER_AUTH_URL: https://sso.yourdomain.com + ALLOWED_ORIGINS: "https://yourdomain.com,https://sso.yourdomain.com" + +# API +api: + env: + ENV: production + CORS_ORIGINS: "https://yourdomain.com,https://sso.yourdomain.com" + +# Web Dashboard +web: + env: + NODE_ENV: production + NEXT_PUBLIC_API_URL: https://api.yourdomain.com + NEXT_PUBLIC_SSO_URL: https://sso.yourdomain.com + +# Notification Service +notificationService: + allowedOrigins: "https://yourdomain.com,https://sso.yourdomain.com" + +# ============================================================================= +# RESOURCE SCALING (adjust based on load) +# ============================================================================= + +# Increase replicas for high availability +# sso: +# replicaCount: 2 +# api: +# replicaCount: 2 +# web: +# replicaCount: 2 + +# ============================================================================= +# SECRETS - Set via CLI, not in file +# ============================================================================= +# +# Required secrets (use --set or environment variables): +# +# Database connections (Neon): +# --set managedServices.neon.ssoDatabase="postgresql://..." +# --set managedServices.neon.apiDatabase="postgresql://..." +# --set managedServices.neon.notificationDatabase="postgresql://..." +# +# Redis (Upstash): +# --set managedServices.upstash.host="xxx.upstash.io:6379" +# --set managedServices.upstash.password="xxx" +# +# Application secrets: +# --set sso.env.BETTER_AUTH_SECRET="generate-32-char-secret" +# --set api.jwtSecret="generate-32-char-secret" +# --set api.openai.apiKey="sk-..." 
+# +# SMTP (if using email): +# --set sso.smtp.password="your-smtp-password" diff --git a/helm/taskflow/values.yaml b/helm/taskflow/values.yaml index d3c6eb0..e0bbd9b 100644 --- a/helm/taskflow/values.yaml +++ b/helm/taskflow/values.yaml @@ -6,6 +6,36 @@ global: namespace: taskflow imagePullPolicy: IfNotPresent +# ============================================================================= +# MANAGED SERVICES CONFIGURATION +# Set to true to use external managed services (Neon, Upstash) instead of +# in-cluster postgres/redis. This makes the cluster lightweight and reliable. +# ============================================================================= +managedServices: + # Use Neon PostgreSQL instead of in-cluster postgres + neon: + enabled: false # Set to true to use Neon + # Connection strings - set these in values-prod.yaml or via --set + ssoDatabase: "" # postgresql://user:pass@ep-xxx.neon.tech/sso_db?sslmode=require + apiDatabase: "" # postgresql://user:pass@ep-xxx.neon.tech/taskflow_db?sslmode=require + chatkitDatabase: "" # postgresql://user:pass@ep-xxx.neon.tech/chatkit_db?sslmode=require + notificationDatabase: "" # postgresql://user:pass@ep-xxx.neon.tech/notification_db?sslmode=require + + # Use Upstash Redis instead of in-cluster redis + upstash: + enabled: false # Set to true to use Upstash + # Upstash Redis connection - set in values-prod.yaml or via --set + # For Dapr pub/sub (standard Redis protocol) + host: "" # xxx.upstash.io:6379 + password: "" # Your Upstash password + # For SSO rate limiting (Upstash REST API) + restUrl: "" # https://xxx.upstash.io + restToken: "" # AXxxxx... + +# pgAdmin (disable in production) +pgadmin: + enabled: true # Set to false when using Neon (use Neon console instead) + # SSO Platform (Better Auth + PostgreSQL) sso: enabled: true @@ -18,6 +48,9 @@ sso: pullPolicy: IfNotPresent # Separate migrations image (builder stage with full dependencies) + # Set migrations.enabled=false when using Neon (run migrations locally instead) + migrations: + enabled: true # Set to false for Neon - run migrations locally migrationsImage: repository: taskflow/sso-platform-migrations tag: latest @@ -100,6 +133,7 @@ sso: api: enabled: true name: taskflow-api + daprAppId: taskflow-api replicaCount: 1 image: @@ -247,6 +281,66 @@ web: memory: "512Mi" cpu: "500m" +# Notification Service (Dapr pub/sub consumer) +notificationService: + enabled: true + name: notification-service + daprAppId: notification-service + replicas: 1 + port: 8001 + logLevel: INFO + allowedOrigins: "http://localhost:3000,http://localhost:3001,http://taskflow.local" + + image: + repository: taskflow/notification-service + tag: latest + pullPolicy: IfNotPresent + + service: + type: ClusterIP + port: 8001 + + # Separate PostgreSQL for notification service (microservice pattern) + database: + host: notification-postgres + port: 5432 + name: notification_db + user: notification_user + password: "changeme-notification-db" + + resources: + requests: + memory: "128Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "250m" + +# Dapr Configuration +dapr: + enabled: true + + # Pub/Sub Component (Kafka in production, Redis locally) + pubsub: + name: taskflow-pubsub + # For local dev: pubsub.redis + # For production: pubsub.kafka + type: pubsub.redis + # Redis config (local) + redisHost: redis:6379 + redisPassword: "" + # Kafka config (production) - uncomment and set for cloud + # type: pubsub.kafka + # brokers: "kafka.example.com:9092" + # consumerGroup: taskflow-group + # authType: password + # secretName: 
kafka-credentials + + # Scheduler Component (for Jobs API) + scheduler: + enabled: true + name: taskflow-scheduler + # Ingress Controller (NGINX) ingress-nginx: enabled: true diff --git a/history/prompts/notifications-dapr-triggers/001-agent-2b-initial-assignment.misc.prompt.md b/history/prompts/notifications-dapr-triggers/001-agent-2b-initial-assignment.misc.prompt.md new file mode 100644 index 0000000..71ae136 --- /dev/null +++ b/history/prompts/notifications-dapr-triggers/001-agent-2b-initial-assignment.misc.prompt.md @@ -0,0 +1,86 @@ +--- +id: 001 +title: Agent 2B Initial Assignment - Notifications and Dapr Integration +stage: misc +date: 2025-12-11 +surface: agent +model: claude-opus-4-5-20251101 +feature: notifications-dapr-triggers +branch: main +user: human +command: /sp.orchestrate +labels: ["phase-v", "notifications", "dapr", "recurring-tasks", "cron"] +links: + spec: specs/011-phase-v-features-planner/agent-2b-notifications-dapr-prd.md + ticket: null + adr: null + pr: null +files: + - packages/api/src/taskflow_api/models/task.py + - packages/api/src/taskflow_api/routers/tasks.py +tests: + - null +--- + +## Prompt + +You are Agent 2B. Your mission is to implement Notifications & complete the recurring triggers. Use SDD RI look from sp.specify, sp.plan, sp.tasks and sp.implement. Record all PHRs from initall human prompt and all your during the session. + +CRITICAL CONTEXT FROM AGENT 2A REVIEW: +- `recurrence_trigger` field accepts "on_due_date" and "both" BUT THEY DON'T WORK +- Users can set these values and nothing happens (silent failure) +- `recurring_root_id` is MISSING an index (performance bug) + +P0 FIXES (Do First): +1. Add index=True to recurring_root_id field +2. Add reminder_sent field +3. Run migration + +THEN implement: +1. Cron handler for on_due_date trigger +2. Cron handler for reminders +3. Event publishing via Dapr +4. Notification service +5. Frontend bell + +READ: specs/011-phase-v-features-planner/agent-2b-notifications-dapr-prd.md + +TIME: 60-75 minutes + +You are working on Phase 5 most important part of specification implementation. Given the complexity of this feature you will use proper spec loop from /sp.specify and be responsible for recording PHRs and ADRs. As you are engineering this Most Critical Update that is simple but needs proper planning and implementation for lousy enterprises on my behalf be meticulous. I am not available for this session so take the reins and report once done. Rather than getting your head in water use the relevant subagents and skills and tools.. Use your tools and carefully plan like we can use dapr helm charts already build by dapr and all etc. + +## Response snapshot + +Agent 2B acknowledged assignment and completed Phase 0 Context Analysis: + +**Key Findings:** +- `recurring_root_id` ALREADY HAS `index=True` (verified at line 79 in task.py) +- `reminder_sent` field is MISSING - needs to be added +- `on_complete` trigger works (create_next_occurrence in tasks.py line 216-315) +- `on_due_date` and `both` triggers NOT IMPLEMENTED (cron handler needed) +- No notification service exists - needs new package +- No Dapr integration - needs components and event service +- Helm charts exist at helm/taskflow/ - can extend for Dapr + +**Classification:** +- Actor Type: Both (Human + Agent) +- Work Type: API_FEATURE + INFRASTRUCTURE +- Hackathon Phase: V (Production) +- Complexity: MODERATE-COMPLEX + +**Routing Decision:** Proceed through full SDD-RI workflow autonomously as user delegated authority. 
+ +## Outcome + +- Impact: Phase 0 complete, routing decision made, ready for P0 fixes then spec phase +- Tests: N/A (analysis phase) +- Files: Reviewed task.py, tasks.py, main.py, config.py, PRD +- Next prompts: P0 fixes, then /sp.specify notifications-dapr-triggers +- Reflection: User pre-approved autonomous execution via detailed PRD. Index already exists (PRD outdated on this point). + +## Evaluation notes (flywheel) + +- Failure modes observed: None - clean handoff from Agent 2A +- Graders run and results (PASS/FAIL): N/A +- Prompt variant (if applicable): Standard orchestration +- Next experiment: Execute P0 fix (add reminder_sent) then spec phase diff --git a/history/prompts/notifications-dapr-triggers/002-specification-complete.spec.prompt.md b/history/prompts/notifications-dapr-triggers/002-specification-complete.spec.prompt.md new file mode 100644 index 0000000..795f7e2 --- /dev/null +++ b/history/prompts/notifications-dapr-triggers/002-specification-complete.spec.prompt.md @@ -0,0 +1,68 @@ +--- +id: 002 +title: Notifications Dapr Triggers Specification Complete +stage: spec +date: 2025-12-11 +surface: agent +model: claude-opus-4-5-20251101 +feature: notifications-dapr-triggers +branch: 012-notifications-dapr-triggers +user: agent-2b +command: /sp.specify +labels: ["phase-v", "notifications", "dapr", "specification", "formal-verification"] +links: + spec: specs/012-notifications-dapr-triggers/spec.md + ticket: null + adr: null + pr: null +files: + - specs/012-notifications-dapr-triggers/spec.md + - specs/012-notifications-dapr-triggers/checklists/requirements.md + - packages/api/src/taskflow_api/models/task.py +tests: + - null +--- + +## Prompt + +/sp.specify notifications-dapr-triggers + +Create specification for notifications system with Dapr integration, cron handlers for on_due_date trigger, reminders, event publishing, and frontend bell. + +## Response snapshot + +**Specification Created**: `specs/012-notifications-dapr-triggers/spec.md` + +**Key Components:** +- 6 User Stories (P1: on_due_date spawn fix, P2: assignment/reminders, P3: bell/completion/spawn notifications) +- 22 Functional Requirements (FR-001 to FR-022) +- 7 Success Criteria (SC-001 to SC-007) +- Constraints for atomicity, concurrency control, data retention +- 7 Assumptions documented + +**Spec-Architect Validation:** +- Formal verification identified 5 invariants +- 2 counterexamples auto-fixed (spawn atomicity, concurrency) +- 1 architectural clarification resolved: Direct Creation (Option A) - API creates notifications directly in database, Dapr events are observability only +- Checklist generated: `specs/012-notifications-dapr-triggers/checklists/requirements.md` + +**P0 Fix Applied:** +- Added `reminder_sent` field to Task model (line 94-97 in task.py) +- Note: `recurring_root_id` already had `index=True` (PRD was outdated) + +**Overall Verdict:** READY for planning phase + +## Outcome + +- Impact: Comprehensive spec covers bug fix (on_due_date silent failure) + full notification system +- Tests: Spec includes 14 acceptance scenarios across 6 user stories +- Files: spec.md, checklist, task.py model update +- Next prompts: /sp.plan notifications-dapr-triggers +- Reflection: Spec-architect validation caught atomicity and concurrency gaps. Architectural decision (Direct Creation) ensures SC-005 achievable. 
+ +## Evaluation notes (flywheel) + +- Failure modes observed: Initial PRD claimed recurring_root_id needed index - it already existed +- Graders run and results (PASS/FAIL): spec-architect validation PASS after auto-fixes +- Prompt variant (if applicable): Standard spec workflow with formal verification +- Next experiment: Verify cron handler implementation matches atomicity constraints diff --git a/history/prompts/notifications-dapr-triggers/003-implementation-plan.plan.prompt.md b/history/prompts/notifications-dapr-triggers/003-implementation-plan.plan.prompt.md new file mode 100644 index 0000000..6433a87 --- /dev/null +++ b/history/prompts/notifications-dapr-triggers/003-implementation-plan.plan.prompt.md @@ -0,0 +1,69 @@ +--- +id: 003 +title: Notifications Dapr Triggers Implementation Plan +stage: plan +date: 2025-12-11 +surface: agent +model: claude-opus-4-5-20251101 +feature: notifications-dapr-triggers +branch: 012-notifications-dapr-triggers +user: agent-2b +command: /sp.plan +labels: ["phase-v", "notifications", "dapr", "implementation-plan", "architecture"] +links: + spec: specs/012-notifications-dapr-triggers/spec.md + ticket: null + adr: null + pr: null +files: + - specs/012-notifications-dapr-triggers/plan.md +tests: + - null +--- + +## Prompt + +/sp.plan notifications-dapr-triggers + +Create implementation plan for notifications system with Dapr integration. + +## Response snapshot + +**Plan Created**: `specs/012-notifications-dapr-triggers/plan.md` (1192 lines) + +**Prioritized Implementation Phases:** +- **P1 (6-8h)**: Critical bug fix - Cron handler for `on_due_date` trigger +- **P2 (8-10h)**: Core notifications - Notification model, reminders, assignments +- **P3 (6-8h)**: Frontend bell, completion/spawn notifications, Dapr events +- **MCP (2-3h)**: Agent parity - list_notifications, mark_notification_read tools + +**Key Technical Decisions:** +1. **APScheduler** for 60-second cron interval (not Dapr cron binding for simplicity) +2. **Row-level locking** with `SELECT FOR UPDATE SKIP LOCKED` for concurrency +3. **Direct notification creation** (API writes DB, Dapr events for observability) +4. **Single PostgreSQL instance** (no separate notification service) +5. 
**30-second polling** for frontend bell (WebSocket is future enhancement) + +**Constitutional Compliance:** All 5 principles verified: +- Audit: Cron spawns create audit entries +- Agent Parity: MCP tools for notifications +- Recursive Tasks: Subtask cloning in cron +- Spec-Driven: Plan derived from spec.md +- Phase Continuity: Notification model stable across phases + +**Total Estimated Effort**: 26-35 hours + +## Outcome + +- Impact: Complete production-ready plan covering database schema, API contracts, cron logic, frontend, MCP tools +- Tests: 18 acceptance scenarios mapped across 4 test phases +- Files: plan.md with implementation checklist (38 items) +- Next prompts: /sp.tasks notifications-dapr-triggers +- Reflection: Plan includes detailed code examples for all critical paths (cron, notifications, frontend) + +## Evaluation notes (flywheel) + +- Failure modes observed: None - general-purpose agent produced comprehensive plan +- Graders run and results (PASS/FAIL): Manual review PASS +- Prompt variant (if applicable): Standard planning workflow with routing to general-purpose +- Next experiment: Verify APScheduler integration in production (not just Dapr cron) diff --git a/history/prompts/notifications-dapr-triggers/004-tasks-breakdown.tasks.prompt.md b/history/prompts/notifications-dapr-triggers/004-tasks-breakdown.tasks.prompt.md new file mode 100644 index 0000000..9c40051 --- /dev/null +++ b/history/prompts/notifications-dapr-triggers/004-tasks-breakdown.tasks.prompt.md @@ -0,0 +1,68 @@ +--- +id: 004 +title: Notifications Dapr Triggers Tasks Breakdown +stage: tasks +date: 2025-12-11 +surface: agent +model: claude-opus-4-5-20251101 +feature: notifications-dapr-triggers +branch: 012-notifications-dapr-triggers +user: agent-2b +command: /sp.tasks +labels: ["phase-v", "notifications", "dapr", "tasks", "implementation"] +links: + spec: specs/012-notifications-dapr-triggers/spec.md + ticket: null + adr: null + pr: null +files: + - specs/012-notifications-dapr-triggers/tasks.md +tests: + - null +--- + +## Prompt + +/sp.tasks notifications-dapr-triggers + +Generate task breakdown for notifications and Dapr integration implementation. + +## Response snapshot + +**Tasks Generated**: `specs/012-notifications-dapr-triggers/tasks.md` (62 tasks, ~155 minutes) + +**Task Distribution by User Story:** +- Setup: 4 tasks (10 min) +- Foundation: 8 tasks (25 min) +- US1 (on_due_date spawn): 14 tasks (45 min) - P1 MVP +- US2 (assignment notification): 10 tasks (30 min) +- US3 (due date reminder): 8 tasks (20 min) +- US4 (frontend bell): 10 tasks (25 min) +- US5 (completion notification): 4 tasks (10 min) +- US6 (spawn notification): 4 tasks (10 min) + +**Parallelizable**: 35 tasks (56%) + +**FR Traceability**: All 22 FRs from spec mapped to tasks + +**Key Features:** +- AI-Native Execution Guide with Context7 MCP queries +- Skills mapping (fastapi-backend, sqlmodel-database, nextjs-16, shadcn-ui) +- Acceptance criteria with verification commands per phase +- Dependency graph and parallel execution opportunities +- MVP scope identified (US1 only: ~80 min) + +## Outcome + +- Impact: Complete task breakdown enabling immediate implementation +- Tests: Manual validation checklist (no TDD requested) +- Files: tasks.md with 62 tasks, FR traceability matrix +- Next prompts: /sp.implement notifications-dapr-triggers +- Reflection: Task granularity targets 1-2 hour sweet spot. All tasks include file paths. 
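+
+A minimal sketch of the `SELECT ... FOR UPDATE SKIP LOCKED` decision recorded in the plan above, assuming SQLModel's async session as used elsewhere in the API; the helper name and exact filter set are illustrative, not the actual cron implementation:
+
+```python
+from datetime import datetime
+
+from sqlmodel import select
+from sqlmodel.ext.asyncio.session import AsyncSession
+
+from taskflow_api.models.task import Task
+
+
+async def claim_due_recurring_tasks(session: AsyncSession) -> list[Task]:
+    """Lock due recurring tasks so overlapping cron runs skip rows already claimed."""
+    statement = (
+        select(Task)
+        .where(
+            Task.is_recurring == True,  # noqa: E712 - SQL comparison, not identity check
+            Task.has_spawned_next == False,  # noqa: E712
+            Task.due_date <= datetime.utcnow(),
+        )
+        .with_for_update(skip_locked=True)  # rows locked by a concurrent worker are skipped
+    )
+    result = await session.exec(statement)
+    return list(result.all())
+```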
+ +## Evaluation notes (flywheel) + +- Failure modes observed: None - tasks extracted systematically from plan.md +- Graders run and results (PASS/FAIL): Format validation PASS (all 62 tasks have checkbox, ID, paths) +- Prompt variant (if applicable): Standard task generation with FR traceability +- Next experiment: Execute US1 to validate critical bug fix first diff --git a/history/prompts/notifications-dapr-triggers/005-implementation-complete.green.prompt.md b/history/prompts/notifications-dapr-triggers/005-implementation-complete.green.prompt.md new file mode 100644 index 0000000..b8291df --- /dev/null +++ b/history/prompts/notifications-dapr-triggers/005-implementation-complete.green.prompt.md @@ -0,0 +1,112 @@ +--- +id: 005 +title: Notifications Dapr Triggers Implementation Complete +stage: green +date: 2025-12-11 +surface: agent +model: claude-opus-4-5-20251101 +feature: notifications-dapr-triggers +branch: 012-notifications-dapr-triggers +user: agent-2b +command: /sp.implement +labels: ["phase-v", "notifications", "dapr", "implementation", "cron", "bug-fix"] +links: + spec: specs/012-notifications-dapr-triggers/spec.md + ticket: null + adr: null + pr: null +files: + - packages/api/src/taskflow_api/models/task.py + - packages/api/src/taskflow_api/models/notification.py + - packages/api/src/taskflow_api/schemas/notification.py + - packages/api/src/taskflow_api/services/events.py + - packages/api/src/taskflow_api/routers/cron.py + - packages/api/src/taskflow_api/routers/notifications.py + - packages/api/src/taskflow_api/routers/tasks.py + - packages/api/src/taskflow_api/routers/__init__.py + - packages/api/src/taskflow_api/config.py + - packages/api/src/taskflow_api/main.py + - packages/api/src/taskflow_api/database.py + - web-dashboard/src/components/NotificationBell.tsx + - web-dashboard/src/components/layout/header.tsx + - web-dashboard/src/lib/api.ts + - web-dashboard/src/types/notification.ts +tests: + - uv run pytest src/taskflow_api/tests/ -x (74 passed) + - pnpm build (successful) +--- + +## Prompt + +/sp.implement notifications-dapr-triggers + +Implement all user stories from the notifications-dapr-triggers feature spec. + +## Response snapshot + +**Implementation Complete**: All 6 user stories implemented across backend and frontend. + +**Backend Implementation:** + +1. **Foundational (Setup)** + - APScheduler 3.10.0+ added to dependencies + - Config settings: CRON_ENABLED, CRON_INTERVAL_SECONDS, DAPR_ENABLED, DAPR_HTTP_ENDPOINT, NOTIFICATION_RETENTION_DAYS + - Notification SQLModel with indexes (user_id, task_id, created_at) + - Notification schemas (Create, Read, Update) + - Event service with graceful degradation + +2. **US1: Critical Bug Fix - on_due_date spawn** + - Created `/api/cron/process-recurring-tasks` endpoint + - Implements `SELECT FOR UPDATE SKIP LOCKED` for row-level locking + - Checks max_occurrences before spawning + - Clones subtasks if `clone_subtasks_on_recur=True` + - Creates audit entries and notifications for spawned tasks + - APScheduler runs every 60 seconds via lifespan handler + +3. **US2: Assignment Notifications** + - Added notification creation to assign_task endpoint + - Notifies assignee when different from assigner + +4. **US3: Due Date Reminders** + - Created `/api/cron/send-reminders` endpoint + - Finds tasks due within 24 hours + - Uses `reminder_sent` flag to prevent duplicates + - APScheduler runs alongside recurring task processor + +5. 
**US5: Completion Notifications** + - Added notification to update_status when task is completed + - Notifies task creator when different from completer + +6. **Notification Router** + - `GET /api/notifications` - List user's notifications + - `GET /api/notifications/unread-count` - Get unread count + - `PATCH /api/notifications/{id}/read` - Mark as read + - `POST /api/notifications/mark-all-read` - Mark all read + +**Frontend Implementation:** + +1. **US4: Frontend Notification Bell** + - Created NotificationBell component with dropdown + - 30-second polling for unread count + - Icons for different notification types + - Mark single/all read functionality + - Relative time display using date-fns + +**Test Results:** +- Backend: 74 tests passed, 3 skipped +- Frontend: Build successful + +## Outcome + +- Impact: Critical bug fixed - `recurrence_trigger=on_due_date` now works +- Tests: All existing tests pass, new endpoints manually testable +- Files: 15 files created/modified +- Next prompts: Manual testing, database migration for new fields +- Reflection: Row-level locking with SKIP LOCKED pattern ensures safe concurrent cron execution + +## Evaluation notes (flywheel) + +- Failure modes observed: None - implementation followed plan.md closely +- Graders run and results (PASS/FAIL): pytest PASS, pnpm build PASS +- Prompt variant (if applicable): Standard implementation workflow +- Next experiment: Test with real recurring tasks to verify spawn behavior diff --git a/packages/api/src/taskflow_api/config.py b/packages/api/src/taskflow_api/config.py index 94ef152..8e6a103 100644 --- a/packages/api/src/taskflow_api/config.py +++ b/packages/api/src/taskflow_api/config.py @@ -40,6 +40,14 @@ class Settings(BaseSettings): # OpenAI API Key (required for chat) openai_api_key: str | None = None + # Dapr configuration + # Phase V requires Full Dapr: Pub/Sub, State, Bindings, Secrets, Service Invocation + dapr_http_endpoint: str = "http://localhost:3500" + dapr_pubsub_name: str = "taskflow-pubsub" # Kafka via Dapr + + # Notification configuration + notification_retention_days: int = 90 + @property def allowed_origins_list(self) -> list[str]: """Parse comma-separated origins into list.""" diff --git a/packages/api/src/taskflow_api/database.py b/packages/api/src/taskflow_api/database.py index 490aa6a..0a8850f 100644 --- a/packages/api/src/taskflow_api/database.py +++ b/packages/api/src/taskflow_api/database.py @@ -11,6 +11,7 @@ # Import all models to ensure they're registered with SQLModel.metadata # This MUST happen before create_db_and_tables() is called +# Notification removed - now handled by dedicated Notification Service from .models import AuditLog, Project, ProjectMember, Task, Worker # noqa: F401 diff --git a/packages/api/src/taskflow_api/main.py b/packages/api/src/taskflow_api/main.py index 36b5a7d..529fbb7 100644 --- a/packages/api/src/taskflow_api/main.py +++ b/packages/api/src/taskflow_api/main.py @@ -17,7 +17,7 @@ from .chatkit_store import RequestContext # noqa: E402 from .config import settings # noqa: E402 from .database import create_db_and_tables # noqa: E402 -from .routers import agents, audit, health, members, projects, tasks # noqa: E402 +from .routers import agents, audit, health, jobs, members, projects, tasks # noqa: E402 # Configure logging logging.basicConfig( @@ -131,6 +131,7 @@ async def general_exception_handler(request: Request, exc: Exception) -> JSONRes app.include_router(agents.router, prefix="/api/workers/agents") app.include_router(tasks.router) # Has its own prefixes 
defined app.include_router(audit.router, prefix="/api") +app.include_router(jobs.router) # Dapr Jobs callbacks at /api/jobs @app.post("/chatkit") diff --git a/packages/api/src/taskflow_api/models/__init__.py b/packages/api/src/taskflow_api/models/__init__.py index 672f8e9..8cd7fc8 100644 --- a/packages/api/src/taskflow_api/models/__init__.py +++ b/packages/api/src/taskflow_api/models/__init__.py @@ -5,6 +5,8 @@ from .task import Task from .worker import Worker +# Notification model removed - notifications now handled by dedicated microservice + __all__ = [ "AuditLog", "Project", diff --git a/packages/api/src/taskflow_api/models/task.py b/packages/api/src/taskflow_api/models/task.py index efd4fab..455c244 100644 --- a/packages/api/src/taskflow_api/models/task.py +++ b/packages/api/src/taskflow_api/models/task.py @@ -91,6 +91,10 @@ class Task(SQLModel, table=True): default=False, description="Whether this task has already spawned its next occurrence", ) + reminder_sent: bool = Field( + default=False, + description="Whether a reminder notification has been sent for this task's due date", + ) # Foreign keys project_id: int = Field(foreign_key="project.id", index=True) diff --git a/packages/api/src/taskflow_api/routers/__init__.py b/packages/api/src/taskflow_api/routers/__init__.py index 2678349..21e6f1b 100644 --- a/packages/api/src/taskflow_api/routers/__init__.py +++ b/packages/api/src/taskflow_api/routers/__init__.py @@ -1,11 +1,12 @@ """FastAPI routers.""" -from . import agents, audit, health, members, projects, tasks +from . import agents, audit, health, jobs, members, projects, tasks __all__ = [ "agents", "audit", "health", + "jobs", "members", "projects", "tasks", diff --git a/packages/api/src/taskflow_api/routers/jobs.py b/packages/api/src/taskflow_api/routers/jobs.py new file mode 100644 index 0000000..42c8c17 --- /dev/null +++ b/packages/api/src/taskflow_api/routers/jobs.py @@ -0,0 +1,258 @@ +"""Dapr Job callback handlers. + +NO POLLING - Dapr Jobs fire at exact scheduled times: +1. spawn jobs: Fire at task.due_date for on_due_date recurring tasks +2. reminder jobs: Fire at task.due_date - 24h + +Dapr calls POST /api/jobs/trigger when a scheduled job fires. +We publish events to Dapr pub/sub → Notification Service consumes them. 
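+
+Illustrative callback bodies (field names match what the handlers below read; the
+exact envelope Dapr wraps around "data" may vary by Dapr version, and the values
+here are made up):
+
+    {"data": {"task_id": 123, "type": "spawn"}}
+    {"data": {"task_id": 123, "type": "reminder", "user_id": "user-1",
+              "title": "Write report", "project_id": 7,
+              "due_at": "2025-12-12T09:00:00", "hours_until_due": 24}}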
+""" + +import logging +from datetime import datetime, timedelta + +from fastapi import APIRouter, Depends, Request +from sqlalchemy import func +from sqlmodel import select +from sqlmodel.ext.asyncio.session import AsyncSession + +from ..config import settings +from ..database import get_session +from ..models.task import Task +from ..models.worker import Worker +from ..services.audit import log_action +from ..services.events import TOPIC_REMINDERS, publish_event, publish_task_spawned +from ..services.jobs import schedule_recurring_spawn, schedule_reminder + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/api/jobs", tags=["Jobs (Dapr Callbacks)"]) + + +def calculate_next_due(pattern: str, from_time: datetime) -> datetime: + """Calculate next due date based on recurrence pattern.""" + patterns = { + "1m": timedelta(minutes=1), + "5m": timedelta(minutes=5), + "10m": timedelta(minutes=10), + "15m": timedelta(minutes=15), + "30m": timedelta(minutes=30), + "1h": timedelta(hours=1), + "daily": timedelta(days=1), + "weekly": timedelta(weeks=1), + "monthly": timedelta(days=30), + } + return from_time + patterns.get(pattern, timedelta(days=1)) + + +@router.post("/trigger") +async def handle_job_trigger( + request: Request, + session: AsyncSession = Depends(get_session), +) -> dict: + """Handle Dapr job trigger callback. + + Dapr calls this endpoint when a scheduled job fires. + Job types: + - spawn: Create next recurring task occurrence + - reminder: Publish reminder event to Notification Service + """ + try: + body = await request.json() + job_data = body.get("data", body) # Handle both wrapped and raw payloads + + task_id = job_data.get("task_id") + job_type = job_data.get("type") + + logger.info("[DAPR-JOB] Received trigger: type=%s, task_id=%s", job_type, task_id) + + if job_type == "spawn": + return await handle_spawn(session, task_id) + elif job_type == "reminder": + return await handle_reminder(session, job_data) + else: + logger.warning("[DAPR-JOB] Unknown job type: %s", job_type) + return {"status": "unknown_type"} + + except Exception as e: + logger.exception("[DAPR-JOB] Error handling trigger: %s", e) + return {"status": "error", "message": str(e)} + + +async def handle_reminder(session: AsyncSession, job_data: dict) -> dict: + """Handle reminder job - publish to reminders topic. + + The Notification Service subscribes to this topic and creates + the actual notification in its own database. + + NO notification stored here - that's the Notification Service's job. 
+ """ + task_id = job_data.get("task_id") + user_id = job_data.get("user_id") + title = job_data.get("title") + project_id = job_data.get("project_id") + due_at = job_data.get("due_at") + hours_until_due = job_data.get("hours_until_due", 24) + + if not user_id: + logger.warning("[DAPR-JOB] Reminder: no user_id for task %s", task_id) + return {"status": "no_user"} + + # Verify task still exists and isn't completed + task = await session.get(Task, task_id) + if not task: + logger.info("[DAPR-JOB] Reminder: task %s not found (deleted?)", task_id) + return {"status": "task_not_found"} + + if task.status in ("completed", "cancelled"): + logger.info("[DAPR-JOB] Reminder: task %s already %s", task_id, task.status) + return {"status": "task_already_done"} + + # Publish to reminders topic → Notification Service creates notification + await publish_event( + topic=TOPIC_REMINDERS, + event_type="reminder.due", + data={ + "task_id": task_id, + "user_id": user_id, + "title": title, + "project_id": project_id, + "due_at": due_at, + "hours_until_due": hours_until_due, + }, + dapr_http_endpoint=settings.dapr_http_endpoint, + pubsub_name=settings.dapr_pubsub_name, + ) + + # Mark task as reminded (prevent re-triggering if job somehow fires again) + task.reminder_sent = True + session.add(task) + await session.commit() + + logger.info("[DAPR-JOB] Published reminder for task %d to user %s", task_id, user_id) + return {"status": "reminder_sent"} + + +async def handle_spawn(session: AsyncSession, task_id: int) -> dict: + """Handle recurring spawn job trigger - create next task occurrence. + + This is triggered by Dapr at the exact due_date of a task with + recurrence_trigger='on_due_date' or 'both'. + + Publishes task.spawned event → Notification Service creates notification. 
+ """ + task = await session.get(Task, task_id) + if not task: + logger.warning("[DAPR-JOB] Spawn: task %d not found", task_id) + return {"status": "task_not_found"} + + # Skip if already spawned (idempotency) + if task.has_spawned_next: + logger.info("[DAPR-JOB] Spawn: task %d already spawned", task_id) + return {"status": "already_spawned"} + + # Check max_occurrences + root_id = task.recurring_root_id or task.id + if task.max_occurrences is not None: + result = await session.exec( + select(func.count(Task.id)).where( + (Task.id == root_id) | (Task.recurring_root_id == root_id) + ) + ) + spawn_count = result.one() or 0 + if spawn_count >= task.max_occurrences: + logger.info("[DAPR-JOB] Spawn: task %d reached max_occurrences", task_id) + task.has_spawned_next = True + session.add(task) + await session.commit() + return {"status": "max_occurrences_reached"} + + # Calculate next due date + next_due = calculate_next_due(task.recurrence_pattern, task.due_date) + + # Create new task + new_task = Task( + title=task.title, + description=task.description, + project_id=task.project_id, + assignee_id=task.assignee_id, + parent_task_id=task.parent_task_id, + created_by_id=task.created_by_id, + priority=task.priority, + tags=task.tags.copy() if task.tags else [], + due_date=next_due, + is_recurring=True, + recurrence_pattern=task.recurrence_pattern, + max_occurrences=task.max_occurrences, + recurring_root_id=root_id, + recurrence_trigger=task.recurrence_trigger, + clone_subtasks_on_recur=task.clone_subtasks_on_recur, + status="pending", + progress_percent=0, + ) + session.add(new_task) + await session.flush() + + # Mark original as spawned + task.has_spawned_next = True + session.add(task) + + # Audit + await log_action( + session, + entity_type="task", + entity_id=new_task.id, + action="spawned_recurring", + actor_id=task.created_by_id, + actor_type="system", + details={ + "spawned_from": task.id, + "recurring_root": root_id, + "trigger": "dapr_job", + "recurrence_pattern": task.recurrence_pattern, + "next_due": next_due.isoformat(), + }, + ) + + await session.commit() + + # Get assignee info for event + assignee_user_id = None + if new_task.assignee_id: + assignee = await session.get(Worker, new_task.assignee_id) + if assignee: + assignee_user_id = assignee.user_id or f"@{assignee.name}" + + # Publish task.spawned event → Notification Service creates notification + await publish_task_spawned( + task_id=new_task.id, + spawned_from=task.id, + recurring_root=root_id, + user_id=assignee_user_id or "", + task_title=new_task.title, + project_id=new_task.project_id, + dapr_http_endpoint=settings.dapr_http_endpoint, + pubsub_name=settings.dapr_pubsub_name, + ) + + # Schedule next spawn job for the new task + if new_task.recurrence_trigger in ("on_due_date", "both"): + await schedule_recurring_spawn( + task_id=new_task.id, + due_date=next_due, + dapr_http_endpoint=settings.dapr_http_endpoint, + ) + + # Schedule reminder for new task if it has assignee + if new_task.assignee_id and assignee_user_id: + await schedule_reminder( + task_id=new_task.id, + due_date=next_due, + user_id=assignee_user_id, + title=new_task.title, + project_id=new_task.project_id, + dapr_http_endpoint=settings.dapr_http_endpoint, + ) + + logger.info("[DAPR-JOB] Spawned task %d from %d", new_task.id, task_id) + return {"status": "spawned", "new_task_id": new_task.id} diff --git a/packages/api/src/taskflow_api/routers/tasks.py b/packages/api/src/taskflow_api/routers/tasks.py index afb0019..f176b3c 100644 --- 
a/packages/api/src/taskflow_api/routers/tasks.py +++ b/packages/api/src/taskflow_api/routers/tasks.py @@ -10,6 +10,7 @@ from sqlmodel.ext.asyncio.session import AsyncSession from ..auth import CurrentUser, get_current_user +from ..config import settings from ..database import get_session from ..models.project import Project, ProjectMember from ..models.task import VALID_TRANSITIONS, Task, validate_status_transition @@ -25,6 +26,18 @@ TaskUpdate, ) from ..services.audit import log_action +from ..services.events import ( + publish_task_assigned, + publish_task_completed, + publish_task_created, + publish_task_deleted, +) +from ..services.jobs import ( + cancel_recurring_spawn, + cancel_reminder, + schedule_recurring_spawn, + schedule_reminder, +) from ..services.user_setup import ensure_user_setup router = APIRouter(tags=["Tasks"]) @@ -550,6 +563,8 @@ async def create_task( worker = await ensure_user_setup(session, user) worker_id = worker.id worker_type = worker.type + worker_user_id = worker.user_id # Store before commit to avoid MissingGreenlet + worker_name = worker.name # Check project exists and user is member project = await session.get(Project, project_id) @@ -560,9 +575,13 @@ async def create_task( # Validate assignee if provided assignee = None assignee_handle = None + assignee_user_id = None + assignee_name = None if data.assignee_id: assignee = await check_assignee_is_member(session, project_id, data.assignee_id) assignee_handle = assignee.handle + assignee_user_id = assignee.user_id # Store before commit to avoid MissingGreenlet + assignee_name = assignee.name # Validate parent if provided if data.parent_task_id: @@ -608,6 +627,37 @@ async def create_task( await session.commit() await session.refresh(task) + # Schedule Dapr job for on_due_date recurring tasks (only thing that needs precise timing) + if task.due_date and task.is_recurring and task.recurrence_trigger in ("on_due_date", "both"): + await schedule_recurring_spawn( + task_id=task.id, + due_date=task.due_date, + dapr_http_endpoint=settings.dapr_http_endpoint, + ) + + # Schedule reminder if task has due_date and assignee + if task.due_date and task.assignee_id and assignee_user_id: + reminder_user_id = assignee_user_id or f"@{assignee_name}" + await schedule_reminder( + task_id=task.id, + due_date=task.due_date, + user_id=reminder_user_id, + title=task.title, + project_id=task.project_id, + dapr_http_endpoint=settings.dapr_http_endpoint, + ) + + # Publish task.created event → Notification Service + await publish_task_created( + task_id=task.id, + task=task, + actor_id=worker_user_id or f"@{worker_name}", + actor_name=worker_name, + user_id=assignee_user_id, # Notify assignee if different from creator + dapr_http_endpoint=settings.dapr_http_endpoint, + pubsub_name=settings.dapr_pubsub_name, + ) + return TaskRead( id=task.id, title=task.title, @@ -746,6 +796,16 @@ async def update_task( await session.commit() await session.refresh(task) + # Reschedule spawn job if due_date changed for recurring tasks + if "due_date" in changes and task.due_date: + await cancel_recurring_spawn(task_id, settings.dapr_http_endpoint) + if task.is_recurring and task.recurrence_trigger in ("on_due_date", "both"): + await schedule_recurring_spawn( + task_id=task.id, + due_date=task.due_date, + dapr_http_endpoint=settings.dapr_http_endpoint, + ) + assignee = None if task.assignee_id: assignee = await session.get(Worker, task.assignee_id) @@ -763,6 +823,8 @@ async def delete_task( worker = await ensure_user_setup(session, user) worker_id = 
worker.id worker_type = worker.type + worker_user_id = worker.user_id # Store before commit to avoid MissingGreenlet + worker_name = worker.name task = await session.get(Task, task_id) if not task: @@ -803,6 +865,20 @@ async def delete_subtasks(parent_id: int) -> int: await session.delete(task) await session.commit() + # Cancel scheduled jobs for deleted task + await cancel_recurring_spawn(task_id, settings.dapr_http_endpoint) + await cancel_reminder(task_id, settings.dapr_http_endpoint) + + # Publish task.deleted event → Notification Service + await publish_task_deleted( + task_id=task_id, + title=task_title, + actor_id=worker_user_id or f"@{worker_name}", + actor_name=worker_name, + dapr_http_endpoint=settings.dapr_http_endpoint, + pubsub_name=settings.dapr_pubsub_name, + ) + return {"ok": True} @@ -820,6 +896,8 @@ async def update_status( worker = await ensure_user_setup(session, user) worker_id = worker.id worker_type = worker.type + worker_user_id = worker.user_id # Store before commit to avoid MissingGreenlet + worker_name = worker.name task = await session.get(Task, task_id) if not task: @@ -851,6 +929,9 @@ async def update_status( if task.is_recurring and task.recurrence_pattern: await create_next_occurrence(session, task, worker_id, worker_type) + # Notification for task creator is now handled via Dapr pub/sub + # See publish_task_completed() call below + session.add(task) await log_action( @@ -866,6 +947,28 @@ async def update_status( await session.commit() await session.refresh(task) + # Publish completion event if task was completed + if data.status == "completed": + # Get creator info for notification + creator_user_id = None + if task.created_by_id != worker_id: + creator = await session.get(Worker, task.created_by_id) + if creator: + creator_user_id = creator.user_id or f"@{creator.name}" + + await publish_task_completed( + task_id=task_id, + task=task, + actor_id=worker_user_id or f"@{worker_name}", + actor_name=worker_name, + creator_id=creator_user_id, + dapr_http_endpoint=settings.dapr_http_endpoint, + pubsub_name=settings.dapr_pubsub_name, + ) + + # Cancel reminder since task is done + await cancel_reminder(task_id, settings.dapr_http_endpoint) + assignee = None if task.assignee_id: assignee = await session.get(Worker, task.assignee_id) @@ -933,6 +1036,8 @@ async def assign_task( worker = await ensure_user_setup(session, user) worker_id = worker.id worker_type = worker.type + worker_user_id = worker.user_id # Store before commit to avoid MissingGreenlet + worker_name = worker.name task = await session.get(Task, task_id) if not task: @@ -943,6 +1048,8 @@ async def assign_task( # Validate assignee assignee = await check_assignee_is_member(session, task.project_id, data.assignee_id) assignee_handle = assignee.handle + assignee_user_id = assignee.user_id # Store before commit + assignee_name = assignee.name old_assignee_id = task.assignee_id task.assignee_id = data.assignee_id @@ -967,6 +1074,19 @@ async def assign_task( await session.commit() await session.refresh(task) + # Publish assignment event → Notification Service + if data.assignee_id != worker_id: + await publish_task_assigned( + task_id=task_id, + task_title=task.title, + assignee_user_id=assignee_user_id or f"@{assignee_name}", + actor_id=worker_user_id or f"@{worker_name}", + actor_name=worker_name, + project_id=task.project_id, + dapr_http_endpoint=settings.dapr_http_endpoint, + pubsub_name=settings.dapr_pubsub_name, + ) + # Compute spawn_count for recurring tasks spawn_count = 0 if task.is_recurring: diff 
--git a/packages/api/src/taskflow_api/services/events.py b/packages/api/src/taskflow_api/services/events.py new file mode 100644 index 0000000..e8c59a7 --- /dev/null +++ b/packages/api/src/taskflow_api/services/events.py @@ -0,0 +1,320 @@ +"""Event publishing via Dapr pub/sub. + +Phase V requires Full Dapr integration including: +- Pub/Sub for task-events, reminders, task-updates topics +- Every task operation (create, update, delete, complete) publishes to Kafka via Dapr + +Architecture: +- Backend publishes to Dapr sidecar HTTP API +- Dapr routes to Kafka (configured via pubsub component YAML) +- Consumer services (Notification Service) subscribe +""" + +import logging +from datetime import datetime +from typing import TYPE_CHECKING, Any, Literal + +import httpx + +if TYPE_CHECKING: + from ..models.task import Task + +logger = logging.getLogger(__name__) + +# Topic names as specified in hackathon requirements +TOPIC_TASK_EVENTS = "task-events" # All task CRUD operations +TOPIC_REMINDERS = "reminders" # Scheduled reminder triggers +TOPIC_TASK_UPDATES = "task-updates" # Real-time client sync + + +async def publish_event( + topic: str, + event_type: str, + data: dict[str, Any], + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish event to Dapr pub/sub. + + Dapr abstracts Kafka - app code doesn't need kafka-python library. + Just HTTP POST to sidecar, Dapr handles Kafka connection. + + Args: + topic: Kafka topic name (task-events, reminders, task-updates) + event_type: Event type (created, updated, deleted, completed, etc.) + data: Event payload + dapr_http_endpoint: Dapr sidecar HTTP endpoint + pubsub_name: Dapr pub/sub component name (defined in YAML) + + Returns: + True if published successfully, False otherwise + """ + url = f"{dapr_http_endpoint}/v1.0/publish/{pubsub_name}/{topic}" + + payload = { + "event_type": event_type, + "data": data, + "timestamp": datetime.utcnow().isoformat(), + } + + try: + async with httpx.AsyncClient(timeout=2.0) as client: + response = await client.post(url, json=payload) + response.raise_for_status() + logger.info( + "[DAPR-PUBSUB] Published %s to %s: task_id=%s", + event_type, + topic, + data.get("task_id"), + ) + return True + except Exception as e: + # Log but don't crash - pub/sub is for downstream services + logger.warning("[DAPR-PUBSUB] Failed to publish %s to %s: %s", event_type, topic, e) + return False + + +def _task_to_dict(task: "Task") -> dict[str, Any]: + """Convert Task model to serializable dict.""" + return { + "id": task.id, + "title": task.title, + "description": task.description, + "status": task.status, + "priority": task.priority, + "progress_percent": task.progress_percent, + "tags": task.tags, + "due_date": task.due_date.isoformat() if task.due_date else None, + "project_id": task.project_id, + "assignee_id": task.assignee_id, + "parent_task_id": task.parent_task_id, + "is_recurring": task.is_recurring, + "recurrence_pattern": task.recurrence_pattern, + } + + +# Convenience functions for specific event types + + +async def publish_task_created( + task_id: int, + task: "Task", + actor_id: str, + actor_name: str, + user_id: str | None = None, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish task created event. 
+ + Args: + task_id: ID of created task + task: Task model instance + actor_id: Who created the task + actor_name: Display name of creator + user_id: User to notify (assignee if different from creator) + """ + return await publish_event( + topic=TOPIC_TASK_EVENTS, + event_type="task.created", + data={ + "task_id": task_id, + "user_id": user_id, + "actor_id": actor_id, + "actor_name": actor_name, + "task": _task_to_dict(task), + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_task_updated( + task_id: int, + changes: dict[str, Any], + actor_id: str, + actor_name: str, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish task updated event.""" + return await publish_event( + topic=TOPIC_TASK_EVENTS, + event_type="task.updated", + data={ + "task_id": task_id, + "actor_id": actor_id, + "actor_name": actor_name, + "changes": changes, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_task_deleted( + task_id: int, + title: str, + actor_id: str, + actor_name: str, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish task deleted event.""" + return await publish_event( + topic=TOPIC_TASK_EVENTS, + event_type="task.deleted", + data={ + "task_id": task_id, + "title": title, + "actor_id": actor_id, + "actor_name": actor_name, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_task_completed( + task_id: int, + task: "Task", + actor_id: str, + actor_name: str, + creator_id: str | None = None, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish task completed event. 
+ + Args: + task_id: ID of completed task + task: Task model instance + actor_id: Who completed the task + actor_name: Display name of completer + creator_id: Task creator to notify (if different from completer) + """ + return await publish_event( + topic=TOPIC_TASK_EVENTS, + event_type="task.completed", + data={ + "task_id": task_id, + "user_id": creator_id, # Notify creator + "actor_id": actor_id, + "actor_name": actor_name, + "task": _task_to_dict(task), + "is_recurring": task.is_recurring, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_task_spawned( + task_id: int, + spawned_from: int, + recurring_root: int, + user_id: str, + task_title: str = "", + project_id: int | None = None, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish recurring task spawned event.""" + return await publish_event( + topic=TOPIC_TASK_EVENTS, + event_type="task.spawned", + data={ + "task_id": task_id, + "spawned_from": spawned_from, + "recurring_root": recurring_root, + "user_id": user_id, + "task_title": task_title, + "project_id": project_id, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_task_assigned( + task_id: int, + task_title: str, + assignee_user_id: str, + actor_id: str, + actor_name: str, + project_id: int | None = None, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish task assigned event.""" + return await publish_event( + topic=TOPIC_TASK_EVENTS, + event_type="task.assigned", + data={ + "task_id": task_id, + "task_title": task_title, + "user_id": assignee_user_id, + "actor_id": actor_id, + "actor_name": actor_name, + "project_id": project_id, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_reminder_due( + task_id: int, + user_id: str, + title: str, + due_at: str, + hours_until_due: int, + project_id: int | None = None, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish reminder due event to reminders topic. + + Called by Dapr Job handler when reminder fires. + Notification Service consumes this and creates the notification. + """ + return await publish_event( + topic=TOPIC_REMINDERS, + event_type="reminder.due", + data={ + "task_id": task_id, + "user_id": user_id, + "title": title, + "due_at": due_at, + "hours_until_due": hours_until_due, + "project_id": project_id, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) + + +async def publish_task_update_for_sync( + task_id: int, + user_id: str, + action: Literal["created", "updated", "deleted", "completed"], + task_data: dict[str, Any] | None = None, + dapr_http_endpoint: str = "http://localhost:3500", + pubsub_name: str = "taskflow-pubsub", +) -> bool: + """Publish to task-updates topic for real-time client sync. + + WebSocket Service consumes this and broadcasts to connected clients. 
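+
+    Illustrative usage (argument values are hypothetical):
+
+        await publish_task_update_for_sync(
+            task_id=42,
+            user_id="user-123",
+            action="completed",
+            task_data={"id": 42, "status": "completed"},
+        )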
+ """ + return await publish_event( + topic=TOPIC_TASK_UPDATES, + event_type=f"sync.{action}", + data={ + "task_id": task_id, + "user_id": user_id, + "action": action, + "task": task_data, + }, + dapr_http_endpoint=dapr_http_endpoint, + pubsub_name=pubsub_name, + ) diff --git a/packages/api/src/taskflow_api/services/jobs.py b/packages/api/src/taskflow_api/services/jobs.py new file mode 100644 index 0000000..50c22cc --- /dev/null +++ b/packages/api/src/taskflow_api/services/jobs.py @@ -0,0 +1,250 @@ +"""Dapr Jobs API for programmatic scheduling. + +NO POLLING - schedule jobs at exact times: +1. on_due_date spawn: Schedule at task.due_date +2. reminders: Schedule at task.due_date - 24h + +Dapr calls back to our /api/jobs/trigger endpoint when job fires. +""" + +import logging +from datetime import datetime +from typing import Any + +import httpx + +logger = logging.getLogger(__name__) + + +async def schedule_job( + job_name: str, + due_time: datetime, + data: dict[str, Any], + dapr_http_endpoint: str = "http://localhost:3500", +) -> bool: + """Schedule a one-time Dapr job at a specific time. + + Args: + job_name: Unique job identifier (e.g., "spawn-task-123") + due_time: When to trigger the job (RFC3339 format) + data: Payload to include in the job trigger + dapr_http_endpoint: Dapr sidecar HTTP endpoint + + Returns: + True if scheduled successfully, False otherwise + """ + url = f"{dapr_http_endpoint}/v1.0-alpha1/jobs/{job_name}" + + # Format as RFC3339 (ISO8601 with timezone) + due_time_str = due_time.strftime("%Y-%m-%dT%H:%M:%SZ") + + payload = { + "dueTime": due_time_str, + "data": data, + } + + try: + async with httpx.AsyncClient(timeout=5.0) as client: + response = await client.post(url, json=payload) + if response.status_code == 204: + logger.info("[DAPR-JOB] Scheduled job '%s' for %s", job_name, due_time_str) + return True + else: + logger.warning( + "[DAPR-JOB] Failed to schedule '%s': %s %s", + job_name, + response.status_code, + response.text, + ) + return False + except Exception as e: + logger.warning("[DAPR-JOB] Failed to schedule '%s': %s", job_name, e) + return False + + +async def delete_job( + job_name: str, + dapr_http_endpoint: str = "http://localhost:3500", +) -> bool: + """Delete a scheduled Dapr job. + + Args: + job_name: Job identifier to delete + dapr_http_endpoint: Dapr sidecar HTTP endpoint + + Returns: + True if deleted successfully, False otherwise + """ + url = f"{dapr_http_endpoint}/v1.0-alpha1/jobs/{job_name}" + + try: + async with httpx.AsyncClient(timeout=5.0) as client: + response = await client.delete(url) + if response.status_code in (204, 500): # 500 = job not found, which is fine + logger.info("[DAPR-JOB] Deleted job '%s'", job_name) + return True + else: + logger.warning( + "[DAPR-JOB] Failed to delete '%s': %s", + job_name, + response.status_code, + ) + return False + except Exception as e: + logger.warning("[DAPR-JOB] Failed to delete '%s': %s", job_name, e) + return False + + +async def schedule_recurring_spawn( + task_id: int, + due_date: datetime, + dapr_http_endpoint: str = "http://localhost:3500", +) -> bool: + """Schedule recurring task spawn at due_date for on_due_date trigger. + + This is the ONLY Dapr job we use - because spawning MUST happen + at a specific time even if no user is online. 
+ + Args: + task_id: Recurring task to spawn from + due_date: When to spawn the next occurrence + dapr_http_endpoint: Dapr sidecar endpoint + + Returns: + True if scheduled successfully + """ + # Don't schedule if due_date is in the past + if due_date <= datetime.utcnow(): + logger.info("[DAPR-JOB] Due date already passed for task %d", task_id) + return False + + return await schedule_job( + job_name=f"spawn-task-{task_id}", + due_time=due_date, + data={"task_id": task_id, "type": "spawn"}, + dapr_http_endpoint=dapr_http_endpoint, + ) + + +async def cancel_recurring_spawn( + task_id: int, + dapr_http_endpoint: str = "http://localhost:3500", +) -> bool: + """Cancel a scheduled recurring spawn (e.g., when task is deleted). + + Args: + task_id: Task whose spawn to cancel + dapr_http_endpoint: Dapr sidecar endpoint + + Returns: + True if cancelled successfully + """ + return await delete_job( + job_name=f"spawn-task-{task_id}", + dapr_http_endpoint=dapr_http_endpoint, + ) + + +# ============================================ +# REMINDER SCHEDULING (PROGRAMMATIC) +# ============================================ + +REMINDER_HOURS_BEFORE = 24 +REMINDER_HOURS_SHORT = 1 # For tasks less than 24h away +REMINDER_MINUTES_MEDIUM = 5 # For tasks 5-60 min away +REMINDER_SECONDS_SHORT = 30 # For tasks 1-5 min away (quick tasks) + + +async def schedule_reminder( + task_id: int, + due_date: datetime, + user_id: str, + title: str, + project_id: int | None = None, + dapr_http_endpoint: str = "http://localhost:3500", +) -> bool: + """Schedule a reminder notification for a task. + + Scheduling logic: + - If due date > 24 hours away: remind 24 hours before + - If due date 1-24 hours away: remind 1 hour before + - If due date 5-60 minutes away: remind 5 minutes before + - If due date 1-5 minutes away: remind 30 seconds before + - If due date < 1 minute away: no reminder (too late) + + Args: + task_id: Task to remind about + due_date: Task due date + user_id: User to notify + title: Task title for notification + project_id: Optional project ID + dapr_http_endpoint: Dapr sidecar endpoint + + Returns: + True if scheduled successfully + """ + from datetime import timedelta + + now = datetime.utcnow() + time_until_due = due_date - now + + # Calculate appropriate reminder time based on how far away due date is + if time_until_due > timedelta(hours=REMINDER_HOURS_BEFORE): + # More than 24h away - remind 24h before + remind_at = due_date - timedelta(hours=REMINDER_HOURS_BEFORE) + hours_until = REMINDER_HOURS_BEFORE + elif time_until_due > timedelta(hours=REMINDER_HOURS_SHORT): + # 1-24h away - remind 1h before + remind_at = due_date - timedelta(hours=REMINDER_HOURS_SHORT) + hours_until = REMINDER_HOURS_SHORT + elif time_until_due > timedelta(minutes=REMINDER_MINUTES_MEDIUM): + # 5-60 minutes away - remind 5 minutes before + remind_at = due_date - timedelta(minutes=REMINDER_MINUTES_MEDIUM) + hours_until = 0 # Will show as "in a few minutes" in notification + elif time_until_due > timedelta(minutes=1): + # 1-5 minutes away - remind 30 seconds before (for quick tasks) + remind_at = due_date - timedelta(seconds=REMINDER_SECONDS_SHORT) + hours_until = -1 # Special flag for "in seconds" display + else: + # Less than 1 minute away - too late for reminder + logger.info("[DAPR-JOB] Due date too soon for reminder on task %d", task_id) + return False + + # Final check: reminder time must be in the future + if remind_at <= now: + logger.info("[DAPR-JOB] Reminder time already passed for task %d", task_id) + return False + + return await 
schedule_job( + job_name=f"reminder-task-{task_id}", + due_time=remind_at, + data={ + "task_id": task_id, + "type": "reminder", + "user_id": user_id, + "title": title, + "project_id": project_id, + "due_at": due_date.isoformat(), + "hours_until_due": hours_until, + }, + dapr_http_endpoint=dapr_http_endpoint, + ) + + +async def cancel_reminder( + task_id: int, + dapr_http_endpoint: str = "http://localhost:3500", +) -> bool: + """Cancel a scheduled reminder (e.g., when task deleted or completed). + + Args: + task_id: Task whose reminder to cancel + dapr_http_endpoint: Dapr sidecar endpoint + + Returns: + True if cancelled successfully + """ + return await delete_job( + job_name=f"reminder-task-{task_id}", + dapr_http_endpoint=dapr_http_endpoint, + ) diff --git a/packages/notification-service/Dockerfile b/packages/notification-service/Dockerfile new file mode 100644 index 0000000..f58fbb3 --- /dev/null +++ b/packages/notification-service/Dockerfile @@ -0,0 +1,23 @@ +# Notification Service Dockerfile +FROM python:3.13-slim + +WORKDIR /app + +# Install uv for fast dependency management +COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv + +# Copy dependency files first (for layer caching) +COPY pyproject.toml README.md ./ + +# Copy source code +COPY src/ src/ + +# Install dependencies (after copying src for -e install) +RUN uv pip install --system -e . + +EXPOSE 8001 + +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD python -c "import httpx; httpx.get('http://localhost:8001/health').raise_for_status()" + +CMD ["uvicorn", "notification_service.main:app", "--host", "0.0.0.0", "--port", "8001"] diff --git a/packages/notification-service/README.md b/packages/notification-service/README.md new file mode 100644 index 0000000..e69de29 diff --git a/packages/notification-service/pyproject.toml b/packages/notification-service/pyproject.toml new file mode 100644 index 0000000..b6fdeea --- /dev/null +++ b/packages/notification-service/pyproject.toml @@ -0,0 +1,41 @@ +[project] +name = "notification-service" +version = "0.1.0" +description = "TaskFlow Notification Microservice - Dapr pub/sub consumer" +readme = "README.md" +requires-python = ">=3.11" +dependencies = [ + "fastapi>=0.115.0", + "uvicorn[standard]>=0.32.0", + "sqlmodel>=0.0.22", + "asyncpg>=0.30.0", + "pydantic-settings>=2.6.0", + "httpx>=0.28.0", + "python-jose[cryptography]>=3.3.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0.0", + "pytest-asyncio>=0.24.0", + "ruff>=0.8.0", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/notification_service"] + +[tool.ruff] +line-length = 120 +target-version = "py311" + +[tool.ruff.lint] +select = ["E", "F", "I", "W"] +ignore = ["E501"] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +testpaths = ["tests"] diff --git a/packages/notification-service/src/notification_service/__init__.py b/packages/notification-service/src/notification_service/__init__.py new file mode 100644 index 0000000..12c73dc --- /dev/null +++ b/packages/notification-service/src/notification_service/__init__.py @@ -0,0 +1,3 @@ +"""TaskFlow Notification Service - Dapr pub/sub consumer microservice.""" + +__version__ = "0.1.0" diff --git a/packages/notification-service/src/notification_service/auth.py b/packages/notification-service/src/notification_service/auth.py new file mode 100644 index 0000000..3feed8c --- /dev/null +++ b/packages/notification-service/src/notification_service/auth.py @@ -0,0 
+1,118 @@ +"""JWT/JWKS authentication against Better Auth SSO. + +Copied from TaskFlow API - same auth pattern for microservice consistency. +""" + +import logging +import time +from typing import Any + +import httpx +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer +from jose import JWTError, jwt + +from .config import settings + +logger = logging.getLogger(__name__) + +security = HTTPBearer() + +# JWKS cache - fetched once, reused for 1 hour +_jwks_cache: dict[str, Any] | None = None +_jwks_cache_time: float = 0 +JWKS_CACHE_TTL = 3600 # 1 hour + + +async def get_jwks() -> dict[str, Any]: + """Fetch and cache JWKS public keys from SSO.""" + global _jwks_cache, _jwks_cache_time + + now = time.time() + if _jwks_cache and (now - _jwks_cache_time) < JWKS_CACHE_TTL: + return _jwks_cache + + jwks_url = f"{settings.sso_url}/api/auth/jwks" + logger.info("[AUTH] Fetching JWKS from %s", jwks_url) + + try: + async with httpx.AsyncClient(timeout=10.0) as client: + response = await client.get(jwks_url) + response.raise_for_status() + _jwks_cache = response.json() + _jwks_cache_time = now + return _jwks_cache + except httpx.HTTPError as e: + logger.error("[AUTH] JWKS fetch failed: %s", e) + if _jwks_cache: + logger.warning("[AUTH] Using expired JWKS cache as fallback") + return _jwks_cache + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail=f"Authentication service unavailable: {e}", + ) from e + + +async def verify_jwt(token: str) -> dict[str, Any]: + """Verify JWT signature using JWKS public keys.""" + try: + jwks = await get_jwks() + unverified_header = jwt.get_unverified_header(token) + kid = unverified_header.get("kid") + + rsa_key: dict[str, Any] | None = None + for key in jwks.get("keys", []): + if key.get("kid") == kid: + rsa_key = key + break + + if not rsa_key: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Token signing key not found", + headers={"WWW-Authenticate": "Bearer"}, + ) + + payload = jwt.decode( + token, + rsa_key, + algorithms=["RS256"], + options={"verify_aud": False}, + ) + return payload + + except JWTError as e: + logger.error("[AUTH] JWT verification failed: %s", e) + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail=f"Invalid JWT: {e}", + headers={"WWW-Authenticate": "Bearer"}, + ) from e + + +class CurrentUser: + """Authenticated user from JWT claims.""" + + def __init__(self, payload: dict[str, Any]) -> None: + self.id: str = payload.get("sub", "") + self.email: str = payload.get("email", "") + self.name: str = payload.get("name", "") + + def __repr__(self) -> str: + return f"CurrentUser(id={self.id!r}, email={self.email!r})" + + +async def get_current_user( + credentials: HTTPAuthorizationCredentials = Depends(security), +) -> CurrentUser: + """FastAPI dependency to get authenticated user from JWT.""" + # Dev mode bypass + if settings.dev_mode: + return CurrentUser({ + "sub": "dev-user-id", + "email": "dev@taskflow.local", + "name": "Dev User", + }) + + payload = await verify_jwt(credentials.credentials) + return CurrentUser(payload) diff --git a/packages/notification-service/src/notification_service/config.py b/packages/notification-service/src/notification_service/config.py new file mode 100644 index 0000000..dd6ff6a --- /dev/null +++ b/packages/notification-service/src/notification_service/config.py @@ -0,0 +1,42 @@ +"""Configuration for Notification Service.""" + +from pydantic_settings import BaseSettings, 
SettingsConfigDict + + +class Settings(BaseSettings): + """Notification service settings from environment.""" + + model_config = SettingsConfigDict( + env_file=".env", + env_file_encoding="utf-8", + case_sensitive=False, + extra="ignore", + ) + + # Database - SEPARATE from main TaskFlow DB + database_url: str + + # SSO configuration (for JWT verification) + sso_url: str = "http://localhost:3001" + + # Dev mode (bypass JWT verification for local dev) + dev_mode: bool = False + + # Dapr configuration + dapr_http_endpoint: str = "http://localhost:3500" + dapr_pubsub_name: str = "taskflow-pubsub" + dapr_app_id: str = "notification-service" + + # Service configuration + debug: bool = False + log_level: str = "INFO" + + # CORS (for direct API access if needed) + allowed_origins: str = "http://localhost:3000,http://localhost:3001" + + @property + def allowed_origins_list(self) -> list[str]: + return [origin.strip() for origin in self.allowed_origins.split(",")] + + +settings = Settings() diff --git a/packages/notification-service/src/notification_service/database.py b/packages/notification-service/src/notification_service/database.py new file mode 100644 index 0000000..bd188d5 --- /dev/null +++ b/packages/notification-service/src/notification_service/database.py @@ -0,0 +1,92 @@ +"""Database configuration for Notification Service. + +Uses a SEPARATE database from main TaskFlow API. +This is the microservice pattern - each service owns its data. +""" + +import logging +from collections.abc import AsyncGenerator +from urllib.parse import parse_qs, urlencode, urlparse, urlunparse + +from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.pool import NullPool +from sqlmodel import SQLModel +from sqlmodel.ext.asyncio.session import AsyncSession + +from .config import settings + +logger = logging.getLogger(__name__) + + +def get_async_database_url(url: str) -> str: + """Convert sync PostgreSQL URL to async-compatible format. + + Handles: + - Driver prefix: postgresql:// → postgresql+asyncpg:// + - SSL param: sslmode=require → ssl=require + - Removes unsupported params (channel_binding, etc.) 
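+
+    Example (illustrative Neon-style URL):
+        in:  postgresql://user:pw@host/db?sslmode=require&channel_binding=require
+        out: postgresql+asyncpg://user:pw@host/db?ssl=require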
+ """ + if not url: + raise ValueError("DATABASE_URL is required") + + # Convert driver prefix + if url.startswith("postgresql://"): + url = url.replace("postgresql://", "postgresql+asyncpg://", 1) + elif url.startswith("postgres://"): + url = url.replace("postgres://", "postgresql+asyncpg://", 1) + + # Parse URL to handle params + parsed = urlparse(url) + params = parse_qs(parsed.query) + + # Convert sslmode to ssl for asyncpg + if "sslmode" in params: + params["ssl"] = params.pop("sslmode") + + # Remove unsupported asyncpg params + unsupported = ["channel_binding"] + for param in unsupported: + params.pop(param, None) + + # Rebuild query string (parse_qs returns lists, flatten single values) + clean_params = {k: v[0] if len(v) == 1 else v for k, v in params.items()} + new_query = urlencode(clean_params) + + # Rebuild URL + return urlunparse( + ( + parsed.scheme, + parsed.netloc, + parsed.path, + parsed.params, + new_query, + parsed.fragment, + ) + ) + + +# Create async engine with cleaned URL +DATABASE_URL = get_async_database_url(settings.database_url) +logger.info("Database URL configured (asyncpg format)") + +# Use NullPool for serverless/cloud databases like Neon +# This avoids pool_pre_ping issues with asyncpg and is better for serverless +# where connections are short-lived anyway +engine = create_async_engine( + DATABASE_URL, + echo=settings.debug, + poolclass=NullPool, # No connection pooling - Neon handles this server-side +) + + +async def init_db() -> None: + """Create all tables in the notification database.""" + async with engine.begin() as conn: + await conn.run_sync(SQLModel.metadata.create_all) + logger.info("Notification database initialized") + + +async def get_session() -> AsyncGenerator[AsyncSession, None]: + """Dependency that yields async database sessions.""" + async with AsyncSession(engine) as session: + yield session diff --git a/packages/notification-service/src/notification_service/main.py b/packages/notification-service/src/notification_service/main.py new file mode 100644 index 0000000..3751ae7 --- /dev/null +++ b/packages/notification-service/src/notification_service/main.py @@ -0,0 +1,80 @@ +"""Notification Service - Dapr pub/sub consumer microservice. + +This is a separate microservice that: +1. Subscribes to Dapr pub/sub topics (task-events, reminders) +2. Stores notifications in its OWN database +3. 
Exposes REST API for frontend to fetch notifications + +Architecture: +- Runs as separate K8s deployment with Dapr sidecar +- Has its own PostgreSQL database (separate from TaskFlow API) +- Receives events via Dapr, no direct coupling to TaskFlow API +""" + +import logging +from contextlib import asynccontextmanager + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from .config import settings +from .database import init_db +from .routers import dapr_router, notifications_router + +# Configure logging +logging.basicConfig( + level=getattr(logging, settings.log_level.upper()), + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", +) +logger = logging.getLogger(__name__) + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Application lifespan - startup and shutdown.""" + logger.info("Starting Notification Service...") + await init_db() + logger.info("Notification Service ready - Dapr subscriptions active") + yield + logger.info("Shutting down Notification Service...") + + +app = FastAPI( + title="TaskFlow Notification Service", + description="Dapr pub/sub consumer for task notifications", + version="0.1.0", + lifespan=lifespan, +) + +# CORS for direct API access (if frontend calls this service directly) +app.add_middleware( + CORSMiddleware, + allow_origins=settings.allowed_origins_list, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# Register routers +app.include_router(dapr_router) +app.include_router(notifications_router) + + +@app.get("/health") +async def health_check() -> dict: + """Health check endpoint for K8s probes.""" + return {"status": "healthy", "service": "notification-service"} + + +@app.get("/") +async def root() -> dict: + """Root endpoint with service info.""" + return { + "service": "TaskFlow Notification Service", + "version": "0.1.0", + "dapr_app_id": settings.dapr_app_id, + "subscriptions": [ + "task-events", + "reminders", + ], + } diff --git a/packages/notification-service/src/notification_service/models/__init__.py b/packages/notification-service/src/notification_service/models/__init__.py new file mode 100644 index 0000000..bf44e96 --- /dev/null +++ b/packages/notification-service/src/notification_service/models/__init__.py @@ -0,0 +1,5 @@ +"""Notification service models.""" + +from .notification import Notification + +__all__ = ["Notification"] diff --git a/packages/notification-service/src/notification_service/models/notification.py b/packages/notification-service/src/notification_service/models/notification.py new file mode 100644 index 0000000..ded5b86 --- /dev/null +++ b/packages/notification-service/src/notification_service/models/notification.py @@ -0,0 +1,74 @@ +"""Notification model - stored in notification service's own database.""" + +from datetime import datetime +from typing import Literal + +from sqlmodel import Field, SQLModel + + +NotificationType = Literal[ + "task_assigned", + "task_completed", + "task_spawned", + "task_reminder", + "task_created", + "task_updated", + "task_deleted", +] + + +class Notification(SQLModel, table=True): + """Notification stored in notification service database. + + This is the ONLY table in this service's database. + Each microservice owns its data - no shared DB. 
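+
+    Illustrative query shape used by the REST layer to list a user's unread items
+    (the exact query in routers/notifications.py may differ):
+
+        select(Notification).where(Notification.user_id == user_id, Notification.read == False)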
+ """ + + __tablename__ = "notification" + + id: int | None = Field(default=None, primary_key=True) + + # Who receives this notification + user_id: str = Field(index=True, description="SSO user ID of recipient") + user_type: str = Field(default="human", description="human or agent") + + # Notification content + type: str = Field(description="Notification type (task_assigned, task_reminder, etc.)") + title: str = Field(max_length=200) + body: str | None = Field(default=None, max_length=500) + + # Link back to task (optional) + task_id: int | None = Field(default=None, index=True) + project_id: int | None = Field(default=None) + + # State + read: bool = Field(default=False, index=True) + + # Metadata from event + actor_id: str | None = Field(default=None, description="Who triggered this notification") + actor_name: str | None = Field(default=None) + + # Timestamps + created_at: datetime = Field(default_factory=datetime.utcnow) + read_at: datetime | None = Field(default=None) + + +class NotificationRead(SQLModel): + """Response schema for notification.""" + + id: int + user_id: str + type: str + title: str + body: str | None + task_id: int | None + project_id: int | None + read: bool + actor_name: str | None + created_at: datetime + + +class NotificationUpdate(SQLModel): + """Request schema for updating notification.""" + + read: bool diff --git a/packages/notification-service/src/notification_service/routers/__init__.py b/packages/notification-service/src/notification_service/routers/__init__.py new file mode 100644 index 0000000..62ce705 --- /dev/null +++ b/packages/notification-service/src/notification_service/routers/__init__.py @@ -0,0 +1,6 @@ +"""Notification service routers.""" + +from .dapr import router as dapr_router +from .notifications import router as notifications_router + +__all__ = ["dapr_router", "notifications_router"] diff --git a/packages/notification-service/src/notification_service/routers/dapr.py b/packages/notification-service/src/notification_service/routers/dapr.py new file mode 100644 index 0000000..d0d2797 --- /dev/null +++ b/packages/notification-service/src/notification_service/routers/dapr.py @@ -0,0 +1,241 @@ +"""Dapr subscription handlers. + +This is where Dapr pub/sub events are consumed. +Dapr calls these endpoints when events are published to subscribed topics. + +Flow: +1. TaskFlow API publishes event to Dapr sidecar +2. Dapr routes to Kafka/Redis (based on component config) +3. Dapr calls this service's /dapr/subscribe to get subscriptions +4. Dapr delivers events to /dapr/events/{topic} +5. This service stores notification in its own DB +""" + +import logging +from datetime import datetime + +from fastapi import APIRouter, Depends, Request +from sqlmodel.ext.asyncio.session import AsyncSession + +from ..config import settings +from ..database import get_session +from ..models.notification import Notification + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/dapr", tags=["Dapr Subscriptions"]) + + +# Topics we subscribe to +SUBSCRIPTIONS = [ + {"pubsubname": settings.dapr_pubsub_name, "topic": "task-events", "route": "/dapr/events/task-events"}, + {"pubsubname": settings.dapr_pubsub_name, "topic": "reminders", "route": "/dapr/events/reminders"}, +] + + +@router.get("/subscribe") +async def get_subscriptions() -> list[dict]: + """Tell Dapr which topics we subscribe to. + + Dapr calls this endpoint on startup to discover subscriptions. + This is the programmatic subscription method. 
+ """ + logger.info("[DAPR] Returning subscriptions: %s", [s["topic"] for s in SUBSCRIPTIONS]) + return SUBSCRIPTIONS + + +@router.post("/events/task-events") +async def handle_task_events( + request: Request, + session: AsyncSession = Depends(get_session), +) -> dict: + """Handle events from task-events topic. + + Events: task.created, task.updated, task.deleted, task.completed, task.spawned + """ + try: + raw_event = await request.json() + logger.info("[DAPR] Raw event received: %s", raw_event) + + # Dapr CloudEvent wraps our payload in "data" field + # Our payload has: {"event_type": ..., "data": ..., "timestamp": ...} + event = raw_event.get("data", raw_event) # Unwrap CloudEvent or use as-is + logger.info("[DAPR] Received task-events: %s", event.get("event_type")) + + event_type = event.get("event_type", "") + data = event.get("data", {}) + + # Generate notification based on event type + notification = await _create_notification_from_event(event_type, data) + + if notification: + session.add(notification) + await session.commit() + logger.info("[DAPR] Created notification %d for %s", notification.id, notification.user_id) + + return {"status": "SUCCESS"} + + except Exception as e: + logger.exception("[DAPR] Error handling task-events: %s", e) + # Return SUCCESS to prevent Dapr retries for bad events + return {"status": "SUCCESS"} + + +@router.post("/events/reminders") +async def handle_reminders( + request: Request, + session: AsyncSession = Depends(get_session), +) -> dict: + """Handle events from reminders topic. + + Events: reminder.scheduled, reminder.sent + These are triggered by Dapr Jobs at exact times. + """ + try: + event = await request.json() + logger.info("[DAPR] Received reminder event: %s", event.get("event_type")) + + event_type = event.get("event_type", "") + data = event.get("data", {}) + + if event_type == "reminder.due": + # This is the actual reminder notification to user + hours_until = data.get("hours_until_due", 24) + if hours_until == -1: + time_text = "in seconds" # Quick task reminder (30 sec before) + elif hours_until == 0: + time_text = "in a few minutes" + elif hours_until == 1: + time_text = "in 1 hour" + else: + time_text = f"in {hours_until} hours" + + notification = Notification( + user_id=data.get("user_id", ""), + user_type="human", + type="task_reminder", + title=f"Task due {time_text}", + body=f'"{data.get("title", "Task")}" is approaching its deadline', + task_id=data.get("task_id"), + project_id=data.get("project_id"), + actor_id="system", + actor_name="TaskFlow", + ) + session.add(notification) + await session.commit() + logger.info("[DAPR] Created reminder notification for user %s", notification.user_id) + + return {"status": "SUCCESS"} + + except Exception as e: + logger.exception("[DAPR] Error handling reminder: %s", e) + return {"status": "SUCCESS"} + + +async def _create_notification_from_event(event_type: str, data: dict) -> Notification | None: + """Create notification from event data. + + Returns None if no notification should be created. 
+ """ + user_id = data.get("user_id") + if not user_id: + return None + + # task_title can be nested in "task" object or at top level (for task.assigned) + task_title = data.get("task_title") or data.get("task", {}).get("title", "Task") + task_id = data.get("task_id") + project_id = data.get("task", {}).get("project_id") or data.get("project_id") + actor_name = data.get("actor_name", "Someone") + + if event_type == "task.created": + # Notify assignee when task is created and assigned to them + assignee_id = data.get("task", {}).get("assignee_id") + if assignee_id and assignee_id != data.get("actor_id"): + return Notification( + user_id=user_id, + user_type="human", + type="task_assigned", + title="New task assigned to you", + body=f'{actor_name} created "{task_title}"', + task_id=task_id, + project_id=project_id, + actor_id=data.get("actor_id"), + actor_name=actor_name, + ) + + elif event_type == "task.completed": + # Notify creator when someone else completes their task + creator_id = data.get("creator_id") + if creator_id and creator_id != data.get("actor_id"): + return Notification( + user_id=creator_id, + user_type="human", + type="task_completed", + title="Task completed", + body=f'{actor_name} completed "{task_title}"', + task_id=task_id, + project_id=project_id, + actor_id=data.get("actor_id"), + actor_name=actor_name, + ) + + elif event_type == "task.spawned": + # Notify assignee when recurring task spawns new occurrence + return Notification( + user_id=user_id, + user_type="human", + type="task_spawned", + title="Recurring task created", + body=f'New occurrence of "{task_title}" is ready', + task_id=task_id, + project_id=project_id, + actor_id="system", + actor_name="TaskFlow", + ) + + elif event_type == "task.assigned": + # Direct assignment notification + return Notification( + user_id=user_id, + user_type="human", + type="task_assigned", + title="Task assigned to you", + body=f'{actor_name} assigned "{task_title}" to you', + task_id=task_id, + project_id=project_id, + actor_id=data.get("actor_id"), + actor_name=actor_name, + ) + + elif event_type == "task.deleted": + # Notify assignee when their task is deleted by someone else + return Notification( + user_id=user_id, + user_type="human", + type="task_deleted", + title="Task deleted", + body=f'{actor_name} deleted "{task_title}"', + task_id=task_id, + project_id=project_id, + actor_id=data.get("actor_id"), + actor_name=actor_name, + ) + + elif event_type == "task.updated": + # Notify assignee when task is updated (status change, priority, etc.) + changes = data.get("changes", {}) + if changes: + change_summary = ", ".join(f"{k}: {v}" for k, v in list(changes.items())[:2]) + return Notification( + user_id=user_id, + user_type="human", + type="task_updated", + title="Task updated", + body=f'{actor_name} updated "{task_title}" ({change_summary})', + task_id=task_id, + project_id=project_id, + actor_id=data.get("actor_id"), + actor_name=actor_name, + ) + + return None diff --git a/packages/notification-service/src/notification_service/routers/notifications.py b/packages/notification-service/src/notification_service/routers/notifications.py new file mode 100644 index 0000000..db18580 --- /dev/null +++ b/packages/notification-service/src/notification_service/routers/notifications.py @@ -0,0 +1,140 @@ +"""Notification API endpoints. + +These endpoints are called by the frontend to fetch/manage notifications. +Uses JWT auth - same pattern as TaskFlow API. 
+""" + +from datetime import datetime + +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlmodel import func, select +from sqlmodel.ext.asyncio.session import AsyncSession + +from ..auth import CurrentUser, get_current_user +from ..database import get_session +from ..models.notification import Notification, NotificationRead, NotificationUpdate + +router = APIRouter(prefix="/api/notifications", tags=["Notifications"]) + + +@router.get("", response_model=list[NotificationRead]) +async def list_notifications( + user: CurrentUser = Depends(get_current_user), + unread_only: bool = Query(default=False), + limit: int = Query(default=50, le=100), + offset: int = Query(default=0, ge=0), + session: AsyncSession = Depends(get_session), +) -> list[NotificationRead]: + """List notifications for the current user. + + Uses JWT auth to identify user - no query param needed. + """ + # Use SSO user ID to find notifications + # Notifications are stored with user_id = SSO sub claim + stmt = ( + select(Notification) + .where(Notification.user_id == user.id) + .order_by(Notification.created_at.desc()) + .offset(offset) + .limit(limit) + ) + + if unread_only: + stmt = stmt.where(Notification.read == False) # noqa: E712 + + result = await session.exec(stmt) + notifications = result.all() + + return [ + NotificationRead( + id=n.id, + user_id=n.user_id, + type=n.type, + title=n.title, + body=n.body, + task_id=n.task_id, + project_id=n.project_id, + read=n.read, + actor_name=n.actor_name, + created_at=n.created_at, + ) + for n in notifications + ] + + +@router.get("/unread-count") +async def get_unread_count( + user: CurrentUser = Depends(get_current_user), + session: AsyncSession = Depends(get_session), +) -> dict: + """Get count of unread notifications.""" + stmt = select(func.count(Notification.id)).where( + Notification.user_id == user.id, + Notification.read == False, # noqa: E712 + ) + + result = await session.exec(stmt) + count = result.one() or 0 + + return {"count": count} + + +@router.patch("/{notification_id}/read", response_model=NotificationRead) +async def mark_read( + notification_id: int, + update: NotificationUpdate, + user: CurrentUser = Depends(get_current_user), + session: AsyncSession = Depends(get_session), +) -> NotificationRead: + """Mark a notification as read or unread.""" + notification = await session.get(Notification, notification_id) + + if not notification: + raise HTTPException(status_code=404, detail="Notification not found") + + if notification.user_id != user.id: + raise HTTPException(status_code=403, detail="Not authorized") + + notification.read = update.read + notification.read_at = datetime.utcnow() if update.read else None + session.add(notification) + await session.commit() + await session.refresh(notification) + + return NotificationRead( + id=notification.id, + user_id=notification.user_id, + type=notification.type, + title=notification.title, + body=notification.body, + task_id=notification.task_id, + project_id=notification.project_id, + read=notification.read, + actor_name=notification.actor_name, + created_at=notification.created_at, + ) + + +@router.post("/mark-all-read") +async def mark_all_read( + user: CurrentUser = Depends(get_current_user), + session: AsyncSession = Depends(get_session), +) -> dict: + """Mark all notifications as read for current user.""" + stmt = select(Notification).where( + Notification.user_id == user.id, + Notification.read == False, # noqa: E712 + ) + + result = await session.exec(stmt) + notifications = result.all() + + 
now = datetime.utcnow() + for notification in notifications: + notification.read = True + notification.read_at = now + session.add(notification) + + await session.commit() + + return {"updated": len(notifications)} diff --git a/packages/notification-service/uv.lock b/packages/notification-service/uv.lock new file mode 100644 index 0000000..479b8ee --- /dev/null +++ b/packages/notification-service/uv.lock @@ -0,0 +1,1055 @@ +version = 1 +revision = 2 +requires-python = ">=3.11" + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.31.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, + { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, + { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, + { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" }, + { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, +] + +[[package]] +name = "certifi" +version = "2025.11.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = 
"2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = 
"2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + +[[package]] +name = "ecdsa" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, +] + +[[package]] +name = "fastapi" +version = "0.124.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/b7/4dbca3f9d847ba9876dcb7098c13a4c6c86ee8db148c923fab78e27748d3/fastapi-0.124.2.tar.gz", hash = "sha256:72e188f01f360e2f59da51c8822cbe4bca210c35daaae6321b1b724109101c00", size = 361867, upload-time = "2025-12-10T12:10:10.676Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/c5/8a5231197b81943b2df126cc8ea2083262e004bee3a39cf85a471392d145/fastapi-0.124.2-py3-none-any.whl", hash = "sha256:6314385777a507bb19b34bd064829fddaea0eea54436deb632b5de587554055c", size = 112711, upload-time = "2025-12-10T12:10:08.855Z" }, +] + +[[package]] +name = "greenlet" +version = "3.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, 
upload-time = "2025-12-04T14:49:44.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/cb/48e964c452ca2b92175a9b2dca037a553036cb053ba69e284650ce755f13/greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e", size = 274908, upload-time = "2025-12-04T14:23:26.435Z" }, + { url = "https://files.pythonhosted.org/packages/28/da/38d7bff4d0277b594ec557f479d65272a893f1f2a716cad91efeb8680953/greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62", size = 577113, upload-time = "2025-12-04T14:50:05.493Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f2/89c5eb0faddc3ff014f1c04467d67dee0d1d334ab81fadbf3744847f8a8a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32", size = 590338, upload-time = "2025-12-04T14:57:41.136Z" }, + { url = "https://files.pythonhosted.org/packages/80/d7/db0a5085035d05134f8c089643da2b44cc9b80647c39e93129c5ef170d8f/greenlet-3.3.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:670d0f94cd302d81796e37299bcd04b95d62403883b24225c6b5271466612f45", size = 601098, upload-time = "2025-12-04T15:07:11.898Z" }, + { url = "https://files.pythonhosted.org/packages/dc/a6/e959a127b630a58e23529972dbc868c107f9d583b5a9f878fb858c46bc1a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948", size = 590206, upload-time = "2025-12-04T14:26:01.254Z" }, + { url = "https://files.pythonhosted.org/packages/48/60/29035719feb91798693023608447283b266b12efc576ed013dd9442364bb/greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794", size = 1550668, upload-time = "2025-12-04T15:04:22.439Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5f/783a23754b691bfa86bd72c3033aa107490deac9b2ef190837b860996c9f/greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5", size = 1615483, upload-time = "2025-12-04T14:27:28.083Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d5/c339b3b4bc8198b7caa4f2bd9fd685ac9f29795816d8db112da3d04175bb/greenlet-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:7652ee180d16d447a683c04e4c5f6441bae7ba7b17ffd9f6b3aff4605e9e6f71", size = 301164, upload-time = "2025-12-04T14:42:51.577Z" }, + { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, + { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, + { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, + { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" }, + { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, + { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, + { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, + { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, + { url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" }, + { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" }, + { url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" }, + { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" }, + { url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" }, + { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" }, + { url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" }, + { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" }, + { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" }, + { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" }, + { url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" }, + { url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, + { url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, 
upload-time = "2025-10-10T03:54:40.403Z" }, + { url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" }, + { url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" }, + { url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" }, + { url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" }, + { url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" }, + { url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" }, + { url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" }, + { url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" }, + { url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" }, + { url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" }, + { url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" }, + { url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" }, + { url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" }, + { url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" }, + { url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" 
+version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "notification-service" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "asyncpg" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "pydantic-settings" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "sqlmodel" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "asyncpg", specifier = ">=0.30.0" }, + { name = "fastapi", specifier = ">=0.115.0" }, + { name = "httpx", specifier = ">=0.28.0" }, + { name = "pydantic-settings", specifier = ">=2.6.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.24.0" }, + { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" }, + { name = "sqlmodel", specifier = ">=0.0.22" }, + { name = "uvicorn", extras = ["standard"], specifier = ">=0.32.0" }, +] +provides-extras = ["dev"] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 
69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = 
"sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", 
size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", 
hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = 
"typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "python-jose" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "pyasn1" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/77/3a1c9039db7124eb039772b935f2244fbb73fc8ee65b9acf2375da1c07bf/python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b", size = 92726, upload-time = "2025-05-28T17:31:54.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" }, +] + +[package.optional-dependencies] +cryptography = [ + { name = "cryptography" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = 
"2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ed/d9/f7a0c4b3a2bf2556cd5d99b05372c29980249ef71e8e32669ba77428c82c/ruff-0.14.8.tar.gz", hash = "sha256:774ed0dd87d6ce925e3b8496feb3a00ac564bea52b9feb551ecd17e0a23d1eed", size = 5765385, upload-time = "2025-12-04T15:06:17.669Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/b8/9537b52010134b1d2b72870cc3f92d5fb759394094741b09ceccae183fbe/ruff-0.14.8-py3-none-linux_armv6l.whl", hash = "sha256:ec071e9c82eca417f6111fd39f7043acb53cd3fde9b1f95bbed745962e345afb", size = 13441540, upload-time = "2025-12-04T15:06:14.896Z" }, + { url = "https://files.pythonhosted.org/packages/24/00/99031684efb025829713682012b6dd37279b1f695ed1b01725f85fd94b38/ruff-0.14.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8cdb162a7159f4ca36ce980a18c43d8f036966e7f73f866ac8f493b75e0c27e9", size = 13669384, upload-time = "2025-12-04T15:06:51.809Z" }, + { url = "https://files.pythonhosted.org/packages/72/64/3eb5949169fc19c50c04f28ece2c189d3b6edd57e5b533649dae6ca484fe/ruff-0.14.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2e2fcbefe91f9fad0916850edf0854530c15bd1926b6b779de47e9ab619ea38f", size = 12806917, upload-time = "2025-12-04T15:06:08.925Z" }, + { url = "https://files.pythonhosted.org/packages/c4/08/5250babb0b1b11910f470370ec0cbc67470231f7cdc033cee57d4976f941/ruff-0.14.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d70721066a296f45786ec31916dc287b44040f553da21564de0ab4d45a869b", size = 13256112, upload-time = "2025-12-04T15:06:23.498Z" }, + { url = "https://files.pythonhosted.org/packages/78/4c/6c588e97a8e8c2d4b522c31a579e1df2b4d003eddfbe23d1f262b1a431ff/ruff-0.14.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2c87e09b3cd9d126fc67a9ecd3b5b1d3ded2b9c7fce3f16e315346b9d05cfb52", size = 13227559, upload-time = "2025-12-04T15:06:33.432Z" }, + { url = "https://files.pythonhosted.org/packages/23/ce/5f78cea13eda8eceac71b5f6fa6e9223df9b87bb2c1891c166d1f0dce9f1/ruff-0.14.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d62cb310c4fbcb9ee4ac023fe17f984ae1e12b8a4a02e3d21489f9a2a5f730c", size = 13896379, upload-time = "2025-12-04T15:06:02.687Z" }, + { url = "https://files.pythonhosted.org/packages/cf/79/13de4517c4dadce9218a20035b21212a4c180e009507731f0d3b3f5df85a/ruff-0.14.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1af35c2d62633d4da0521178e8a2641c636d2a7153da0bac1b30cfd4ccd91344", size = 15372786, upload-time = "2025-12-04T15:06:29.828Z" }, + { url = "https://files.pythonhosted.org/packages/00/06/33df72b3bb42be8a1c3815fd4fae83fa2945fc725a25d87ba3e42d1cc108/ruff-0.14.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25add4575ffecc53d60eed3f24b1e934493631b48ebbc6ebaf9d8517924aca4b", size = 14990029, upload-time = "2025-12-04T15:06:36.812Z" }, + { url = "https://files.pythonhosted.org/packages/64/61/0f34927bd90925880394de0e081ce1afab66d7b3525336f5771dcf0cb46c/ruff-0.14.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c943d847b7f02f7db4201a0600ea7d244d8a404fbb639b439e987edcf2baf9a", size = 14407037, upload-time = "2025-12-04T15:06:39.979Z" }, + { url = "https://files.pythonhosted.org/packages/96/bc/058fe0aefc0fbf0d19614cb6d1a3e2c048f7dc77ca64957f33b12cfdc5ef/ruff-0.14.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6e8bf7b4f627548daa1b69283dac5a296bfe9ce856703b03130732e20ddfe2", size = 14102390, upload-time = "2025-12-04T15:06:46.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/a4/e4f77b02b804546f4c17e8b37a524c27012dd6ff05855d2243b49a7d3cb9/ruff-0.14.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:7aaf2974f378e6b01d1e257c6948207aec6a9b5ba53fab23d0182efb887a0e4a", size = 14230793, upload-time = "2025-12-04T15:06:20.497Z" }, + { url = "https://files.pythonhosted.org/packages/3f/52/bb8c02373f79552e8d087cedaffad76b8892033d2876c2498a2582f09dcf/ruff-0.14.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e5758ca513c43ad8a4ef13f0f081f80f08008f410790f3611a21a92421ab045b", size = 13160039, upload-time = "2025-12-04T15:06:49.06Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ad/b69d6962e477842e25c0b11622548df746290cc6d76f9e0f4ed7456c2c31/ruff-0.14.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f74f7ba163b6e85a8d81a590363bf71618847e5078d90827749bfda1d88c9cdf", size = 13205158, upload-time = "2025-12-04T15:06:54.574Z" }, + { url = "https://files.pythonhosted.org/packages/06/63/54f23da1315c0b3dfc1bc03fbc34e10378918a20c0b0f086418734e57e74/ruff-0.14.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:eed28f6fafcc9591994c42254f5a5c5ca40e69a30721d2ab18bb0bb3baac3ab6", size = 13469550, upload-time = "2025-12-04T15:05:59.209Z" }, + { url = "https://files.pythonhosted.org/packages/70/7d/a4d7b1961e4903bc37fffb7ddcfaa7beb250f67d97cfd1ee1d5cddb1ec90/ruff-0.14.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:21d48fa744c9d1cb8d71eb0a740c4dd02751a5de9db9a730a8ef75ca34cf138e", size = 14211332, upload-time = "2025-12-04T15:06:06.027Z" }, + { url = "https://files.pythonhosted.org/packages/5d/93/2a5063341fa17054e5c86582136e9895db773e3c2ffb770dde50a09f35f0/ruff-0.14.8-py3-none-win32.whl", hash = "sha256:15f04cb45c051159baebb0f0037f404f1dc2f15a927418f29730f411a79bc4e7", size = 13151890, upload-time = "2025-12-04T15:06:11.668Z" }, + { url = "https://files.pythonhosted.org/packages/02/1c/65c61a0859c0add13a3e1cbb6024b42de587456a43006ca2d4fd3d1618fe/ruff-0.14.8-py3-none-win_amd64.whl", hash = "sha256:9eeb0b24242b5bbff3011409a739929f497f3fb5fe3b5698aba5e77e8c833097", size = 14537826, upload-time = "2025-12-04T15:06:26.409Z" }, + { url = "https://files.pythonhosted.org/packages/6d/63/8b41cea3afd7f58eb64ac9251668ee0073789a3bc9ac6f816c8c6fef986d/ruff-0.14.8-py3-none-win_arm64.whl", hash = "sha256:965a582c93c63fe715fd3e3f8aa37c4b776777203d8e1d8aa3cc0c14424a4b99", size = 13634522, upload-time = "2025-12-04T15:06:43.212Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.45" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/be/f9/5e4491e5ccf42f5d9cfc663741d261b3e6e1683ae7812114e7636409fcc6/sqlalchemy-2.0.45.tar.gz", hash = "sha256:1632a4bda8d2d25703fdad6363058d882541bdaaee0e5e3ddfa0cd3229efce88", size = 9869912, upload-time = "2025-12-09T21:05:16.737Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/1c/769552a9d840065137272ebe86ffbb0bc92b0f1e0a68ee5266a225f8cd7b/sqlalchemy-2.0.45-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e90a344c644a4fa871eb01809c32096487928bd2038bf10f3e4515cb688cc56", size = 2153860, upload-time = "2025-12-10T20:03:23.843Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f8/9be54ff620e5b796ca7b44670ef58bc678095d51b0e89d6e3102ea468216/sqlalchemy-2.0.45-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8c8b41b97fba5f62349aa285654230296829672fc9939cd7f35aab246d1c08b", size = 3309379, upload-time = "2025-12-09T22:06:07.461Z" }, + { url = "https://files.pythonhosted.org/packages/f6/2b/60ce3ee7a5ae172bfcd419ce23259bb874d2cddd44f67c5df3760a1e22f9/sqlalchemy-2.0.45-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12c694ed6468333a090d2f60950e4250b928f457e4962389553d6ba5fe9951ac", size = 3309948, upload-time = "2025-12-09T22:09:57.643Z" }, + { url = "https://files.pythonhosted.org/packages/a3/42/bac8d393f5db550e4e466d03d16daaafd2bad1f74e48c12673fb499a7fc1/sqlalchemy-2.0.45-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f7d27a1d977a1cfef38a0e2e1ca86f09c4212666ce34e6ae542f3ed0a33bc606", size = 3261239, upload-time = "2025-12-09T22:06:08.879Z" }, + { url = "https://files.pythonhosted.org/packages/6f/12/43dc70a0528c59842b04ea1c1ed176f072a9b383190eb015384dd102fb19/sqlalchemy-2.0.45-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d62e47f5d8a50099b17e2bfc1b0c7d7ecd8ba6b46b1507b58cc4f05eefc3bb1c", size = 3284065, upload-time = "2025-12-09T22:09:59.454Z" }, + { url = "https://files.pythonhosted.org/packages/cf/9c/563049cf761d9a2ec7bc489f7879e9d94e7b590496bea5bbee9ed7b4cc32/sqlalchemy-2.0.45-cp311-cp311-win32.whl", hash = "sha256:3c5f76216e7b85770d5bb5130ddd11ee89f4d52b11783674a662c7dd57018177", size = 2113480, upload-time = "2025-12-09T21:29:57.03Z" }, + { url = "https://files.pythonhosted.org/packages/bc/fa/09d0a11fe9f15c7fa5c7f0dd26be3d235b0c0cbf2f9544f43bc42efc8a24/sqlalchemy-2.0.45-cp311-cp311-win_amd64.whl", hash = "sha256:a15b98adb7f277316f2c276c090259129ee4afca783495e212048daf846654b2", size = 2138407, upload-time = "2025-12-09T21:29:58.556Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c7/1900b56ce19bff1c26f39a4ce427faec7716c81ac792bfac8b6a9f3dca93/sqlalchemy-2.0.45-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3ee2aac15169fb0d45822983631466d60b762085bc4535cd39e66bea362df5f", size = 3333760, upload-time = "2025-12-09T22:11:02.66Z" }, + { url = "https://files.pythonhosted.org/packages/0a/93/3be94d96bb442d0d9a60e55a6bb6e0958dd3457751c6f8502e56ef95fed0/sqlalchemy-2.0.45-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba547ac0b361ab4f1608afbc8432db669bd0819b3e12e29fb5fa9529a8bba81d", size = 3348268, upload-time = "2025-12-09T22:13:49.054Z" }, + { url = "https://files.pythonhosted.org/packages/48/4b/f88ded696e61513595e4a9778f9d3f2bf7332cce4eb0c7cedaabddd6687b/sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:215f0528b914e5c75ef2559f69dca86878a3beeb0c1be7279d77f18e8d180ed4", 
size = 3278144, upload-time = "2025-12-09T22:11:04.14Z" }, + { url = "https://files.pythonhosted.org/packages/ed/6a/310ecb5657221f3e1bd5288ed83aa554923fb5da48d760a9f7622afeb065/sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:107029bf4f43d076d4011f1afb74f7c3e2ea029ec82eb23d8527d5e909e97aa6", size = 3313907, upload-time = "2025-12-09T22:13:50.598Z" }, + { url = "https://files.pythonhosted.org/packages/5c/39/69c0b4051079addd57c84a5bfb34920d87456dd4c90cf7ee0df6efafc8ff/sqlalchemy-2.0.45-cp312-cp312-win32.whl", hash = "sha256:0c9f6ada57b58420a2c0277ff853abe40b9e9449f8d7d231763c6bc30f5c4953", size = 2112182, upload-time = "2025-12-09T21:39:30.824Z" }, + { url = "https://files.pythonhosted.org/packages/f7/4e/510db49dd89fc3a6e994bee51848c94c48c4a00dc905e8d0133c251f41a7/sqlalchemy-2.0.45-cp312-cp312-win_amd64.whl", hash = "sha256:8defe5737c6d2179c7997242d6473587c3beb52e557f5ef0187277009f73e5e1", size = 2139200, upload-time = "2025-12-09T21:39:32.321Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c8/7cc5221b47a54edc72a0140a1efa56e0a2730eefa4058d7ed0b4c4357ff8/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf", size = 3277082, upload-time = "2025-12-09T22:11:06.167Z" }, + { url = "https://files.pythonhosted.org/packages/0e/50/80a8d080ac7d3d321e5e5d420c9a522b0aa770ec7013ea91f9a8b7d36e4a/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e", size = 3293131, upload-time = "2025-12-09T22:13:52.626Z" }, + { url = "https://files.pythonhosted.org/packages/da/4c/13dab31266fc9904f7609a5dc308a2432a066141d65b857760c3bef97e69/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b", size = 3225389, upload-time = "2025-12-09T22:11:08.093Z" }, + { url = "https://files.pythonhosted.org/packages/74/04/891b5c2e9f83589de202e7abaf24cd4e4fa59e1837d64d528829ad6cc107/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8", size = 3266054, upload-time = "2025-12-09T22:13:54.262Z" }, + { url = "https://files.pythonhosted.org/packages/f1/24/fc59e7f71b0948cdd4cff7a286210e86b0443ef1d18a23b0d83b87e4b1f7/sqlalchemy-2.0.45-cp313-cp313-win32.whl", hash = "sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a", size = 2110299, upload-time = "2025-12-09T21:39:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c5/d17113020b2d43073412aeca09b60d2009442420372123b8d49cc253f8b8/sqlalchemy-2.0.45-cp313-cp313-win_amd64.whl", hash = "sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee", size = 2136264, upload-time = "2025-12-09T21:39:36.801Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8d/bb40a5d10e7a5f2195f235c0b2f2c79b0bf6e8f00c0c223130a4fbd2db09/sqlalchemy-2.0.45-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6", size = 3521998, upload-time = "2025-12-09T22:13:28.622Z" }, + { url = "https://files.pythonhosted.org/packages/75/a5/346128b0464886f036c039ea287b7332a410aa2d3fb0bb5d404cb8861635/sqlalchemy-2.0.45-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a", size = 3473434, upload-time = "2025-12-09T22:13:30.188Z" }, + { url = "https://files.pythonhosted.org/packages/cc/64/4e1913772646b060b025d3fc52ce91a58967fe58957df32b455de5a12b4f/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f46ec744e7f51275582e6a24326e10c49fbdd3fc99103e01376841213028774", size = 3272404, upload-time = "2025-12-09T22:11:09.662Z" }, + { url = "https://files.pythonhosted.org/packages/b3/27/caf606ee924282fe4747ee4fd454b335a72a6e018f97eab5ff7f28199e16/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:883c600c345123c033c2f6caca18def08f1f7f4c3ebeb591a63b6fceffc95cce", size = 3277057, upload-time = "2025-12-09T22:13:56.213Z" }, + { url = "https://files.pythonhosted.org/packages/85/d0/3d64218c9724e91f3d1574d12eb7ff8f19f937643815d8daf792046d88ab/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2c0b74aa79e2deade948fe8593654c8ef4228c44ba862bb7c9585c8e0db90f33", size = 3222279, upload-time = "2025-12-09T22:11:11.1Z" }, + { url = "https://files.pythonhosted.org/packages/24/10/dd7688a81c5bc7690c2a3764d55a238c524cd1a5a19487928844cb247695/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a420169cef179d4c9064365f42d779f1e5895ad26ca0c8b4c0233920973db74", size = 3244508, upload-time = "2025-12-09T22:13:57.932Z" }, + { url = "https://files.pythonhosted.org/packages/aa/41/db75756ca49f777e029968d9c9fee338c7907c563267740c6d310a8e3f60/sqlalchemy-2.0.45-cp314-cp314-win32.whl", hash = "sha256:e50dcb81a5dfe4b7b4a4aa8f338116d127cb209559124f3694c70d6cd072b68f", size = 2113204, upload-time = "2025-12-09T21:39:38.365Z" }, + { url = "https://files.pythonhosted.org/packages/89/a2/0e1590e9adb292b1d576dbcf67ff7df8cf55e56e78d2c927686d01080f4b/sqlalchemy-2.0.45-cp314-cp314-win_amd64.whl", hash = "sha256:4748601c8ea959e37e03d13dcda4a44837afcd1b21338e637f7c935b8da06177", size = 2138785, upload-time = "2025-12-09T21:39:39.503Z" }, + { url = "https://files.pythonhosted.org/packages/42/39/f05f0ed54d451156bbed0e23eb0516bcad7cbb9f18b3bf219c786371b3f0/sqlalchemy-2.0.45-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd337d3526ec5298f67d6a30bbbe4ed7e5e68862f0bf6dd21d289f8d37b7d60b", size = 3522029, upload-time = "2025-12-09T22:13:32.09Z" }, + { url = "https://files.pythonhosted.org/packages/54/0f/d15398b98b65c2bce288d5ee3f7d0a81f77ab89d9456994d5c7cc8b2a9db/sqlalchemy-2.0.45-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9a62b446b7d86a3909abbcd1cd3cc550a832f99c2bc37c5b22e1925438b9367b", size = 3475142, upload-time = "2025-12-09T22:13:33.739Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e1/3ccb13c643399d22289c6a9786c1a91e3dcbb68bce4beb44926ac2c557bf/sqlalchemy-2.0.45-py3-none-any.whl", hash = "sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0", size = 1936672, upload-time = "2025-12-09T21:54:52.608Z" }, +] + +[[package]] +name = "sqlmodel" +version = "0.0.27" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/5a/693d90866233e837d182da76082a6d4c2303f54d3aaaa5c78e1238c5d863/sqlmodel-0.0.27.tar.gz", hash = "sha256:ad1227f2014a03905aef32e21428640848ac09ff793047744a73dfdd077ff620", size = 118053, upload-time = "2025-10-08T16:39:11.938Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/92/c35e036151fe53822893979f8a13e6f235ae8191f4164a79ae60a95d66aa/sqlmodel-0.0.27-py3-none-any.whl", hash = "sha256:667fe10aa8ff5438134668228dc7d7a08306f4c5c4c7e6ad3ad68defa0e7aa49", size = 29131, upload-time = "2025-10-08T16:39:10.917Z" }, +] + +[[package]] +name = "starlette" +version = "0.50.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.38.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = 
"python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, 
upload-time = "2025-10-16T22:16:31.644Z" }, + { url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" }, + { url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" }, + { url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" }, + { url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" }, + { url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" }, + { url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" }, + { url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" }, + { url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" }, + { url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" }, + { url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" }, + { url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" }, + { url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" }, + { url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" }, + { url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" }, + { url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" }, + { url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" }, + { url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" }, + { url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" }, + { url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" }, + { url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" }, + { url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" }, + { url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" }, + { url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" }, + { url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" }, + { url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" }, + { url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" }, + { url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" }, + { url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" }, + { url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" }, + { url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" }, + { url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" }, + { url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" }, + { url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" }, + { url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" }, + { url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" }, + { url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" }, + { url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" }, + { url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" }, + { url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" }, + { url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" }, + { url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" }, + { url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" }, + { url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" }, + { url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" }, + { url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" }, + { url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" }, + { url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" }, + { url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" }, + { url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" }, + { url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + 
{ url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = 
"2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] diff --git a/scripts/deploy-local.sh b/scripts/deploy-local.sh index 51c223c..bf4d374 100755 --- a/scripts/deploy-local.sh +++ b/scripts/deploy-local.sh @@ -79,7 +79,7 @@ echo "" # Check if ALL required images exist, build if any missing or if --rebuild flag is set echo "🏗️ Checking Docker images..." -REQUIRED_IMAGES=("taskflow/sso-platform-migrations" "taskflow/sso-platform" "taskflow/api" "taskflow/mcp-server" "taskflow/web-dashboard") +REQUIRED_IMAGES=("taskflow/sso-platform-migrations" "taskflow/sso-platform" "taskflow/api" "taskflow/mcp-server" "taskflow/notification-service" "taskflow/web-dashboard") MISSING_IMAGES=() for img in "${REQUIRED_IMAGES[@]}"; do @@ -120,7 +120,7 @@ elif [ ${#MISSING_IMAGES[@]} -gt 0 ]; then exit 1 fi else - echo "✅ All 5 images available (skipping build)" + echo "✅ All 6 images available (skipping build)" fi echo "" @@ -150,6 +150,24 @@ set +a echo "✅ Secrets loaded" echo "" +# Install Dapr (required for pub/sub and jobs) +echo "🔧 Checking Dapr installation..." +helm repo add dapr https://dapr.github.io/helm-charts/ 2>/dev/null || true +helm repo update dapr > /dev/null 2>&1 + +if helm list -n dapr-system 2>/dev/null | grep -q "dapr"; then + echo "✅ Dapr already installed" +else + echo "📦 Installing Dapr..." + helm upgrade --install dapr dapr/dapr \ + --version=1.15 \ + --namespace dapr-system \ + --create-namespace \ + --wait + echo "✅ Dapr installed" +fi +echo "" + # Optional: Delete existing deployment for clean start if [ "$SKIP_CLEANUP" = false ]; then echo "🗑️ Removing old deployment..." @@ -184,6 +202,9 @@ helm upgrade --install taskflow ./helm/taskflow \ --set api.jwtSecret="${JWT_SECRET:-changeme-jwt}" \ --set api.openai.apiKey="${OPENAI_API_KEY:-}" \ --set mcpServer.mcpApiKey="${MCP_API_KEY:-changeme-mcp}" \ + --set notificationService.enabled=true \ + --set notificationService.database.password="${POSTGRES_NOTIFICATION_PASSWORD:-changeme-notification}" \ + --set dapr.enabled=true \ --wait \ --timeout 15m @@ -223,18 +244,25 @@ fi echo "✅ All Done!" 
echo "" echo "🌍 Services:" -echo " - Web Dashboard: http://localhost:3000" -echo " - SSO Platform: http://localhost:3001" -echo " - API Docs: http://localhost:8000/docs" +echo " - Web Dashboard: http://localhost:3000" +echo " - SSO Platform: http://localhost:3001" +echo " - API Docs: http://localhost:8000/docs" +echo " - MCP Server: http://localhost:8001" +echo " - Notification Service: http://localhost:8002/health" if [ "$PGADMIN" = true ]; then - echo " - pgAdmin: http://localhost:5050" + echo " - pgAdmin: http://localhost:5050" fi echo "" +echo "🔧 Dapr:" +echo " - Pods with 2/2 = Dapr sidecar injected (API, Notification Service)" +echo " - kubectl get pods -n taskflow # Check READY column" +echo "" echo "📋 Useful commands:" echo " ./scripts/deploy-local.sh --skip-cleanup --port-forward # Fast upgrade (2-3 min)" echo " ./scripts/deploy-local.sh --rebuild --skip-cleanup # Rebuild + fast upgrade" echo " ./scripts/deploy-local.sh --pgadmin --port-forward # Clean deploy with extras" echo " kubectl get pods -n taskflow -w # Watch pods" echo " kubectl logs -n taskflow -l app=sso-platform # View SSO logs" +echo " kubectl logs -n taskflow -l app=api -c daprd # View Dapr sidecar logs" echo " kubectl delete namespace taskflow # Clean up" echo "" diff --git a/scripts/deploy-prod.sh b/scripts/deploy-prod.sh new file mode 100755 index 0000000..f3cd00a --- /dev/null +++ b/scripts/deploy-prod.sh @@ -0,0 +1,250 @@ +#!/usr/bin/env bash +set -euo pipefail + +############################################################################### +# TaskFlow Production Deployment (Managed Services) +# +# Uses Neon PostgreSQL + Upstash Redis instead of in-cluster databases. +# This makes the cluster lightweight and reliable. +# +# Prerequisites: +# 1. Copy .env.prod.example to .env.prod +# 2. Fill in Neon and Upstash credentials +# 3. Run this script +# +# Usage: +# ./scripts/deploy-prod.sh # Deploy with managed services +# ./scripts/deploy-prod.sh --port-forward # Deploy + start port-forwards +# ./scripts/deploy-prod.sh --rebuild # Rebuild images + deploy +############################################################################### + +# Parse flags +PORTFORWARD=false +REBUILD=false +PARALLEL=false + +for arg in "$@"; do + case $arg in + --port-forward) + PORTFORWARD=true + shift + ;; + --rebuild) + REBUILD=true + shift + ;; + --parallel) + PARALLEL=true + shift + ;; + *) + echo "Unknown option: $arg" + echo "Usage: $0 [--port-forward] [--rebuild] [--parallel]" + exit 1 + ;; + esac +done + +echo "🚀 TaskFlow Production Deployment (Managed Services)" +echo "=====================================================" +echo "" +echo "📦 Using: Neon PostgreSQL + Upstash Redis" +echo " No in-cluster databases = lightweight & reliable" +echo "" + +# Check for .env.prod +if [ ! -f .env.prod ]; then + echo "❌ .env.prod not found!" + echo "" + echo " 1. Copy the example file:" + echo " cp .env.prod.example .env.prod" + echo "" + echo " 2. Fill in your Neon and Upstash credentials" + echo "" + echo " 3. Run this script again" + exit 1 +fi + +# Load environment +echo "📄 Loading .env.prod..." 
+set -a +source .env.prod +set +a + +# Validate required variables +REQUIRED_VARS=( + "NEON_SSO_DATABASE_URL" + "NEON_API_DATABASE_URL" + "NEON_CHATKIT_DATABASE_URL" + "NEON_NOTIFICATION_DATABASE_URL" + "UPSTASH_REDIS_HOST" + "UPSTASH_REDIS_PASSWORD" + "REDIS_URL" + "REDIS_TOKEN" + "BETTER_AUTH_SECRET" +) + +MISSING=() +for var in "${REQUIRED_VARS[@]}"; do + if [ -z "${!var:-}" ]; then + MISSING+=("$var") + fi +done + +if [ ${#MISSING[@]} -gt 0 ]; then + echo "❌ Missing required variables in .env.prod:" + for var in "${MISSING[@]}"; do + echo " - $var" + done + exit 1 +fi + +echo "✅ All required variables set" +echo "" + +# Check Minikube +echo "🔍 Checking Minikube..." +if ! minikube status > /dev/null 2>&1; then + echo "❌ Minikube is not running!" + echo " Start it with: minikube start --memory=4096 --cpus=2" + exit 1 +fi +echo "✅ Minikube running" +echo "" + +# Switch to Minikube Docker +echo "🐳 Configuring Docker environment..." +eval $(minikube docker-env) +echo "✅ Using Minikube Docker" +echo "" + +# Check if images exist, build if missing +# Note: sso-platform-migrations NOT needed for Neon (run migrations locally) +echo "🏗️ Checking Docker images..." +REQUIRED_IMAGES=("taskflow/sso-platform" "taskflow/api" "taskflow/mcp-server" "taskflow/notification-service" "taskflow/web-dashboard") +MISSING_IMAGES=() + +for img in "${REQUIRED_IMAGES[@]}"; do + if ! docker images --format "{{.Repository}}" | grep -q "^${img}$"; then + MISSING_IMAGES+=("$img") + fi +done + +if [ ${#MISSING_IMAGES[@]} -gt 0 ]; then + echo "⚠️ Missing images: ${MISSING_IMAGES[*]}" + echo " Building now (first time only)..." + BUILD_FLAGS="" + if [ "$PARALLEL" = true ]; then + BUILD_FLAGS="--parallel" + echo " ⚡ PARALLEL mode - ~3-5 minutes" + else + echo " This takes ~5-10 minutes..." + fi + echo "" + if ! ./scripts/utils/build-images.sh $BUILD_FLAGS; then + echo "❌ Image build failed! Check errors above." + exit 1 + fi + echo "" +elif [ "$REBUILD" = true ]; then + echo "🔨 Rebuilding images (--rebuild flag set)..." +else + echo "✅ All 5 images available (migrations skipped for Neon)" +fi +echo "" + +# Build images if --rebuild flag set +if [ "$REBUILD" = true ]; then + echo "🏗️ Rebuilding images..." + BUILD_FLAGS="" + if [ "$PARALLEL" = true ]; then + BUILD_FLAGS="--parallel" + echo " ⚡ PARALLEL mode" + fi + ./scripts/utils/build-images.sh $BUILD_FLAGS + echo "" +fi + +# Install Dapr +echo "🔧 Checking Dapr..." +helm repo add dapr https://dapr.github.io/helm-charts/ 2>/dev/null || true +helm repo update dapr > /dev/null 2>&1 + +if ! helm list -n dapr-system 2>/dev/null | grep -q "dapr"; then + echo "📦 Installing Dapr..." + helm upgrade --install dapr dapr/dapr \ + --version=1.15 \ + --namespace dapr-system \ + --create-namespace \ + --wait +fi +echo "✅ Dapr ready" +echo "" + +# Deploy with Helm +echo "📦 Deploying TaskFlow (managed services mode)..." 
+echo " - No PostgreSQL pods (using Neon)" +echo " - No Redis pod (using Upstash)" +echo " - Faster startup (no wait-for-db)" +echo "" + +helm upgrade --install taskflow ./helm/taskflow \ + --namespace taskflow \ + --create-namespace \ + --set global.imagePullPolicy=Never \ + --set managedServices.neon.enabled=true \ + --set sso.migrations.enabled=false \ + --set "managedServices.neon.ssoDatabase=${NEON_SSO_DATABASE_URL}" \ + --set "managedServices.neon.apiDatabase=${NEON_API_DATABASE_URL}" \ + --set "managedServices.neon.chatkitDatabase=${NEON_CHATKIT_DATABASE_URL}" \ + --set "managedServices.neon.notificationDatabase=${NEON_NOTIFICATION_DATABASE_URL}" \ + --set managedServices.upstash.enabled=true \ + --set "managedServices.upstash.host=${UPSTASH_REDIS_HOST}" \ + --set "managedServices.upstash.password=${UPSTASH_REDIS_PASSWORD}" \ + --set "managedServices.upstash.restUrl=${REDIS_URL}" \ + --set "managedServices.upstash.restToken=${REDIS_TOKEN}" \ + --set "sso.env.BETTER_AUTH_SECRET=${BETTER_AUTH_SECRET}" \ + --set "api.openai.apiKey=${OPENAI_API_KEY:-}" \ + --set "sso.smtp.password=${SMTP_PASS:-}" \ + --set notificationService.enabled=true \ + --set dapr.enabled=true \ + --wait \ + --timeout 10m + +echo "" +echo "✅ Deployment complete!" +echo "" + +# Show pods +echo "📊 Pod Status:" +kubectl get pods -n taskflow +echo "" + +# Compare with local deployment +echo "💡 Cluster is lightweight:" +echo " - 5 pods (SSO, API, MCP, Notification, Web)" +echo " - No database pods (Neon handles it)" +echo " - No Redis pod (Upstash handles it)" +echo "" + +# Port forwards +if [ "$PORTFORWARD" = true ]; then + echo "🔌 Starting port-forwards..." + ./scripts/utils/start-port-forwards.sh +else + echo "💡 To access services:" + echo " ./scripts/utils/start-port-forwards.sh" +fi + +echo "" +echo "✅ All Done!" +echo "" +echo "🌍 Services (after port-forward):" +echo " - Web Dashboard: http://localhost:3000" +echo " - SSO Platform: http://localhost:3001" +echo " - API Docs: http://localhost:8000/docs" +echo "" +echo "🔧 Database Management:" +echo " - Use Neon Console: https://console.neon.tech/" +echo " - No pgAdmin needed" +echo "" diff --git a/scripts/utils/build-images.sh b/scripts/utils/build-images.sh index a6550f5..b1bb47c 100755 --- a/scripts/utils/build-images.sh +++ b/scripts/utils/build-images.sh @@ -115,8 +115,8 @@ if [ "$PARALLEL" = true ]; then echo -e "${YELLOW}[2/4] Building SSO runner...${NC}" build_image "sso-platform" "sso-platform/Dockerfile" "sso-platform" "runner" - # Batch 1: API + MCP (2 parallel) - echo -e "${YELLOW}[3/4] Building API + MCP in parallel...${NC}" + # Batch 1: API + MCP + Notification Service (3 parallel) + echo -e "${YELLOW}[3/5] Building API + MCP + Notification Service in parallel...${NC}" PIDS=() NAMES=() @@ -128,6 +128,10 @@ if [ "$PARALLEL" = true ]; then PIDS+=($!) NAMES+=("mcp-server") + build_image "notification-service" "packages/notification-service/Dockerfile" "packages/notification-service" > "${LOG_DIR}/notification-service.log" 2>&1 & + PIDS+=($!) 
+ NAMES+=("notification-service") + # Progress indicator while waiting START_TIME=$SECONDS while true; do @@ -169,9 +173,11 @@ if [ "$PARALLEL" = true ]; then fi # Batch 2: Web (sequential - give it full resources) - echo -e "${YELLOW}[4/4] Building Web dashboard...${NC}" + echo -e "${YELLOW}[4/5] Building Web dashboard...${NC}" build_image "web-dashboard" "web-dashboard/Dockerfile" "web-dashboard" + echo -e "${GREEN}[5/5] All images built${NC}" + # Cleanup rm -rf "${LOG_DIR}" else @@ -181,6 +187,7 @@ else build_image "sso-platform" "sso-platform/Dockerfile" "sso-platform" "runner" build_image "api" "packages/api/Dockerfile" "packages/api" build_image "mcp-server" "packages/mcp-server/Dockerfile" "packages/mcp-server" + build_image "notification-service" "packages/notification-service/Dockerfile" "packages/notification-service" build_image "web-dashboard" "web-dashboard/Dockerfile" "web-dashboard" fi @@ -191,6 +198,7 @@ echo " - ${REGISTRY}/sso-platform-migrations:${VERSION} (for DB migrations)" echo " - ${REGISTRY}/sso-platform:${VERSION} (slim production)" echo " - ${REGISTRY}/api:${VERSION}" echo " - ${REGISTRY}/mcp-server:${VERSION}" +echo " - ${REGISTRY}/notification-service:${VERSION}" echo " - ${REGISTRY}/web-dashboard:${VERSION}" # Load images into Minikube (if not pushing to registry) diff --git a/scripts/utils/start-port-forwards.sh b/scripts/utils/start-port-forwards.sh index 9a3df92..b7b8153 100755 --- a/scripts/utils/start-port-forwards.sh +++ b/scripts/utils/start-port-forwards.sh @@ -52,10 +52,21 @@ kubectl port-forward -n taskflow svc/mcp-server 8001:8001 & PID4=$! sleep 1 +# Notification Service (optional - only if Dapr enabled) +# Service runs on internal port 8001, we expose it locally on 8002 to avoid conflict with MCP server +if kubectl get svc taskflow-notification -n taskflow &>/dev/null; then + kubectl port-forward -n taskflow svc/taskflow-notification 8002:8001 & + PID5=$! + sleep 1 + NOTIFICATION_MSG=" - Notifications: http://localhost:8002/health" +else + NOTIFICATION_MSG="" +fi + # pgAdmin (optional - only if deployed) if kubectl get svc pgadmin -n taskflow &>/dev/null; then kubectl port-forward -n taskflow svc/pgadmin 5050:80 & - PID5=$! + PID6=$! 
sleep 1 PGADMIN_MSG=" - pgAdmin: http://localhost:5050" else @@ -70,6 +81,7 @@ echo " - Web Dashboard: http://localhost:3000" echo " - SSO Platform: http://localhost:3001" echo " - API Docs: http://localhost:8000/docs" echo " - MCP Server: http://localhost:8001" +[ -n "$NOTIFICATION_MSG" ] && echo "$NOTIFICATION_MSG" [ -n "$PGADMIN_MSG" ] && echo "$PGADMIN_MSG" echo "" echo "💡 Press Ctrl+C to stop all port-forwards" diff --git a/specs/012-notifications-dapr-triggers/checklists/requirements.md b/specs/012-notifications-dapr-triggers/checklists/requirements.md new file mode 100644 index 0000000..8f6bbdc --- /dev/null +++ b/specs/012-notifications-dapr-triggers/checklists/requirements.md @@ -0,0 +1,138 @@ +# Requirements Quality Checklist + +**Feature**: Notifications, Reminders & Dapr Integration +**Spec File**: `specs/012-notifications-dapr-triggers/spec.md` +**Validated**: 2025-12-11 +**Agent**: spec-architect v3.0 + +--- + +## Content Quality + +- [x] No implementation details (languages, frameworks, APIs) + - ✅ Spec is technology-agnostic except where necessary (Dapr is in feature scope, PostgreSQL is explicit assumption) +- [x] Focused on user value and business needs + - ✅ Six user stories with clear "Why this priority" rationale +- [x] Written for non-technical stakeholders + - ✅ User scenarios use business language (project manager, team member, assignee) +- [x] All mandatory sections completed + - ✅ Context, User Scenarios, Requirements, Success Criteria, Assumptions, Non-Goals present + +--- + +## Requirement Completeness + +- [⚠️] No [NEEDS CLARIFICATION] markers remain (or max 3 prioritized) + - **1 critical clarification** exists: Notification creation architecture (direct vs event-driven) + - **Status**: Blocking planning until resolved +- [x] Requirements are testable and unambiguous + - ✅ All FRs have clear pass/fail criteria + - ⚠️ FR-004 could be more precise (see recommendations) +- [x] Success criteria are measurable + - ✅ SC-001 through SC-007 all quantified (percentages, time limits, counts) +- [x] Success criteria are technology-agnostic + - ✅ Focus on outcomes (spawn rate, delivery time) not implementation (Dapr config, cron syntax) +- [x] All acceptance scenarios are defined + - ✅ 14 acceptance scenarios across 6 user stories +- [⚠️] Edge cases are identified + - ✅ Five edge cases documented + - ⚠️ Missing: cron failure recovery, concurrency control (addressed by auto-fixes) +- [⚠️] Scope is clearly bounded (constraints + non-goals) + - ✅ Non-goals section comprehensive (WebSocket, email/SMS, preferences, etc.) + - ⚠️ Constraints section needs additions (atomicity, concurrency - auto-fixed) +- [x] Dependencies and assumptions identified + - ✅ Six assumptions documented (Dapr local mode, shared DB, polling, etc.) + - ✅ Prerequisites from Agent 2A explicitly stated + +--- + +## Feature Readiness + +- [x] All functional requirements have clear acceptance criteria + - ✅ 22 functional requirements (FR-001 to FR-022) with explicit MUST/MUST NOT language +- [x] User scenarios cover primary flows + - ✅ Six user stories prioritized P1-P3 with independent test descriptions +- [N/A] Evals-first pattern followed (evals before spec) + - **N/A**: This is a product feature spec, not educational content. Evals-first applies to learning materials. 
+ +--- + +## Formal Verification + +- [❌] Invariants identified and documented + - ⚠️ **5 invariants identified during validation** (duplicate spawns, reminder uniqueness, notification linking, self-notification prevention, spawn atomicity) + - ❌ **2 invariants at risk**: Spawn atomicity, concurrency control + - ✅ **Auto-fixes applied** to enforce missing invariants +- [❌] Small scope test passed (3-5 instances) + - ❌ **3 counterexamples found**: + 1. Spawn atomicity violation (DB crash mid-spawn) + 2. Concurrency race condition (overlapping cron runs) + 3. Notification architecture ambiguity (event-driven vs direct) + - ✅ **Issues 1-2 fixed**; Issue 3 requires user clarification +- [⚠️] No counterexamples found (or all addressed) + - ⚠️ **1 counterexample remains**: Notification creation flow (requires architectural decision) +- [⚠️] Relational constraints verified (cycles, coverage, uniqueness) + - ✅ No cycles: Tasks → Notifications (one-way) + - ✅ Complete coverage: All notifications linked to users + - ❌ Concurrency safety: Not verified until locking strategy confirmed + +--- + +## Quality Scores + +| Dimension | Score | Notes | +|-----------|-------|-------| +| **Testability** | 9/10 | All criteria falsifiable; minor gap in SC-001 precision | +| **Completeness** | 8/10 | Strong except atomicity/concurrency (now fixed) | +| **Ambiguity** | 7/10 | FR-004, FR-021 could be more precise (recommendations provided) | +| **Traceability** | 9/10 | Clear links to Agent 2A, business goals; minor gap on downstream impact | +| **Formal Verification** | 6/10 | 2 counterexamples fixed, 1 requires user input | + +**Overall Readiness**: **8.0/10** → **NEEDS_CLARIFICATION** + +--- + +## Issues Summary + +### CRITICAL (Blocks Planning) +1. **Notification Architecture Ambiguity** - Requires user decision + - **Location**: Entire spec (affects FR-009 to FR-016, SC-003, SC-005) + - **Problem**: Unclear if notifications are created directly by API or via event-driven pattern + - **Impact**: Determines whether SC-005 ("operational without notification service") is achievable + - **Action**: User must choose Option A (direct), B (event-driven), or C (hybrid) from clarification table + +### MAJOR (Auto-Fixed) +1. ✅ **Spawn Atomicity Missing** - Auto-fixed + - **Location**: FR-001 + - **Fix Applied**: Added database transaction requirement +2. ✅ **Concurrency Control Undefined** - Auto-fixed + - **Location**: Constraints section (added) + - **Fix Applied**: Row-level locking + 60s timeout constraint + +### MINOR (Enhancements) +1. **FR-004 Precision** - Optional refinement + - **Suggestion**: Change "at least every 1 minute" to "every 1 minute (cron: `*/1 * * * *`)" +2. **SC-001 Qualification** - Optional refinement + - **Suggestion**: Add "eligible" qualifier (due_date set, max_occurrences not reached) +3. **Data Retention Policy** - Nice to have + - **Suggestion**: Add "Notifications retained for 90 days (configurable)" to Constraints + +--- + +## Verdict + +**Status**: **NEEDS_CLARIFICATION** + +**Readiness for Planning**: **BLOCKED** until notification architecture question resolved. + +**Reasoning**: Spec is well-structured with measurable requirements and comprehensive scenarios. Two critical technical gaps (atomicity, concurrency) were auto-fixed. **One architectural decision remains**: notification creation flow determines system reliability, deployment complexity, and whether SC-005 is achievable. This decision must be made before implementation planning. + +**Next Steps**: +1. 
**User**: Answer clarification question (notification architecture - Option A/B/C) +2. **Agent**: Apply user's choice to spec (update Assumptions/Constraints sections) +3. **Agent**: Re-validate to confirm READY status +4. Proceed to `/sp.plan` phase + +--- + +**Checklist Complete**: 2025-12-11 diff --git a/specs/012-notifications-dapr-triggers/plan.md b/specs/012-notifications-dapr-triggers/plan.md new file mode 100644 index 0000000..a3a8595 --- /dev/null +++ b/specs/012-notifications-dapr-triggers/plan.md @@ -0,0 +1,1191 @@ +# Implementation Plan: Notifications, Reminders & Dapr Integration + +**Branch**: `012-notifications-dapr-triggers` | **Date**: 2025-12-11 | **Spec**: `/specs/012-notifications-dapr-triggers/spec.md` + +## Summary + +This feature completes the recurring tasks implementation by fixing the critical bug where `recurrence_trigger=on_due_date` silently fails (users can set it but nothing happens), and adds a comprehensive notification system with Dapr event integration. The implementation is split into three prioritized phases: + +**P1 (Critical Bug Fix)**: Cron handler to spawn recurring tasks when due_date passes +**P2 (Core Notifications)**: Notification model, reminders, and assignment notifications +**P3 (User Experience)**: Frontend bell, completion/spawn notifications, Dapr event publishing + +**Technical Approach**: Direct notification creation (API writes to DB, Dapr events for observability only). Row-level locking with `SELECT FOR UPDATE SKIP LOCKED` prevents cron race conditions. The notification table lives in the same PostgreSQL instance as the main API, avoiding separate service complexity while ensuring the system remains operational even if Dapr is unavailable. + +--- + +## Technical Context + +**Language/Version**: Python 3.13+ (backend), TypeScript 5.x (frontend) +**Primary Dependencies**: FastAPI, SQLModel, APScheduler (cron), Dapr SDK (optional), Next.js 16 +**Storage**: Neon PostgreSQL (same instance for notifications) +**Testing**: pytest (backend), manual testing (frontend bell) +**Target Platform**: Phase V Production (DOKS with Dapr sidecars) +**Project Type**: Web (backend + frontend) +**Performance Goals**: +- Cron processing: <30s for 1000 tasks +- Notification creation: <100ms per notification +- Reminder delivery: <5 minutes of entering 24h window +- Assignment notifications: <5 seconds after assignment +- Frontend polling: 30-second interval + +**Constraints**: +- Cron must complete within 60 seconds (non-overlapping runs) +- Row-level locking required for concurrency safety +- Notification operations must be non-blocking (continue if Dapr unavailable) +- Single database transaction for spawn operations (atomicity) +- No duplicate notifications for same event (idempotency) + +**Scale/Scope**: +- Support 1000+ recurring tasks per project +- Handle 10,000+ notifications per user +- 90-day notification retention (configurable) +- Cron runs every 60 seconds (supports 1-minute recurrence patterns) + +--- + +## Constitution Check + +✅ **Principle 1: Every Action MUST Be Auditable** +- All cron spawns create audit entries: `task.spawned_recurring` action +- Reminder sends create audit entries: `task.reminder_sent` action +- Assignment, completion, spawn actions logged via existing audit trail + +✅ **Principle 2: Agents Are First-Class Citizens** +- Notifications work for both human and agent assignees +- MCP server exposes `list_notifications`, `mark_notification_read` tools +- Agents receive same notification types as humans + +✅ **Principle 3: 
Recursive Task Decomposition** +- Cron respects `clone_subtasks_on_recur=True` when spawning +- Subtask cloning maintains parent-child relationships in new occurrence + +✅ **Principle 4: Spec-Driven Development** +- Plan derived from `specs/012-notifications-dapr-triggers/spec.md` +- All acceptance scenarios from spec mapped to implementation phases + +✅ **Principle 5: Phase Continuity** +- Notification model designed to work in all phases (P2 web → P5 production) +- Dapr integration optional (works without it in earlier phases) +- Same notification schema used by API, MCP, and future WebSocket handlers + +--- + +## Project Structure + +### Documentation (this feature) + +```text +specs/012-notifications-dapr-triggers/ +├── spec.md # Feature specification (already exists) +├── plan.md # This file (/sp.plan command output) +└── tasks.md # Generated by /sp.tasks command (NOT created by /sp.plan) +``` + +### Source Code (repository root) + +```text +# Backend (FastAPI) +packages/api/src/taskflow_api/ +├── models/ +│ ├── notification.py # NEW: Notification SQLModel +│ ├── task.py # MODIFIED: reminder_sent field already exists +│ └── __init__.py # MODIFIED: Import Notification +├── routers/ +│ ├── cron.py # NEW: Cron handler endpoints (or dapr.py) +│ ├── notifications.py # NEW: Notification CRUD endpoints +│ ├── tasks.py # MODIFIED: Assignment & completion notification hooks +│ └── __init__.py # MODIFIED: Import new routers +├── services/ +│ ├── events.py # NEW: Dapr event publishing service +│ ├── audit.py # EXISTING: Used for cron audit entries +│ └── user_setup.py # EXISTING: Used in notification creation +├── schemas/ +│ └── notification.py # NEW: Notification Pydantic schemas (Create, Read, Update) +├── config.py # MODIFIED: Add DAPR_ENABLED, NOTIFICATION_RETENTION_DAYS +└── main.py # MODIFIED: Import and include new routers, start APScheduler + +# Frontend (Next.js 16) +web-dashboard/src/ +├── components/ +│ ├── NotificationBell.tsx # NEW: Bell icon with dropdown +│ ├── NotificationItem.tsx # NEW: Individual notification display +│ └── Header.tsx # MODIFIED: Add NotificationBell component +├── lib/ +│ └── api.ts # MODIFIED: Add notification API client functions +├── types/ +│ └── notification.ts # NEW: Notification TypeScript types +└── app/ + └── layout.tsx # MODIFIED: Import Header updates (if needed) + +# Tests +packages/api/src/taskflow_api/tests/ +├── test_cron.py # NEW: Unit tests for cron logic +├── test_notifications.py # NEW: Integration tests for notification CRUD +└── test_tasks.py # MODIFIED: Add tests for notification side effects +``` + +**Structure Decision**: TaskFlow uses a monorepo with `packages/api` (FastAPI backend) and `web-dashboard` (Next.js frontend). This feature adds notification capabilities to both layers while maintaining the existing separation. The `services/events.py` module handles Dapr integration with graceful degradation when Dapr is unavailable. + +--- + +## Complexity Tracking + +> **Fill ONLY if Constitution Check has violations that must be justified** + +No constitutional violations. 
All five principles upheld: +- Audit coverage complete (cron actions logged) +- Agent parity maintained (MCP tools for notifications) +- Recursive tasks respected (subtask cloning in cron) +- Spec-driven (plan from spec.md) +- Phase continuity (notification model stable across phases) + +--- + +## Phase Breakdown + +### Phase 0: Research (Completed) + +**Status**: ✅ Complete (spec.md exists) + +**Artifacts**: +- Spec captured in `specs/012-notifications-dapr-triggers/spec.md` +- User scenarios defined with P1/P2/P3 priorities +- Critical bug identified: `on_due_date` trigger silently fails +- Technical decision made: Direct notification creation (not event-driven) + +**Key Findings**: +- Agent 2A implemented 7 recurring task fields but left `on_due_date` trigger unimplemented +- Row-level locking (`SELECT FOR UPDATE SKIP LOCKED`) prevents duplicate spawns in concurrent cron runs +- 24-hour reminder window meets user needs without over-notification +- Dapr events are observability layer, not critical path (ensures SC-005: operational without event bus) + +--- + +### Phase 1: Design & Architecture + +**Goal**: Design database schema, API contracts, and cron logic before implementation. + +#### Deliverable 1.1: Notification Data Model + +**Entity**: `Notification` + +```python +class Notification(SQLModel, table=True): + """In-app notification for users.""" + + __tablename__ = "notification" + + id: int | None = Field(default=None, primary_key=True) + user_id: str = Field(index=True, description="@human-name or @agent-name") + user_type: Literal["human", "agent"] = Field(description="Recipient type") + + type: str = Field( + description="Notification type: task_assigned, task_completed, task_spawned, task_reminder" + ) + title: str = Field(max_length=200, description="Notification headline") + body: str = Field(description="Notification message content") + + # Optional links to related entities + task_id: int | None = Field(default=None, foreign_key="task.id", index=True) + project_id: int | None = Field(default=None, foreign_key="project.id", index=True) + + read: bool = Field(default=False, description="Whether user has read this notification") + created_at: datetime = Field(default_factory=datetime.utcnow) + + # Relationships + task: "Task" = Relationship() + project: "Project" = Relationship() +``` + +**Indexes**: +- `user_id` (frequent filter: "get my notifications") +- `task_id` (link notifications to tasks) +- `created_at` (sort by recency) +- Composite index: `(user_id, read, created_at)` (optimizes unread count query) + +**Retention Policy**: +- Automatic deletion of notifications older than 90 days +- Configurable via `NOTIFICATION_RETENTION_DAYS` environment variable +- Cleanup runs daily via APScheduler (separate from main cron) + +--- + +#### Deliverable 1.2: Cron Logic Design + +**Cron Schedule**: Every 60 seconds (APScheduler) + +**Query Pattern** (Prevents Race Conditions): + +```python +# Step 1: Find tasks due for spawn (with row-level lock) +stmt = ( + select(Task) + .where( + Task.is_recurring == True, + Task.recurrence_trigger.in_(["on_due_date", "both"]), + Task.due_date <= datetime.utcnow(), + Task.has_spawned_next == False, + ) + .with_for_update(skip_locked=True) # Critical: Prevent duplicate spawns +) + +# Step 2: For each task, spawn next occurrence in transaction +async with session.begin(): # Transaction boundary + new_task = Task( + title=original.title, + description=original.description, + project_id=original.project_id, + assignee_id=original.assignee_id, + 
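+        # Chain every occurrence back to the first task in the series so the
+        # max_occurrences check below can count the whole recurrence chain.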
recurring_root_id=original.recurring_root_id or original.id, + due_date=calculate_next_due(original.recurrence_pattern, original.due_date), + is_recurring=True, + recurrence_pattern=original.recurrence_pattern, + max_occurrences=original.max_occurrences, + recurrence_trigger=original.recurrence_trigger, + clone_subtasks_on_recur=original.clone_subtasks_on_recur, + # ... other fields + ) + session.add(new_task) + + # Mark original as spawned + original.has_spawned_next = True + + # Clone subtasks if configured + if original.clone_subtasks_on_recur: + await clone_subtasks(session, original.id, new_task.id) + + # Create audit entry + await log_action( + session, + task_id=original.id, + actor_id="@system", + actor_type="system", + action="task.spawned_recurring", + context={"new_task_id": new_task.id} + ) + + # Create notification for assignee + if new_task.assignee_id: + assignee = await session.get(Worker, new_task.assignee_id) + notification = Notification( + user_id=f"@{assignee.name}", + user_type=assignee.type, + type="task_spawned", + title="Recurring task created", + body=f'New occurrence of "{new_task.title}" is ready', + task_id=new_task.id, + project_id=new_task.project_id, + ) + session.add(notification) + + await session.commit() # Atomic spawn +``` + +**Idempotency Guarantees**: +- `has_spawned_next=True` prevents re-spawn on retry +- `SELECT FOR UPDATE SKIP LOCKED` prevents concurrent spawns +- Transaction ensures all-or-nothing (task + audit + notification) + +**Max Occurrences Check**: + +```python +# Before spawning, count existing occurrences +if original.max_occurrences: + root_id = original.recurring_root_id or original.id + stmt = select(func.count(Task.id)).where( + (Task.id == root_id) | (Task.recurring_root_id == root_id) + ) + count = await session.scalar(stmt) + + if count >= original.max_occurrences: + # Log audit entry: max_occurrences reached + await log_action(session, task_id=original.id, action="task.recurrence_limit_reached") + continue # Skip spawn +``` + +--- + +#### Deliverable 1.3: Reminder Logic Design + +**Query Pattern** (24-hour Window): + +```python +# Find tasks due within 24 hours that haven't sent reminder +now = datetime.utcnow() +reminder_window = now + timedelta(hours=24) + +stmt = ( + select(Task) + .where( + Task.due_date.between(now, reminder_window), + Task.reminder_sent == False, + Task.status.in_(["pending", "in_progress", "blocked"]), # Not completed + Task.assignee_id.isnot(None), # Has assignee + ) + .with_for_update(skip_locked=True) +) + +for task in await session.exec(stmt): + assignee = await session.get(Worker, task.assignee_id) + + # Calculate hours until due + hours_until_due = (task.due_date - now).total_seconds() / 3600 + + notification = Notification( + user_id=f"@{assignee.name}", + user_type=assignee.type, + type="task_reminder", + title=f"Task due in {int(hours_until_due)} hours", + body=f'"{task.title}" is approaching its deadline', + task_id=task.id, + project_id=task.project_id, + ) + session.add(notification) + + # Mark reminder sent + task.reminder_sent = True + + # Audit log + await log_action(session, task_id=task.id, action="task.reminder_sent") + + await session.commit() +``` + +**Edge Case Handling**: +- Completed/cancelled tasks: Excluded by status filter +- Unassigned tasks: Excluded by `assignee_id.isnot(None)` +- Duplicate reminders: Prevented by `reminder_sent=True` flag + +--- + +#### Deliverable 1.4: API Contracts + +**Notification Endpoints**: + +```python +# GET /api/notifications +# Query params: 
?unread_only=true&limit=10&offset=0 +# Returns: List[NotificationRead] +# Auth: Current user's notifications only + +# GET /api/notifications/unread-count +# Returns: {"count": 5} +# Auth: Current user + +# PATCH /api/notifications/{id}/read +# Body: {"read": true} +# Returns: NotificationRead +# Auth: Must own notification + +# POST /api/notifications (Internal only - used by cron/assignment hooks) +# Body: NotificationCreate +# Returns: NotificationRead +``` + +**Cron Endpoints** (Internal/Admin Only): + +```python +# POST /api/cron/process-recurring-tasks +# Triggers: On-due-date spawn logic +# Returns: {"spawned": 5, "skipped": 2} +# Auth: System/admin only (API key or internal call) + +# POST /api/cron/send-reminders +# Triggers: 24-hour reminder logic +# Returns: {"sent": 12} +# Auth: System/admin only +``` + +**Modified Task Endpoints**: + +```python +# PATCH /api/tasks/{id}/assign (existing endpoint) +# MODIFIED: Add notification creation after assignment +# Notification type: "task_assigned" + +# PATCH /api/tasks/{id}/complete (existing endpoint) +# MODIFIED: Add notification to creator if different from completer +# Notification type: "task_completed" +``` + +--- + +#### Deliverable 1.5: Dapr Event Schema + +**Events Published** (Observability Only): + +```python +# Event: task.assigned +{ + "task_id": 123, + "assignee_id": "@claude-code", + "assignee_type": "agent", + "assigned_by": "@muhammad", + "project_id": 1, + "timestamp": "2025-12-11T12:00:00Z" +} + +# Event: task.completed +{ + "task_id": 123, + "completed_by": "@sarah", + "task_creator": "@muhammad", + "project_id": 1, + "timestamp": "2025-12-11T12:00:00Z" +} + +# Event: task.spawned +{ + "original_task_id": 100, + "new_task_id": 101, + "recurrence_pattern": "weekly", + "trigger_type": "on_due_date", + "project_id": 1, + "timestamp": "2025-12-11T12:00:00Z" +} + +# Event: task.reminder +{ + "task_id": 123, + "assignee_id": "@claude-code", + "hours_until_due": 12, + "project_id": 1, + "timestamp": "2025-12-11T12:00:00Z" +} +``` + +**Publishing Pattern** (Non-Blocking): + +```python +async def publish_event(event_type: str, data: dict): + """Publish event to Dapr pub/sub (non-blocking).""" + if not settings.DAPR_ENABLED: + logger.debug(f"Dapr disabled, event logged: {event_type}") + return + + try: + async with httpx.AsyncClient() as client: + await client.post( + f"{settings.DAPR_HTTP_ENDPOINT}/v1.0/publish/{settings.DAPR_PUBSUB_NAME}/task-events", + json={"type": event_type, "data": data}, + timeout=1.0 # Fast fail + ) + except Exception as e: + logger.warning(f"Dapr event publish failed: {e}") + # Continue - event publishing is not critical path +``` + +--- + +### Phase 2: Implementation Sequence + +Implementation follows spec priorities (P1 → P2 → P3) to deliver critical bug fix first. + +--- + +#### P1: Critical Bug Fix - Cron Handler for `on_due_date` Trigger + +**Priority**: P1 (Fixes silent failure - highest priority) +**User Story**: "Recurring task spawns on due date" (Acceptance Scenarios 1-4) +**Estimated Effort**: 6-8 hours + +**Files Created**: +1. `packages/api/src/taskflow_api/routers/cron.py` (200 lines) +2. `packages/api/src/taskflow_api/tests/test_cron.py` (150 lines) + +**Files Modified**: +1. `packages/api/src/taskflow_api/main.py` (add APScheduler initialization) +2. `packages/api/src/taskflow_api/config.py` (add `CRON_ENABLED` flag) +3. `packages/api/src/taskflow_api/routers/__init__.py` (import cron router) + +**Implementation Steps**: + +1. 
**Create APScheduler background task** (`main.py`): + ```python + from apscheduler.schedulers.asyncio import AsyncIOScheduler + from contextlib import asynccontextmanager + + scheduler = AsyncIOScheduler() + + @asynccontextmanager + async def lifespan(app: FastAPI): + # Startup: Start cron scheduler + if settings.CRON_ENABLED: + from .routers.cron import process_recurring_tasks, send_reminders + scheduler.add_job(process_recurring_tasks, "interval", seconds=60) + scheduler.add_job(send_reminders, "interval", seconds=60) + scheduler.start() + + yield + + # Shutdown: Stop scheduler + if settings.CRON_ENABLED: + scheduler.shutdown() + + app = FastAPI(lifespan=lifespan) + ``` + +2. **Implement cron router** (`routers/cron.py`): + - Function: `process_recurring_tasks()` + - Query tasks with `recurrence_trigger IN ('on_due_date', 'both')` and `due_date <= now` and `has_spawned_next=False` + - Use `SELECT FOR UPDATE SKIP LOCKED` for row-level locking + - Check `max_occurrences` limit before spawn + - Calculate next `due_date` using `calculate_next_due()` from `routers/tasks.py` + - Create new task with cloned fields + - Clone subtasks if `clone_subtasks_on_recur=True` + - Mark original `has_spawned_next=True` + - Create audit entry: `task.spawned_recurring` + - All in single transaction + + - Function: `clone_subtasks(session, parent_id, new_parent_id)` + - Recursively clone subtasks + - Maintain parent-child relationships + - Preserve assignee, priority, etc. + +3. **Add configuration** (`config.py`): + ```python + class Settings(BaseSettings): + CRON_ENABLED: bool = True + CRON_INTERVAL_SECONDS: int = 60 + ``` + +4. **Write tests** (`tests/test_cron.py`): + - Test: `test_spawn_on_due_date_trigger` (Acceptance 1) + - Test: `test_spawn_with_both_trigger` (Acceptance 2) + - Test: `test_max_occurrences_limit` (Acceptance 3) + - Test: `test_idempotency_no_duplicate_spawn` (Acceptance 4) + - Test: `test_subtask_cloning` + - Test: `test_audit_entry_created` + +**Acceptance Criteria**: +- ✅ Tasks with `recurrence_trigger=on_due_date` spawn when due date passes +- ✅ Tasks with `recurrence_trigger=both` spawn on due date (separate from on_complete) +- ✅ `max_occurrences` limit respected +- ✅ No duplicate spawns (idempotency via `has_spawned_next`) +- ✅ Subtasks cloned if configured +- ✅ Audit trail shows spawn action + +**Testing Strategy**: +1. Create recurring task with `due_date` in past, `recurrence_trigger=on_due_date` +2. Run cron handler manually (call `process_recurring_tasks()`) +3. Verify new task created with next due date +4. Verify original task has `has_spawned_next=True` +5. Run cron again → verify no duplicate spawn +6. Test with `max_occurrences=3` → verify spawn stops after limit + +--- + +#### P2: Core Notifications - Model, Reminders, Assignment + +**Priority**: P2 (High-frequency productivity features) +**User Stories**: "Task assignment notification" (Acceptance 1-3), "Due date reminder" (Acceptance 1-3) +**Estimated Effort**: 8-10 hours + +**Files Created**: +1. `packages/api/src/taskflow_api/models/notification.py` (80 lines) +2. `packages/api/src/taskflow_api/schemas/notification.py` (60 lines) +3. `packages/api/src/taskflow_api/routers/notifications.py` (150 lines) +4. `packages/api/src/taskflow_api/services/events.py` (100 lines) +5. `packages/api/src/taskflow_api/tests/test_notifications.py` (200 lines) + +**Files Modified**: +1. `packages/api/src/taskflow_api/models/__init__.py` (import Notification) +2. 
`packages/api/src/taskflow_api/routers/tasks.py` (add notification hooks in assign/complete) +3. `packages/api/src/taskflow_api/routers/cron.py` (add reminder logic) +4. `packages/api/src/taskflow_api/config.py` (add Dapr settings) +5. `packages/api/src/taskflow_api/main.py` (import notifications router) + +**Implementation Steps**: + +1. **Create Notification model** (`models/notification.py`): + - Follow schema from Deliverable 1.1 + - Add relationships to Task, Project + - Add composite index: `(user_id, read, created_at)` + +2. **Create Notification schemas** (`schemas/notification.py`): + ```python + class NotificationCreate(SQLModel): + user_id: str + user_type: Literal["human", "agent"] + type: str + title: str + body: str + task_id: int | None = None + project_id: int | None = None + + class NotificationRead(SQLModel): + id: int + user_id: str + user_type: str + type: str + title: str + body: str + task_id: int | None + project_id: int | None + read: bool + created_at: datetime + + class NotificationUpdate(SQLModel): + read: bool + ``` + +3. **Create notification router** (`routers/notifications.py`): + - `GET /api/notifications` (list user's notifications, paginated) + - `GET /api/notifications/unread-count` (count unread) + - `PATCH /api/notifications/{id}/read` (mark as read) + - All endpoints filter by `current_user` + +4. **Create event service** (`services/events.py`): + - Function: `publish_event(event_type: str, data: dict)` + - Uses `httpx.AsyncClient` to POST to Dapr sidecar + - Non-blocking: catches exceptions, logs warnings, continues + - Checks `settings.DAPR_ENABLED` flag before attempting publish + +5. **Add notification hooks to tasks router** (`routers/tasks.py`): + - In `assign_task()` endpoint: + ```python + # After assignment + if new_assignee_id: + assignee = await session.get(Worker, new_assignee_id) + notification = Notification( + user_id=f"@{assignee.name}", + user_type=assignee.type, + type="task_assigned", + title="Task assigned to you", + body=f'You have been assigned: "{task.title}"', + task_id=task.id, + project_id=task.project_id, + ) + session.add(notification) + + # Publish event (non-blocking) + await publish_event("task.assigned", { + "task_id": task.id, + "assignee_id": f"@{assignee.name}", + "assigned_by": f"@{current_user.name}", + }) + ``` + + - In `complete_task()` endpoint: + ```python + # After completion, notify creator if different + if task.created_by_id != current_user.id: + creator = await session.get(Worker, task.created_by_id) + notification = Notification( + user_id=f"@{creator.name}", + user_type=creator.type, + type="task_completed", + title="Task completed", + body=f'{current_user.name} completed: "{task.title}"', + task_id=task.id, + project_id=task.project_id, + ) + session.add(notification) + + await publish_event("task.completed", {...}) + ``` + +6. **Add reminder logic to cron** (`routers/cron.py`): + - Function: `send_reminders()` + - Query tasks due within 24 hours, `reminder_sent=False`, not completed + - Create `task_reminder` notification for assignee + - Mark `task.reminder_sent=True` + - Publish `task.reminder` event + +7. **Add Dapr configuration** (`config.py`): + ```python + class Settings(BaseSettings): + DAPR_ENABLED: bool = False # Default off for dev + DAPR_HTTP_ENDPOINT: str = "http://localhost:3500" + DAPR_PUBSUB_NAME: str = "taskflow-pubsub" + NOTIFICATION_RETENTION_DAYS: int = 90 + ``` + +8. 
**Write tests** (`tests/test_notifications.py`): + - Test: `test_list_notifications` (filtered by user) + - Test: `test_unread_count` + - Test: `test_mark_as_read` + - Test: `test_assignment_creates_notification` (Acceptance 1) + - Test: `test_notification_bell_unread_badge` (Acceptance 2) + - Test: `test_click_notification_marks_read` (Acceptance 3) + - Test: `test_reminder_sent_within_24h` (Acceptance 1) + - Test: `test_no_duplicate_reminder` (Acceptance 2) + - Test: `test_completed_task_no_reminder` (Acceptance 3) + +**Acceptance Criteria**: +- ✅ Task assignment creates notification within 5 seconds (Spec SC-003) +- ✅ Unread count badge shows on frontend bell +- ✅ Clicking notification marks it as read +- ✅ Reminders sent within 5 minutes of entering 24h window (Spec SC-002) +- ✅ No duplicate reminders (idempotency via `reminder_sent`) +- ✅ Completed tasks excluded from reminders + +**Testing Strategy**: +1. Assign task to user → verify notification appears in their list +2. Check unread count before/after reading +3. Create task due in 23 hours → run cron → verify reminder sent +4. Create task due in 25 hours → run cron → verify no reminder yet +5. Complete task with reminder due → verify no reminder sent + +--- + +#### P3: User Experience - Frontend Bell & Completion/Spawn Notifications + +**Priority**: P3 (Delivery mechanism for notifications) +**User Stories**: "Notification bell in UI" (Acceptance 1-4), "Task completion notification", "Recurring task spawn notification" +**Estimated Effort**: 6-8 hours + +**Files Created**: +1. `web-dashboard/src/components/NotificationBell.tsx` (200 lines) +2. `web-dashboard/src/components/NotificationItem.tsx` (80 lines) +3. `web-dashboard/src/types/notification.ts` (40 lines) + +**Files Modified**: +1. `web-dashboard/src/components/Header.tsx` (add NotificationBell) +2. `web-dashboard/src/lib/api.ts` (add notification API functions) +3. `packages/api/src/taskflow_api/routers/cron.py` (add spawn notification) + +**Implementation Steps**: + +1. **Create notification TypeScript types** (`types/notification.ts`): + ```typescript + export type NotificationType = + | "task_assigned" + | "task_completed" + | "task_spawned" + | "task_reminder"; + + export interface Notification { + id: number; + user_id: string; + user_type: "human" | "agent"; + type: NotificationType; + title: string; + body: string; + task_id?: number; + project_id?: number; + read: boolean; + created_at: string; + } + ``` + +2. **Add notification API functions** (`lib/api.ts`): + ```typescript + export async function getNotifications(unreadOnly = false, limit = 10) { + const params = new URLSearchParams({ + unread_only: unreadOnly.toString(), + limit: limit.toString() + }); + const response = await fetch(`/api/notifications?${params}`); + return response.json(); + } + + export async function getUnreadCount() { + const response = await fetch("/api/notifications/unread-count"); + const data = await response.json(); + return data.count; + } + + export async function markNotificationRead(id: number) { + await fetch(`/api/notifications/${id}/read`, { + method: "PATCH", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ read: true }), + }); + } + ``` + +3. 
**Create NotificationItem component** (`components/NotificationItem.tsx`): + - Display notification icon based on type + - Show title, body, timestamp (relative: "2 hours ago") + - Link to task/project if IDs present + - Visual indicator for unread (bold text or dot) + - Click handler: mark as read, navigate to task + +4. **Create NotificationBell component** (`components/NotificationBell.tsx`): + - Bell icon with badge (unread count) + - Polling: `useEffect` with 30-second interval to fetch unread count + - Dropdown: Show 10 most recent notifications + - Badge display: "3" for 1-9, "9+" for 10+ + - Click bell → toggle dropdown + - Click outside → close dropdown + - Empty state: "No notifications" + +5. **Add NotificationBell to Header** (`components/Header.tsx`): + ```tsx + import { NotificationBell } from "./NotificationBell"; + + export function Header() { + return ( +
+      <header>
+        {/* Existing header content */}
+        <NotificationBell />
+      </header>
+ ); + } + ``` + +6. **Add spawn notification to cron** (`routers/cron.py`): + - In `process_recurring_tasks()`, after spawning new task: + ```python + # Create notification for assignee (P3 - spawn notification) + if new_task.assignee_id: + assignee = await session.get(Worker, new_task.assignee_id) + notification = Notification( + user_id=f"@{assignee.name}", + user_type=assignee.type, + type="task_spawned", + title="Recurring task created", + body=f'New occurrence of "{new_task.title}" is ready', + task_id=new_task.id, + project_id=new_task.project_id, + ) + session.add(notification) + + await publish_event("task.spawned", {...}) + ``` + +**Acceptance Criteria**: +- ✅ Bell icon displays unread count badge (Acceptance 1) +- ✅ Badge shows "9+" for 10+ notifications (Acceptance 2) +- ✅ Dropdown shows 10 most recent notifications with type icons (Acceptance 3) +- ✅ Clicking "View" navigates to task detail page (Acceptance 4) +- ✅ Spawn notifications created when recurring task auto-spawns (Acceptance 1-2) +- ✅ Completion notifications sent to task creator (Acceptance 1) +- ✅ No self-notifications (Acceptance 2) + +**Testing Strategy**: +1. Create 3 unread notifications → verify badge shows "3" +2. Create 12 unread → verify badge shows "9+" +3. Click bell → verify dropdown opens with 10 items +4. Click notification with task_id → verify navigation to task detail +5. Complete recurring task → verify assignee gets spawn notification +6. Complete own task → verify no completion notification sent + +--- + +### Phase 3: MCP Integration (Agent Parity) + +**Goal**: Expose notification operations to agents via MCP server. + +**Estimated Effort**: 2-3 hours + +**Files Modified**: +1. `packages/mcp-server/src/taskflow_mcp/server.py` (add notification tools) + +**New MCP Tools**: + +```python +@mcp_server.tool() +async def list_notifications( + unread_only: bool = False, + limit: int = 10 +) -> list[dict]: + """List notifications for the authenticated agent. + + Args: + unread_only: Only return unread notifications + limit: Maximum number of notifications to return + + Returns: + List of notification objects + """ + # Call /api/notifications with agent's API key + # Return notification list + +@mcp_server.tool() +async def mark_notification_read(notification_id: int) -> dict: + """Mark a notification as read. + + Args: + notification_id: ID of notification to mark as read + + Returns: + Updated notification object + """ + # Call PATCH /api/notifications/{id}/read + # Return updated notification +``` + +**Acceptance Criteria**: +- ✅ Agents can list their notifications via MCP +- ✅ Agents can mark notifications as read +- ✅ Agents receive same notification types as humans (assignment, reminder, spawn) + +**Testing Strategy**: +1. Assign task to agent via MCP → verify agent can list notification +2. Agent marks notification read → verify read status persists +3. Create recurring task assigned to agent → verify spawn notification appears in agent's list + +--- + +### Phase 4: Testing & Validation + +**Goal**: Ensure all acceptance scenarios pass and system meets success criteria. 
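+
+The overlapping-cron scenario called out below is the one most likely to regress silently, so it is worth automating rather than verifying by hand. A minimal pytest sketch, assuming pytest-asyncio is available and that `session_factory` and `recurring_task_due_in_past` fixtures exist (both fixture names are illustrative, not defined by this plan):
+
+```python
+import asyncio
+
+import pytest
+from sqlmodel import func, select
+
+from taskflow_api.models import Task
+from taskflow_api.routers.cron import process_recurring_tasks
+
+
+@pytest.mark.asyncio
+async def test_concurrent_cron_runs_spawn_once(session_factory, recurring_task_due_in_past):
+    """Two overlapping cron runs must spawn exactly one new occurrence."""
+    # Fire both runs concurrently, as two overlapping scheduler ticks would.
+    await asyncio.gather(process_recurring_tasks(), process_recurring_tasks())
+
+    async with session_factory() as session:
+        # Spawned occurrences point back at the root task via recurring_root_id,
+        # so exactly one row proves SKIP LOCKED prevented a duplicate spawn.
+        spawned = await session.scalar(
+            select(func.count(Task.id)).where(
+                Task.recurring_root_id == recurring_task_due_in_past.id
+            )
+        )
+
+    assert spawned == 1
+```
+
+The same shape (gather two calls, count the side effects) can cover the reminder path by substituting `send_reminders()` and counting `reminder_sent` rows.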
+ +**Test Coverage Matrix**: + +| Test Type | Coverage Target | Files | +|-----------|----------------|-------| +| Unit Tests | Cron logic, notification CRUD | `test_cron.py`, `test_notifications.py` | +| Integration Tests | API endpoints, database transactions | `test_tasks.py` (modified) | +| Manual Tests | Frontend bell, real-time polling | Manual QA checklist | +| Concurrency Tests | Row-level locking, no duplicate spawns | `test_cron.py` | + +**Key Test Scenarios**: + +1. **Concurrent Cron Runs** (Validates Row-Level Locking): + - Start two cron jobs simultaneously + - Verify only one spawns new task + - Verify no duplicate audit entries + +2. **Notification Delivery SLA** (Validates SC-003): + - Assign task → measure time to notification creation + - Assert < 5 seconds + +3. **Reminder Window Accuracy** (Validates SC-002): + - Create tasks at 23h, 24h, 25h until due + - Run cron → verify only 23h gets reminder + - Wait 1 hour → verify 24h gets reminder + +4. **Frontend Polling** (Validates SC-004): + - Create notification → wait 30 seconds + - Verify unread count updates in UI + +5. **Dapr Degradation** (Validates SC-005): + - Disable Dapr (`DAPR_ENABLED=False`) + - Assign task → verify notification still created + - Verify API continues functioning + +**Success Metrics** (From Spec): +- ✅ SC-001: `on_due_date` triggers spawn 100% of time +- ✅ SC-002: Reminders within 5 minutes of 24h window +- ✅ SC-003: Assignment notifications within 5 seconds +- ✅ SC-004: Bell updates within 30 seconds +- ✅ SC-005: System operational without Dapr +- ✅ SC-006: Notification CRUD < 1 second +- ✅ SC-007: No duplicate notifications (idempotency) + +--- + +## Implementation Checklist + +### P1: Critical Bug Fix (6-8 hours) +- [ ] Create `routers/cron.py` with `process_recurring_tasks()` function +- [ ] Implement row-level locking (`SELECT FOR UPDATE SKIP LOCKED`) +- [ ] Add `max_occurrences` check before spawn +- [ ] Implement subtask cloning logic +- [ ] Add APScheduler to `main.py` lifespan +- [ ] Add `CRON_ENABLED` config flag +- [ ] Write unit tests for spawn logic (4 acceptance scenarios) +- [ ] Test with `recurrence_trigger=on_due_date` and `both` +- [ ] Verify audit entries created for spawns +- [ ] Verify idempotency (no duplicate spawns on retry) + +### P2: Core Notifications (8-10 hours) +- [ ] Create `models/notification.py` with Notification SQLModel +- [ ] Create `schemas/notification.py` (Create, Read, Update) +- [ ] Create `routers/notifications.py` (list, unread count, mark read) +- [ ] Create `services/events.py` (Dapr event publishing) +- [ ] Add notification hooks to `routers/tasks.py` (assign, complete) +- [ ] Add reminder logic to `routers/cron.py` (`send_reminders()`) +- [ ] Add Dapr config to `config.py` +- [ ] Write integration tests for notification CRUD +- [ ] Test assignment notification (within 5 seconds) +- [ ] Test reminder notification (24-hour window) +- [ ] Test completion notification (creator != completer) +- [ ] Verify Dapr events publish (or log if disabled) + +### P3: Frontend Bell (6-8 hours) +- [ ] Create `types/notification.ts` (Notification interface) +- [ ] Add notification API functions to `lib/api.ts` +- [ ] Create `components/NotificationItem.tsx` +- [ ] Create `components/NotificationBell.tsx` (bell + dropdown) +- [ ] Add 30-second polling in NotificationBell +- [ ] Implement unread badge (count, "9+" for 10+) +- [ ] Add NotificationBell to Header +- [ ] Add spawn notification to cron handler +- [ ] Test bell displays unread count +- [ ] Test dropdown shows 
10 recent notifications +- [ ] Test clicking notification marks as read +- [ ] Test navigation to task from notification + +### MCP Integration (2-3 hours) +- [ ] Add `list_notifications` MCP tool +- [ ] Add `mark_notification_read` MCP tool +- [ ] Test agents can list their notifications +- [ ] Test agents receive assignment/spawn notifications + +### Testing & Validation (4-6 hours) +- [ ] Run full test suite (`uv run pytest`) +- [ ] Manual test: concurrent cron runs (row-level locking) +- [ ] Manual test: notification delivery SLA (<5s) +- [ ] Manual test: reminder window accuracy (24h) +- [ ] Manual test: frontend polling (30s interval) +- [ ] Manual test: Dapr degradation (API works without Dapr) +- [ ] Verify all 7 success criteria (SC-001 to SC-007) +- [ ] Verify all acceptance scenarios pass (18 scenarios across 6 user stories) + +--- + +## Deployment Considerations + +### Environment Variables (Production) + +```bash +# Cron Configuration +CRON_ENABLED=true +CRON_INTERVAL_SECONDS=60 + +# Dapr Configuration +DAPR_ENABLED=true +DAPR_HTTP_ENDPOINT=http://localhost:3500 +DAPR_PUBSUB_NAME=taskflow-pubsub + +# Notification Configuration +NOTIFICATION_RETENTION_DAYS=90 +``` + +### Dapr Component Configuration (Phase V - DOKS) + +**`components/pubsub.yaml`**: +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: taskflow-pubsub +spec: + type: pubsub.kafka + version: v1 + metadata: + - name: brokers + value: "redpanda-0.redpanda.svc.cluster.local:9092" + - name: consumerGroup + value: "taskflow-notifications" + - name: authType + value: "none" +``` + +**Pub/Sub Topic**: `task-events` + +**Subscribers** (Future Enhancement): +- Analytics service (consumes all events for metrics) +- Webhook service (consumes events for external integrations) + +### Database Migration + +**Migration**: `add_notification_table_and_reminder_field.sql` + +```sql +-- Create notification table +CREATE TABLE notification ( + id SERIAL PRIMARY KEY, + user_id VARCHAR(100) NOT NULL, + user_type VARCHAR(10) NOT NULL CHECK (user_type IN ('human', 'agent')), + type VARCHAR(50) NOT NULL, + title VARCHAR(200) NOT NULL, + body TEXT NOT NULL, + task_id INTEGER REFERENCES task(id) ON DELETE CASCADE, + project_id INTEGER REFERENCES project(id) ON DELETE CASCADE, + read BOOLEAN NOT NULL DEFAULT FALSE, + created_at TIMESTAMP NOT NULL DEFAULT NOW() +); + +-- Indexes for performance +CREATE INDEX idx_notification_user_id ON notification(user_id); +CREATE INDEX idx_notification_task_id ON notification(task_id); +CREATE INDEX idx_notification_created_at ON notification(created_at); +CREATE INDEX idx_notification_user_read_created ON notification(user_id, read, created_at); + +-- reminder_sent field already added to task table in previous migration +-- (Agent 2A added this field, so no migration needed here) +``` + +**Rollback Plan**: +```sql +DROP TABLE notification CASCADE; +-- Note: Do NOT drop reminder_sent from task - it was added by Agent 2A +``` + +### Monitoring & Observability + +**Key Metrics to Track**: +- Cron execution time (alert if >50s) +- Cron spawn rate (tasks spawned per run) +- Notification creation rate (per type) +- Dapr event publish success rate +- Notification delivery SLA (time from event to notification creation) +- Unread notification count per user (detect notification fatigue) + +**Alerts**: +- Cron execution failure (>3 consecutive failures) +- Notification creation delay (>10s from event) +- Dapr publish failure rate (>10% over 5 minutes) +- Database lock contention (cron row locks 
timing out) + +--- + +## Risk Analysis & Mitigation + +| Risk | Impact | Likelihood | Mitigation | +|------|--------|-----------|------------| +| **Cron overlapping runs** | Duplicate spawns, data corruption | Medium | Use row-level locking (`SKIP LOCKED`), add execution time monitoring, alert if >50s | +| **Notification spam** | User fatigue, ignored notifications | Medium | 90-day retention, future: user preferences for notification types | +| **Dapr unavailability** | Lost observability events | Low | Events are non-blocking, system continues, events logged locally | +| **Database lock contention** | Cron slowdown, timeouts | Low | `SKIP LOCKED` allows concurrent processing, monitor lock wait times | +| **Polling overhead** | Frontend performance | Low | 30s interval is conservative, future: WebSocket for real-time updates | +| **Timezone confusion** | Wrong reminder times | Medium | All times in UTC, frontend displays in user's local timezone (separate concern) | + +--- + +## Future Enhancements (Out of Scope for MVP) + +**Post-MVP Features** (Not in current spec): +1. Real-time WebSocket notifications (replace polling) +2. Email/SMS notifications (multi-channel delivery) +3. User notification preferences (mute types, frequency) +4. Notification grouping/batching (reduce spam) +5. Custom reminder times per task (not just 24h) +6. Notification read receipts in audit trail +7. Digest emails (daily/weekly summary) +8. Mobile push notifications +9. Notification templates (i18n support) +10. Advanced filtering (by project, priority, date range) + +**Technical Debt to Address**: +- Notification retention cleanup (add scheduled job) +- Cron execution metrics (add Prometheus integration) +- Dapr circuit breaker (retry logic for transient failures) +- Frontend notification cache (reduce API calls) +- WebSocket infrastructure (Phase VI) + +--- + +## References + +**Spec**: `/specs/012-notifications-dapr-triggers/spec.md` +**Constitution**: `.specify/memory/constitution.md` +**Directives**: `research/DIRECTIVES.md` +**Existing Cron Logic**: `packages/api/src/taskflow_api/routers/tasks.py` (line 100: `calculate_next_due()`) +**Audit Service**: `packages/api/src/taskflow_api/services/audit.py` +**Recurring Task Fields**: `packages/api/src/taskflow_api/models/task.py` (lines 64-97) + +--- + +## Acceptance Sign-Off + +This plan will be considered complete when: +- ✅ All P1 acceptance scenarios pass (User Story 1: 4 scenarios) +- ✅ All P2 acceptance scenarios pass (User Stories 2-3: 6 scenarios) +- ✅ All P3 acceptance scenarios pass (User Stories 4-6: 8 scenarios) +- ✅ All 7 success criteria validated (SC-001 to SC-007) +- ✅ Test coverage >80% for new code +- ✅ No duplicate spawns in concurrency test +- ✅ System operational with `DAPR_ENABLED=False` +- ✅ Frontend bell updates within 30 seconds +- ✅ Documentation updated (API docs, deployment guide) + +**Estimated Total Effort**: 26-35 hours +**Target Completion**: Phase V Production (DOKS deployment) +**Constitutional Compliance**: ✅ All 5 principles upheld diff --git a/specs/012-notifications-dapr-triggers/spec.md b/specs/012-notifications-dapr-triggers/spec.md new file mode 100644 index 0000000..59d469c --- /dev/null +++ b/specs/012-notifications-dapr-triggers/spec.md @@ -0,0 +1,227 @@ +# Feature Specification: Notifications, Reminders & Dapr Integration + +**Feature Branch**: `012-notifications-dapr-triggers` +**Created**: 2025-12-11 +**Status**: Draft +**Input**: User description: "Implement notifications system with Dapr integration, cron 
handlers for on_due_date trigger, reminders, event publishing, and frontend bell" + +## Context + +This feature completes the recurring tasks implementation started by Agent 2A. Agent 2A built 7 database fields for recurring tasks, but the `recurrence_trigger` field accepts values (`on_due_date`, `both`) that silently fail - they don't trigger any action. This feature fixes that bug and adds a complete notification system. + +### Pre-existing Implementation (Agent 2A) + +- `on_complete` trigger: Works - spawns next task when current is completed +- `on_due_date` trigger: Silently fails - users can set it but nothing happens +- `both` trigger: Silently fails - same issue + +### Critical Bug to Fix + +Users can currently set `recurrence_trigger=on_due_date` and the task will never spawn. This is a silent failure that violates user expectations. + +## User Scenarios & Testing *(mandatory)* + +### User Story 1 - Recurring Task Spawns on Due Date (Priority: P1) + +A project manager creates a weekly standup task that should automatically create the next occurrence when the due date passes, even if the current task isn't completed yet. + +**Why this priority**: This is the core bug fix - without it, the `on_due_date` and `both` triggers are broken, violating user expectations when they set these values. + +**Independent Test**: Can be fully tested by creating a recurring task with `recurrence_trigger=on_due_date`, waiting for the due date to pass, and verifying a new task is automatically created. + +**Acceptance Scenarios**: + +1. **Given** a recurring task with `recurrence_trigger=on_due_date` and `due_date` set to a time in the past, **When** the cron job runs, **Then** a new task occurrence is created with the next due date calculated from the recurrence pattern. +2. **Given** a recurring task with `recurrence_trigger=both` and `due_date` in the past but status is not completed, **When** the cron job runs, **Then** a new task occurrence is created (triggered by due date, not completion). +3. **Given** a recurring task with `max_occurrences=3` and 3 tasks already spawned, **When** the due date passes, **Then** no new task is created (limit reached). +4. **Given** a recurring task that has already spawned via `on_due_date`, **When** the user later completes the original task, **Then** no duplicate spawn occurs (idempotency). + +--- + +### User Story 2 - Task Assignment Notification (Priority: P2) + +A team member is assigned a task by their manager. They immediately receive a notification so they know they have new work. + +**Why this priority**: Task assignment is a high-frequency event that directly affects team productivity. Users need timely awareness of new responsibilities. + +**Independent Test**: Can be fully tested by assigning a task to a user and verifying a notification appears in their notification list. + +**Acceptance Scenarios**: + +1. **Given** a task exists in a project, **When** a manager assigns the task to a team member, **Then** the assignee receives a notification titled "Task assigned to you" within 5 seconds. +2. **Given** a task is assigned to a user, **When** the user views the notification bell, **Then** they see an unread count badge. +3. **Given** a user has unread notifications, **When** they click on a notification, **Then** it is marked as read and the unread count decreases. + +--- + +### User Story 3 - Due Date Reminder (Priority: P2) + +A worker has a task due tomorrow. They receive a reminder notification so they can prioritize their work accordingly. 
+ +**Why this priority**: Reminders prevent missed deadlines - a core productivity feature. Equal priority with assignment as both serve immediate user needs. + +**Independent Test**: Can be fully tested by creating a task due within 24 hours and verifying a reminder notification is sent to the assignee. + +**Acceptance Scenarios**: + +1. **Given** a task with `due_date` within 24 hours and `reminder_sent=False`, **When** the cron job runs, **Then** the assignee receives a "Task due in X hours" notification. +2. **Given** a task already sent a reminder (`reminder_sent=True`), **When** the cron job runs again, **Then** no duplicate reminder is sent. +3. **Given** a completed task with a due date within 24 hours, **When** the cron job runs, **Then** no reminder is sent (completed tasks don't need reminders). + +--- + +### User Story 4 - Notification Bell in UI (Priority: P3) + +A user can see all their notifications in a dropdown from the header, with an unread count badge that updates in real-time. + +**Why this priority**: The frontend display is the delivery mechanism for all notifications. Without it, notifications exist but aren't visible to users. + +**Independent Test**: Can be fully tested by generating notifications for a user and verifying the bell icon displays them with correct unread counts. + +**Acceptance Scenarios**: + +1. **Given** a user has 3 unread notifications, **When** they view the header, **Then** the bell icon shows a "3" badge. +2. **Given** a user has more than 9 unread notifications, **When** they view the header, **Then** the badge shows "9+". +3. **Given** a user clicks the bell icon, **When** the dropdown opens, **Then** they see their 10 most recent notifications with type icons. +4. **Given** a notification links to a task, **When** the user clicks "View", **Then** they navigate to the task detail page. + +--- + +### User Story 5 - Task Completion Notification (Priority: P3) + +When a team member completes a task that was assigned by someone else, the task creator is notified that their requested work is done. + +**Why this priority**: Completion notifications close the feedback loop for task creators but are lower priority than assignment/reminder which affect active work. + +**Independent Test**: Can be fully tested by having user B complete a task created by user A, and verifying user A receives a notification. + +**Acceptance Scenarios**: + +1. **Given** user A created a task assigned to user B, **When** user B marks the task as completed, **Then** user A receives a "Task completed" notification. +2. **Given** a user completes their own task (creator and completer are the same), **When** the task is completed, **Then** no notification is sent (avoid self-notifications). + +--- + +### User Story 6 - Recurring Task Spawn Notification (Priority: P3) + +When a recurring task automatically spawns its next occurrence, the assignee is notified that new work is ready. + +**Why this priority**: This keeps users aware of recurring work being added to their queue, but is lower priority than the spawn logic itself. + +**Independent Test**: Can be fully tested by completing a recurring task and verifying the assignee of the new occurrence receives a notification. + +**Acceptance Scenarios**: + +1. **Given** a recurring task is completed (on_complete trigger), **When** the next occurrence is created, **Then** the assignee receives a "Recurring task created" notification. +2. 
**Given** a recurring task's due date passes (on_due_date trigger), **When** the next occurrence is created by cron, **Then** the assignee receives a "Recurring task created" notification. + +--- + +### Edge Cases + +- What happens when the notification service is unavailable? + - The API continues to function; event publishing is non-blocking with graceful failure logging. +- How does the system handle time zones? + - All times are stored and compared in UTC. Frontend displays in user's local timezone (separate concern). +- What happens if a task is assigned to a user without a user_id (e.g., placeholder worker)? + - No notification is sent; the event is skipped with a warning log. +- What if due_date is null for a task with on_due_date trigger? + - The task is skipped by the cron query (due_date IS NOT NULL filter). +- What if the cron job fails mid-execution? + - Each task is processed independently; failures don't block other tasks. Failed tasks are logged and retried on next cron run. + +## Requirements *(mandatory)* + +### Functional Requirements + +**Cron & Triggers** +- **FR-001**: System MUST spawn next recurring task occurrence when `recurrence_trigger` is `on_due_date` or `both` and the `due_date` has passed. Spawn operation (new task creation + has_spawned_next=True update) MUST execute within a single database transaction to prevent duplicate spawns on failure. +- **FR-002**: System MUST NOT spawn duplicate tasks - once `has_spawned_next=True`, no further spawns occur for that task. +- **FR-003**: System MUST respect `max_occurrences` limit before spawning new tasks. +- **FR-004**: System MUST run cron job at least every 1 minute to support 1-minute recurrence patterns. +- **FR-005**: System MUST calculate next due date using the task's `recurrence_pattern` from the original due date (not current time). + +**Reminders** +- **FR-006**: System MUST send reminder notifications for tasks due within 24 hours. +- **FR-007**: System MUST mark `reminder_sent=True` after sending to prevent duplicate reminders. +- **FR-008**: System MUST NOT send reminders for completed, cancelled, or unassigned tasks. + +**Notifications** +- **FR-009**: System MUST store notifications with: user_id, type, title, body, task_id, project_id, read status, timestamp. +- **FR-010**: System MUST provide an API to list notifications filtered by user_id. +- **FR-011**: System MUST provide an API to get unread notification count for a user. +- **FR-012**: System MUST provide an API to mark a notification as read. + +**Events** +- **FR-013**: System MUST publish `task.assigned` event when a task is assigned to a user. +- **FR-014**: System MUST publish `task.completed` event to task creator when someone else completes their task. +- **FR-015**: System MUST publish `task.spawned` event when a recurring task creates its next occurrence. +- **FR-016**: System MUST publish `task.reminder` event when a due date reminder is triggered. + +**Frontend** +- **FR-017**: Frontend MUST display a notification bell icon in the header. +- **FR-018**: Frontend MUST display unread notification count as a badge on the bell icon. +- **FR-019**: Frontend MUST display notification list in a dropdown when bell is clicked. +- **FR-020**: Frontend MUST poll for new notifications at a reasonable interval (30 seconds). + +**Resilience** +- **FR-021**: API MUST continue functioning if notification service or event bus is unavailable (non-blocking operations). +- **FR-022**: Failed event publishes MUST be logged but not throw exceptions. 
+ +### Key Entities + +- **Notification**: Represents a notification to a user. Contains: id, user_id, type, title, body, task_id (optional), project_id (optional), read (boolean), created_at. +- **Task (extended)**: Existing Task entity gains `reminder_sent` field to track reminder state. + +### Constraints + +**Atomicity**: +- Recurring task spawn (new task creation + has_spawned_next=True) MUST execute in a single database transaction (enforced by FR-001) +- Reminder flag update (reminder_sent=True) MUST be atomic with notification creation + +**Concurrency Control**: +- Cron job MUST use row-level locking (`SELECT FOR UPDATE SKIP LOCKED`) when querying tasks to prevent race conditions +- Cron execution MUST complete within 60 seconds OR use distributed locking to prevent overlapping runs +- If cron job takes >60 seconds, next scheduled run MUST wait until current execution completes + +**Data Retention**: +- Notifications older than 90 days are automatically deleted (configurable via NOTIFICATION_RETENTION_DAYS environment variable) + +**Notification Architecture (Direct Creation - Option A)**: +- API creates notification rows directly in the main database (same PostgreSQL instance, separate table) +- Dapr events (`task.assigned`, `task.completed`, `task.spawned`, `task.reminder`) are published as an observability/audit trail, NOT the critical path for notification creation +- This ensures SC-005 is achievable: notifications are created even if Dapr/event bus is unavailable +- Notification API endpoints (list, unread count, mark read) are part of the main FastAPI application +- No separate notification service deployment required for MVP + +## Success Criteria *(mandatory)* + +### Measurable Outcomes + +- **SC-001**: `on_due_date` and `both` recurrence triggers function correctly - 100% of recurring tasks spawn their next occurrence when due date passes. +- **SC-002**: Reminder notifications are sent within 5 minutes of a task entering the 24-hour-before-due window. +- **SC-003**: Assignment notifications are delivered within 5 seconds of task assignment. +- **SC-004**: Notification bell updates within 30 seconds of new notifications being created. +- **SC-005**: System remains fully operational (task CRUD, assignment, completion) when notification service is unavailable. +- **SC-006**: Users can view, click, and mark notifications as read with < 1 second response time. +- **SC-007**: No duplicate notifications are created for the same event (idempotency). + +## Assumptions + +1. **Dapr for local development**: For local development without Kubernetes, events will be logged rather than published. A `dapr_enabled` config flag controls this. +2. **Direct notification creation**: Notifications are created directly by the main API (not via event-driven pattern). This simplifies architecture and ensures SC-005 (operational without event bus). A separate notification service is NOT required for MVP. +3. **Notification database**: Notifications are stored in the same PostgreSQL instance as the main API (separate table: `notification`). +4. **Authentication**: Notification API uses the same SSO/JWT authentication as the main API. +5. **Polling vs WebSocket**: Initial implementation uses polling (30s interval). WebSocket real-time updates are a future enhancement. +6. **Monthly recurrence**: "monthly" pattern means 30 days, not calendar month (consistent with Agent 2A implementation). +7. 
**Reminder window**: 24 hours is the default reminder window; making it configurable is a future enhancement. + +## Non-Goals + +- Real-time WebSocket notifications (polling is sufficient for MVP) +- Email/SMS notifications (in-app only for this phase) +- Notification preferences/settings per user +- Notification grouping/batching +- Mobile push notifications +- Custom reminder times per task +- Audit trail for notification delivery (separate from task audit) diff --git a/specs/012-notifications-dapr-triggers/tasks.md b/specs/012-notifications-dapr-triggers/tasks.md new file mode 100644 index 0000000..6e7299e --- /dev/null +++ b/specs/012-notifications-dapr-triggers/tasks.md @@ -0,0 +1,471 @@ +# Tasks: Notifications, Reminders & Dapr Integration + +**Input**: Design documents from `/specs/012-notifications-dapr-triggers/` +**Prerequisites**: plan.md (required), spec.md (required for user stories) + +**Total Tasks**: 62 tasks (~155 minutes implementation) +**Parallelizable**: 35 tasks (56%) +**Tests**: Manual testing approach (per spec - no TDD requested) + +## Format: `[ID] [P?] [Story] FR-XXX: Description` + +- **[P]**: Can run in parallel (different files, no dependencies) +- **[Story]**: Which user story this task belongs to (US1-US6) +- **FR-XXX**: Functional requirement from spec.md +- Include exact file paths in descriptions + +## AI-Native Execution Guide + +### Official Documentation (Query via Context7 MCP) + +**Phase 1-2: Backend Setup** +- FastAPI: `/tiangolo/fastapi` topics: "async routes", "dependencies", "background tasks", "lifespan" +- SQLModel: `/tiangolo/sqlmodel` topics: "models", "relationships", "indexes" +- APScheduler: Official docs at https://apscheduler.readthedocs.io + +**Phase 3-4: Frontend** +- Next.js: `/vercel/next.js` topics: "app router", "client components", "use client" +- shadcn/ui: `/shadcn/ui` topics: "dropdown-menu", "badge", "button" + +### Skills to Use (from `.claude/skills/engineering/`) + +**Required Skills for This Feature:** +- **`fastapi-backend`** - Production FastAPI patterns, async session handling +- **`sqlmodel-database`** - SQLModel entities, relationships, indexes +- **`nextjs-16`** - Next.js 16 App Router patterns +- **`shadcn-ui`** - UI components with Tailwind + +**Optional Skills (for troubleshooting):** +- **`mcp-builder`** - If adding MCP tools for agent parity +- **`better-auth-sso`** - If auth integration needed for notification endpoints + +### Implementation Pattern (For Each Task) +1. Query relevant official docs via Context7 (NEVER skip) +2. Review plan.md section for architecture decisions +3. Check spec.md for functional requirements +4. Implement using official patterns +5. Verify with acceptance criteria +6. 
Mark task complete with checkbox + +--- + +## Phase 1: Setup (4 tasks, 10 min) + +**Purpose**: Configuration and project initialization + +**Acceptance Criteria**: +- [ ] Config settings added for CRON_ENABLED, DAPR_ENABLED, NOTIFICATION_RETENTION_DAYS + ```bash + grep -E "(CRON_ENABLED|DAPR_ENABLED|NOTIFICATION_RETENTION)" packages/api/src/taskflow_api/config.py + # Expected: 3 config variables defined + ``` +- [ ] APScheduler dependency added to pyproject.toml + ```bash + grep "apscheduler" packages/api/pyproject.toml + # Expected: apscheduler in dependencies + ``` + +**Tasks**: +- [ ] T001 [P] Add APScheduler dependency to packages/api/pyproject.toml +- [ ] T002 [P] Add CRON_ENABLED config to packages/api/src/taskflow_api/config.py +- [ ] T003 [P] Add DAPR_ENABLED and DAPR_HTTP_ENDPOINT config to packages/api/src/taskflow_api/config.py +- [ ] T004 [P] Add NOTIFICATION_RETENTION_DAYS config to packages/api/src/taskflow_api/config.py + +--- + +## Phase 2: Foundational (8 tasks, 25 min) + +**Purpose**: Core infrastructure that MUST be complete before ANY user story + +**FRs**: FR-009 (notification storage) + +**⚠️ CRITICAL**: No user story work can begin until this phase is complete + +**Acceptance Criteria**: +- [ ] Notification model created with all required fields + ```bash + grep -E "class Notification" packages/api/src/taskflow_api/models/notification.py + # Expected: Notification class definition + ``` +- [ ] Notification model imported in __init__.py + ```bash + grep "Notification" packages/api/src/taskflow_api/models/__init__.py + # Expected: Notification import + ``` +- [ ] Event service created with publish_event function + ```bash + grep "async def publish_event" packages/api/src/taskflow_api/services/events.py + # Expected: publish_event function + ``` + +**Tasks**: +- [ ] T005 [P] FR-009: Create Notification SQLModel in packages/api/src/taskflow_api/models/notification.py +- [ ] T006 [P] FR-009: Add indexes to Notification model (user_id, task_id, created_at, composite) +- [ ] T007 FR-009: Add Notification import to packages/api/src/taskflow_api/models/__init__.py +- [ ] T008 [P] Create NotificationCreate schema in packages/api/src/taskflow_api/schemas/notification.py +- [ ] T009 [P] Create NotificationRead schema in packages/api/src/taskflow_api/schemas/notification.py +- [ ] T010 [P] Create NotificationUpdate schema in packages/api/src/taskflow_api/schemas/notification.py +- [ ] T011 [P] FR-021/FR-022: Create event service in packages/api/src/taskflow_api/services/events.py +- [ ] T012 FR-021: Add graceful degradation (try/except) to publish_event function + +**Checkpoint**: Foundation ready - user story implementation can now begin + +--- + +## Phase 3: User Story 1 - Recurring Task Spawns on Due Date (Priority: P1) 🎯 MVP + +**Goal**: Fix the critical bug where `recurrence_trigger=on_due_date` silently fails + +**FRs**: FR-001, FR-002, FR-003, FR-004, FR-005 + +**Independent Test**: Create recurring task with `recurrence_trigger=on_due_date`, wait for due date to pass, verify new task spawned +```bash +# Test command +curl -X POST http://localhost:8000/api/cron/process-recurring-tasks +# Expected: {"spawned": N, "skipped": M} +``` + +**Acceptance Criteria**: +- [ ] Cron router created with process_recurring_tasks endpoint + ```bash + grep "process_recurring_tasks" packages/api/src/taskflow_api/routers/cron.py + # Expected: Function definition + ``` +- [ ] APScheduler initialized in main.py lifespan + ```bash + grep "AsyncIOScheduler" packages/api/src/taskflow_api/main.py + 
# Expected: Scheduler setup in lifespan + ``` +- [ ] Row-level locking implemented (SELECT FOR UPDATE SKIP LOCKED) + ```bash + grep -i "skip_locked" packages/api/src/taskflow_api/routers/cron.py + # Expected: with_for_update(skip_locked=True) + ``` + +### Implementation for User Story 1 (14 tasks, 45 min) + +- [ ] T013 [P] [US1] FR-004: Create cron router file packages/api/src/taskflow_api/routers/cron.py +- [ ] T014 [US1] FR-001: Implement process_recurring_tasks() query with recurrence_trigger filter +- [ ] T015 [US1] FR-002: Add SELECT FOR UPDATE SKIP LOCKED for row-level locking +- [ ] T016 [US1] FR-003: Add max_occurrences check before spawning (count existing occurrences) +- [ ] T017 [US1] FR-005: Implement calculate_next_due() for pattern-based date calculation +- [ ] T018 [US1] FR-001: Create spawn_next_occurrence() function with database transaction +- [ ] T019 [US1] FR-001: Copy recurring task fields to new task (title, description, pattern, trigger, etc.) +- [ ] T020 [US1] Clone subtasks if clone_subtasks_on_recur=True (reuse Agent 2A logic) +- [ ] T021 [US1] FR-002: Set has_spawned_next=True on original task (idempotency) +- [ ] T022 [US1] Create audit entry for spawned task (action: task.spawned_recurring) +- [ ] T023 [P] [US1] FR-004: Add APScheduler initialization to packages/api/src/taskflow_api/main.py lifespan +- [ ] T024 [US1] FR-004: Schedule process_recurring_tasks job every 60 seconds +- [ ] T025 [US1] Import and include cron router in packages/api/src/taskflow_api/main.py +- [ ] T026 [US1] Add cron router to packages/api/src/taskflow_api/routers/__init__.py + +**Checkpoint**: Recurring task spawn on due date is now functional. Test with: +```bash +# Create task with recurrence_trigger=on_due_date and past due_date +# Run: curl -X POST http://localhost:8000/api/cron/process-recurring-tasks +# Verify: New task created, original has has_spawned_next=True +``` + +--- + +## Phase 4: User Story 2 - Task Assignment Notification (Priority: P2) + +**Goal**: Notify users when they are assigned a task + +**FRs**: FR-010, FR-011, FR-012, FR-013 + +**Independent Test**: Assign task to user, verify notification appears in their list +```bash +# Test: Call assignment endpoint, then check notifications +curl http://localhost:8000/api/notifications?user_id=@test-user +# Expected: notification with type="task_assigned" +``` + +**Acceptance Criteria**: +- [ ] Notifications router created with list, unread-count, mark-read endpoints + ```bash + grep -E "(list_notifications|unread_count|mark_read)" packages/api/src/taskflow_api/routers/notifications.py + # Expected: 3 function definitions + ``` +- [ ] Assignment hook added to tasks.py + ```bash + grep "task_assigned" packages/api/src/taskflow_api/routers/tasks.py + # Expected: Notification creation in assign endpoint + ``` + +### Implementation for User Story 2 (10 tasks, 30 min) + +- [ ] T027 [P] [US2] FR-010: Create notifications router packages/api/src/taskflow_api/routers/notifications.py +- [ ] T028 [US2] FR-010: Implement GET /api/notifications endpoint (list filtered by user) +- [ ] T029 [US2] FR-011: Implement GET /api/notifications/unread-count endpoint +- [ ] T030 [US2] FR-012: Implement PATCH /api/notifications/{id}/read endpoint +- [ ] T031 [US2] Import and include notifications router in packages/api/src/taskflow_api/main.py +- [ ] T032 [US2] Add notifications router to packages/api/src/taskflow_api/routers/__init__.py +- [ ] T033 [US2] FR-013: Add notification creation to assign_task() in 
packages/api/src/taskflow_api/routers/tasks.py +- [ ] T034 [US2] FR-013: Add publish_event("task.assigned") call after assignment +- [ ] T035 [US2] FR-013: Create notification with type="task_assigned", title="Task assigned to you" +- [ ] T036 [US2] Skip notification if assignee has no user_id (edge case from spec) + +**Checkpoint**: Assignment notifications working. Test with task assignment, verify notification created. + +--- + +## Phase 5: User Story 3 - Due Date Reminder (Priority: P2) + +**Goal**: Send reminder notifications for tasks due within 24 hours + +**FRs**: FR-006, FR-007, FR-008, FR-016 + +**Independent Test**: Create task due in 23 hours, run cron, verify reminder notification sent +```bash +curl -X POST http://localhost:8000/api/cron/send-reminders +# Expected: {"sent": N} +``` + +**Acceptance Criteria**: +- [ ] send_reminders function implemented in cron.py + ```bash + grep "send_reminders" packages/api/src/taskflow_api/routers/cron.py + # Expected: Function definition + ``` +- [ ] reminder_sent flag updated after sending + ```bash + grep "reminder_sent = True" packages/api/src/taskflow_api/routers/cron.py + # Expected: Flag update + ``` + +### Implementation for User Story 3 (8 tasks, 20 min) + +- [ ] T037 [US3] FR-006: Implement send_reminders() function in packages/api/src/taskflow_api/routers/cron.py +- [ ] T038 [US3] FR-006: Query tasks due within 24 hours with reminder_sent=False +- [ ] T039 [US3] FR-008: Filter out completed, cancelled, and unassigned tasks +- [ ] T040 [US3] FR-006: Create notification with type="task_reminder", title="Task due in X hours" +- [ ] T041 [US3] FR-007: Set task.reminder_sent=True after sending (idempotency) +- [ ] T042 [US3] FR-016: Add publish_event("task.reminder") call +- [ ] T043 [US3] FR-004: Schedule send_reminders job in APScheduler (every 60 seconds) +- [ ] T044 [US3] Create audit entry for reminder sent (action: task.reminder_sent) + +**Checkpoint**: Reminders working. Test with task due within 24h, run cron, verify notification. 
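The reminder pass in T037-T041 reduces to one guarded query plus an idempotent flag flip. The sketch below is illustrative only: the model fields (`due_date`, `reminder_sent`, `assignee_user_id`), the `status` values, and the import paths are assumptions drawn from the spec, not the final implementation.

```python
# Illustrative sketch of T037-T041 (field names and import paths are assumptions).
from datetime import datetime, timedelta, timezone

from sqlmodel import select
from sqlmodel.ext.asyncio.session import AsyncSession

from taskflow_api.models import Notification, Task  # assumed import path


async def send_reminders(session: AsyncSession) -> dict:
    now = datetime.now(timezone.utc)
    window_end = now + timedelta(hours=24)

    # FR-006 / FR-008: due within 24h, not yet reminded, still open, and assigned
    stmt = (
        select(Task)
        .where(Task.due_date != None)  # noqa: E711 (SQL NULL check)
        .where(Task.due_date <= window_end)
        .where(Task.reminder_sent == False)  # noqa: E712
        .where(Task.status.not_in(["completed", "cancelled"]))
        .where(Task.assignee_user_id != None)  # noqa: E711
        .with_for_update(skip_locked=True)  # same locking rule as the spawn query
    )
    tasks = (await session.exec(stmt)).all()

    sent = 0
    for task in tasks:
        hours_left = max(0, int((task.due_date - now).total_seconds() // 3600))
        session.add(Notification(
            user_id=task.assignee_user_id,
            type="task_reminder",
            title=f"Task due in {hours_left} hours",  # FR-006 wording
            body=task.title,
            task_id=task.id,
        ))
        task.reminder_sent = True  # FR-007: flag flips in the same transaction
        sent += 1

    await session.commit()
    return {"sent": sent}
```

Because `reminder_sent` flips inside the same transaction that inserts the notification, running the cron job twice cannot produce duplicate reminders (SC-007).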
+ +--- + +## Phase 6: User Story 4 - Notification Bell in UI (Priority: P3) + +**Goal**: Display notification bell with unread count in frontend header + +**FRs**: FR-017, FR-018, FR-019, FR-020 + +**Independent Test**: Create notifications for user, verify bell shows count and dropdown works +```bash +# Manual test in browser - check bell icon shows unread count +``` + +**Acceptance Criteria**: +- [ ] NotificationBell component created + ```bash + ls web-dashboard/src/components/NotificationBell.tsx + # Expected: File exists + ``` +- [ ] Bell added to Header component + ```bash + grep "NotificationBell" web-dashboard/src/components/layout/Header.tsx + # Expected: Component imported and used + ``` + +### Implementation for User Story 4 (10 tasks, 25 min) + +- [ ] T045 [P] [US4] Create TypeScript types in web-dashboard/src/types/notification.ts +- [ ] T046 [P] [US4] FR-020: Add notification API functions to web-dashboard/src/lib/api.ts +- [ ] T047 [US4] FR-019: Create NotificationItem component in web-dashboard/src/components/NotificationItem.tsx +- [ ] T048 [US4] FR-017: Create NotificationBell component in web-dashboard/src/components/NotificationBell.tsx +- [ ] T049 [US4] FR-018: Add unread count badge to NotificationBell (show count, "9+" for 10+) +- [ ] T050 [US4] FR-019: Add dropdown menu with notification list (10 most recent) +- [ ] T051 [US4] FR-020: Add 30-second polling with useEffect in NotificationBell +- [ ] T052 [US4] Add click handler to mark notification as read +- [ ] T053 [US4] Add "View" link to navigate to task detail page +- [ ] T054 [US4] FR-017: Add NotificationBell to Header in web-dashboard/src/components/layout/Header.tsx + +**Checkpoint**: Frontend bell working. Test in browser - bell shows count, dropdown displays notifications. + +--- + +## Phase 7: User Story 5 - Task Completion Notification (Priority: P3) + +**Goal**: Notify task creator when someone else completes their task + +**FRs**: FR-014 + +**Independent Test**: User B completes task created by User A, verify User A gets notification + +**Acceptance Criteria**: +- [ ] Completion notification logic added to tasks.py + ```bash + grep "task_completed" packages/api/src/taskflow_api/routers/tasks.py + # Expected: Notification creation in completion logic + ``` + +### Implementation for User Story 5 (4 tasks, 10 min) + +- [ ] T055 [US5] FR-014: Add notification hook to completion logic in packages/api/src/taskflow_api/routers/tasks.py +- [ ] T056 [US5] FR-014: Check if completer != creator before sending notification +- [ ] T057 [US5] FR-014: Create notification with type="task_completed", title="Task completed" +- [ ] T058 [US5] FR-014: Add publish_event("task.completed") call + +**Checkpoint**: Completion notifications working. Test with different creator/completer. 
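US5 reduces to a small hook on the completion path. The sketch below is a minimal illustration: `notify_task_completed`, the creator/completer field names, and the import paths are assumptions; in the real code the logic lives inline in the completion route in `tasks.py` (T055).

```python
# Illustrative sketch of T055-T058 (function and field names are assumptions).
from taskflow_api.models import Notification            # assumed import path
from taskflow_api.services.events import publish_event  # wrapper from T011/T012


async def notify_task_completed(session, task, completed_by_user_id: str) -> None:
    # FR-014: only the creator is notified, and only when someone else completes it
    if task.created_by_user_id and task.created_by_user_id != completed_by_user_id:
        session.add(Notification(
            user_id=task.created_by_user_id,
            type="task_completed",
            title="Task completed",
            body=task.title,
            task_id=task.id,
        ))

    # Audit-trail event only; publish_event logs and swallows Dapr failures (SC-005)
    await publish_event("task.completed", {
        "task_id": task.id,
        "completed_by": completed_by_user_id,
    })
```

Creating the notification row directly and treating the Dapr publish as best-effort is what keeps SC-005 intact when the event bus is down.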
+ +--- + +## Phase 8: User Story 6 - Recurring Task Spawn Notification (Priority: P3) + +**Goal**: Notify assignee when recurring task spawns next occurrence + +**FRs**: FR-015 + +**Independent Test**: Complete recurring task, verify assignee of new task gets notification + +**Acceptance Criteria**: +- [ ] Spawn notification added to cron handler + ```bash + grep "task_spawned" packages/api/src/taskflow_api/routers/cron.py + # Expected: Notification creation in spawn logic + ``` + +### Implementation for User Story 6 (4 tasks, 10 min) + +- [ ] T059 [US6] FR-015: Add notification creation to spawn_next_occurrence() in cron.py +- [ ] T060 [US6] FR-015: Create notification with type="task_spawned", title="Recurring task created" +- [ ] T061 [US6] FR-015: Add publish_event("task.spawned") call +- [ ] T062 [US6] Skip notification if new task has no assignee + +**Checkpoint**: Spawn notifications working. Test with recurring task completion/due date. + +--- + +## Phase 9: Polish & Validation (ongoing) + +**Purpose**: Final validation and cleanup + +**Acceptance Criteria**: +- [ ] All 7 success criteria from spec validated + ```bash + # SC-001: on_due_date triggers spawn + # SC-002: Reminders within 5 minutes + # SC-003: Assignment notifications <5s + # SC-004: Bell updates within 30s + # SC-005: System works without Dapr + # SC-006: Notification API <1s + # SC-007: No duplicate notifications + ``` + +**Tasks** (no task IDs - validation checklist): +- [ ] Verify SC-001: Create recurring task with on_due_date, verify spawn +- [ ] Verify SC-002: Create task due in 23h, run cron, verify reminder +- [ ] Verify SC-003: Assign task, verify notification in <5s +- [ ] Verify SC-004: Create notification, verify bell updates in <30s +- [ ] Verify SC-005: Set DAPR_ENABLED=False, verify API still works +- [ ] Verify SC-006: Call notification endpoints, verify <1s response +- [ ] Verify SC-007: Run cron twice, verify no duplicate notifications + +--- + +## Dependencies & Execution Order + +### User Story Completion Order +``` +Setup → Foundation → US1 → US2 → US3 → US4 → US5 → US6 → Polish + ↘ US4 ↗ + (can parallel with US3) +``` + +### Critical Path (minimum time to completion) +- Setup: 10 min +- Foundation: 25 min +- US1: 45 min (P1 - critical bug fix) +- US2: 30 min +- US3: 20 min +- US4: 25 min (frontend can parallel with US3) +- US5: 10 min +- US6: 10 min +- Polish: ongoing + +**Total Critical Path**: ~175 min (2h 55min) + +### Parallel Opportunities +- **Phase 1**: All 4 setup tasks can run in parallel (different files) +- **Phase 2**: 6 of 8 foundational tasks can run in parallel +- **Phase 3-4**: US4 frontend can run in parallel with US2/US3 backend +- **Parallelizable**: 35 tasks (56% of total) + +### Within Each User Story +- Models/schemas before services +- Services before routers +- Backend before frontend integration +- Core implementation before edge cases + +--- + +## Implementation Strategy + +### MVP Scope (US1 Only) +**Critical Path**: Setup → Foundation → US1 +**Time**: ~80 min +**Outcome**: Critical bug fixed - on_due_date trigger works + +### Quick Win: US1 + US2 +**Add**: Assignment notifications +**Time**: ~110 min +**Outcome**: Bug fix + immediate productivity improvement + +### Full Feature +**Add**: US3, US4, US5, US6 +**Time**: ~175 min total +**Outcome**: Complete notification system with frontend bell + +### Recommended Execution +1. Complete Setup + Foundation (35 min) → Validate config +2. Complete US1 (45 min) → **TEST: on_due_date spawn works** +3. 
Complete US2 (30 min) → **TEST: assignment notifications work** +4. Complete US3 (20 min) → **TEST: reminders work** +5. Complete US4 (25 min) → **TEST: frontend bell works** +6. Complete US5 + US6 (20 min) → **TEST: completion/spawn notifications** +7. Run validation checklist + +--- + +## FR Traceability Matrix + +| FR | Description | Tasks | User Story | +|----|-------------|-------|------------| +| FR-001 | Spawn on due_date with transaction | T014, T018, T019, T021 | US1 | +| FR-002 | No duplicate spawns | T015, T021 | US1 | +| FR-003 | Respect max_occurrences | T016 | US1 | +| FR-004 | Cron every 1 minute | T013, T023, T024, T043 | US1, US3 | +| FR-005 | Calculate next due date | T017 | US1 | +| FR-006 | Reminders for tasks due within 24h | T037, T038, T040 | US3 | +| FR-007 | Mark reminder_sent=True | T041 | US3 | +| FR-008 | Skip completed/unassigned | T039 | US3 | +| FR-009 | Store notifications | T005, T006, T007 | Foundation | +| FR-010 | List notifications API | T028 | US2 | +| FR-011 | Unread count API | T029 | US2 | +| FR-012 | Mark read API | T030 | US2 | +| FR-013 | Publish task.assigned | T033, T034, T035 | US2 | +| FR-014 | Publish task.completed | T055, T056, T057, T058 | US5 | +| FR-015 | Publish task.spawned | T059, T060, T061 | US6 | +| FR-016 | Publish task.reminder | T042 | US3 | +| FR-017 | Bell icon in header | T048, T054 | US4 | +| FR-018 | Unread count badge | T049 | US4 | +| FR-019 | Notification dropdown | T047, T050 | US4 | +| FR-020 | Poll every 30s | T046, T051 | US4 | +| FR-021 | Continue if Dapr unavailable | T011, T012 | Foundation | +| FR-022 | Log failed publishes | T012 | Foundation | + +--- + +## Notes + +- [P] tasks = different files, no dependencies +- [Story] label maps task to specific user story for traceability +- Each user story should be independently completable and testable +- Commit after each phase or logical group +- Stop at any checkpoint to validate story independently +- All FRs from spec.md are covered by tasks diff --git a/sso-platform/src/lib/auth.ts b/sso-platform/src/lib/auth.ts index 6d75726..6de2a71 100644 --- a/sso-platform/src/lib/auth.ts +++ b/sso-platform/src/lib/auth.ts @@ -691,6 +691,96 @@ export const auth = betterAuth({ organization({ // Allow any user to create organizations (can be restricted later) allowUserToCreateOrganization: true, + // Send invitation emails when members are invited to organizations + sendInvitationEmail: async ({ email, organization, inviter, invitation }) => { + const appName = process.env.NEXT_PUBLIC_APP_NAME || "Taskflow SSO"; + const appDescription = process.env.NEXT_PUBLIC_APP_DESCRIPTION || "Secure Single Sign-On"; + const orgNameEnv = process.env.NEXT_PUBLIC_ORG_NAME || "Taskflow"; + + // Build invitation URL from invitation ID + const baseUrl = process.env.BETTER_AUTH_URL || "http://localhost:3001"; + const inviteUrl = `${baseUrl}/auth/accept-invitation?token=${invitation.id}`; + + // Get inviter name from the nested user object + const inviterName = inviter.user?.name || inviter.user?.email || "Someone"; + + await sendEmail({ + to: email, + subject: `You've been invited to join ${organization.name}`, + html: ` + + + + + + + + + + + + +
+          <!DOCTYPE html>
+          <html>
+            <body style="margin: 0; padding: 0; background-color: #f4f4f7; font-family: Arial, sans-serif;">
+              <div style="max-width: 600px; margin: 0 auto; padding: 24px;">
+                <!-- Header -->
+                <div style="text-align: center; padding: 24px 0;">
+                  <h1 style="margin: 0; color: #111827;">${appName}</h1>
+                  <p style="margin: 8px 0 0; color: #6b7280;">${appDescription}</p>
+                </div>
+                <!-- Body -->
+                <div style="background: #ffffff; border-radius: 8px; padding: 32px;">
+                  <h2 style="margin-top: 0; color: #111827;">You're Invited! 🎉</h2>
+                  <p style="color: #374151; line-height: 1.6;">
+                    ${inviterName} has invited you to join <strong>${organization.name}</strong> on ${appName}.
+                  </p>
+                  <div style="text-align: center; margin: 32px 0;">
+                    <a href="${inviteUrl}"
+                       style="background-color: #4f46e5; color: #ffffff; padding: 12px 24px; border-radius: 6px; text-decoration: none; display: inline-block;">
+                      Accept Invitation
+                    </a>
+                  </div>
+                  <p style="color: #6b7280; font-size: 14px;">
+                    Or copy and paste this link into your browser:
+                  </p>
+                  <p style="color: #4f46e5; font-size: 14px; word-break: break-all;">
+                    ${inviteUrl}
+                  </p>
+                  <p style="color: #6b7280; font-size: 14px; margin-top: 24px;">
+                    🔐 <strong>Secure Access:</strong> Once you accept, you'll have access to ${organization.name}'s resources and projects.
+                  </p>
+                </div>
+                <!-- Footer -->
+                <div style="text-align: center; padding: 24px 0; color: #9ca3af; font-size: 12px;">
+                  <p style="margin: 0;">This is an automated message from ${appName}</p>
+                  <p style="margin: 8px 0 0;">Secure authentication for your ${orgNameEnv} applications</p>
+                </div>
+              </div>
+            </body>
+          </html>
+ + + `, + }); + }, }), // Username plugin - Adds username fields for user profiles diff --git a/web-dashboard/package.json b/web-dashboard/package.json index 14e96cd..87b2165 100644 --- a/web-dashboard/package.json +++ b/web-dashboard/package.json @@ -24,6 +24,7 @@ "better-auth": "^1.4.5", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "date-fns": "^4.1.0", "jose": "^6.1.3", "lucide-react": "^0.556.0", "next": "16.0.7", diff --git a/web-dashboard/pnpm-lock.yaml b/web-dashboard/pnpm-lock.yaml index 04a754f..28f1ab3 100644 --- a/web-dashboard/pnpm-lock.yaml +++ b/web-dashboard/pnpm-lock.yaml @@ -53,6 +53,9 @@ importers: clsx: specifier: ^2.1.1 version: 2.1.1 + date-fns: + specifier: ^4.1.0 + version: 4.1.0 jose: specifier: ^6.1.3 version: 6.1.3 @@ -1399,6 +1402,9 @@ packages: resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==} engines: {node: '>= 0.4'} + date-fns@4.1.0: + resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==} + debug@3.2.7: resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} peerDependencies: @@ -3882,6 +3888,8 @@ snapshots: es-errors: 1.3.0 is-data-view: 1.0.2 + date-fns@4.1.0: {} + debug@3.2.7: dependencies: ms: 2.1.3 @@ -4039,8 +4047,8 @@ snapshots: '@next/eslint-plugin-next': 16.0.7 eslint: 9.39.1(jiti@1.21.7) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)) + eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.39.1(jiti@1.21.7)) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@1.21.7)) eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.1(jiti@1.21.7)) eslint-plugin-react: 7.37.5(eslint@9.39.1(jiti@1.21.7)) eslint-plugin-react-hooks: 7.0.1(eslint@9.39.1(jiti@1.21.7)) @@ -4062,7 +4070,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)): + eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0)(eslint@9.39.1(jiti@1.21.7)): dependencies: '@nolyfill/is-core-module': 1.0.39 debug: 4.4.3 @@ -4073,22 +4081,22 @@ snapshots: tinyglobby: 0.2.15 unrs-resolver: 1.11.1 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)) + eslint-plugin-import: 
2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@1.21.7)) transitivePeerDependencies: - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)): + eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@1.21.7)): dependencies: debug: 3.2.7 optionalDependencies: '@typescript-eslint/parser': 8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3) eslint: 9.39.1(jiti@1.21.7) eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)) + eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@9.39.1(jiti@1.21.7)) transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@1.21.7)): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -4099,7 +4107,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.39.1(jiti@1.21.7) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)))(eslint@9.39.1(jiti@1.21.7)) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.1(eslint@9.39.1(jiti@1.21.7))(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.1(jiti@1.21.7)) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 diff --git a/web-dashboard/src/app/api/proxy/[...path]/route.ts b/web-dashboard/src/app/api/proxy/[...path]/route.ts index 36859ef..4069938 100644 --- a/web-dashboard/src/app/api/proxy/[...path]/route.ts +++ b/web-dashboard/src/app/api/proxy/[...path]/route.ts @@ -4,6 +4,20 @@ import { cookies } from "next/headers"; // SERVER_API_URL for Docker (container names), fallback to NEXT_PUBLIC for local dev const API_BASE = process.env.SERVER_API_URL || process.env.NEXT_PUBLIC_API_URL || "http://localhost:8000"; +// Notification Service URL - separate microservice +const NOTIFICATION_SERVICE_URL = process.env.SERVER_NOTIFICATION_URL || process.env.NEXT_PUBLIC_NOTIFICATION_URL || "http://localhost:8001"; + +// Routes that go to Notification Service instead of main API +const NOTIFICATION_ROUTES = ["notifications"]; + +function getTargetUrl(path: string[]): 
string {
+  // Check if this is a notification route
+  if (path.length > 0 && NOTIFICATION_ROUTES.includes(path[0])) {
+    return NOTIFICATION_SERVICE_URL;
+  }
+  return API_BASE;
+}
+
 async function proxyRequest(request: NextRequest, params: Promise<{ path: string[] }>) {
   const { path } = await params;
   const cookieStore = await cookies();
@@ -25,9 +39,14 @@ async function proxyRequest(request: NextRequest, params: Promise<{ path: string
     return NextResponse.json({ error: "Not authenticated" }, { status: 401 });
   }
 
+  // Determine target service based on route
+  const targetBase = getTargetUrl(path);
+
   // Build the target URL - prepend /api since backend expects /api/...
   const targetPath = "/api/" + path.join("/");
-  const url = new URL(targetPath, API_BASE);
+  const url = new URL(targetPath, targetBase);
+
+  console.log("[Proxy] Routing to:", url.toString());
 
   // Copy query params
   request.nextUrl.searchParams.forEach((value, key) => {
diff --git a/web-dashboard/src/components/NotificationBell.tsx b/web-dashboard/src/components/NotificationBell.tsx
new file mode 100644
index 0000000..beb1191
--- /dev/null
+++ b/web-dashboard/src/components/NotificationBell.tsx
@@ -0,0 +1,205 @@
+"use client";
+
+import { useState, useEffect, useCallback } from "react";
+import { Bell, Check, CheckCheck, Clock, User, RefreshCw } from "lucide-react";
+import { Button } from "@/components/ui/button";
+import { Badge } from "@/components/ui/badge";
+import {
+  DropdownMenu,
+  DropdownMenuContent,
+  DropdownMenuTrigger,
+  DropdownMenuSeparator,
+} from "@/components/ui/dropdown-menu";
+import { api } from "@/lib/api";
+import { Notification } from "@/types/notification";
+import { formatDistanceToNow } from "date-fns";
+
+// Notification type to icon mapping
+function getNotificationIcon(type: string) {
+  switch (type) {
+    case "task_assigned":
+      return <User className="h-4 w-4" />;
+    case "task_completed":
+      return <CheckCheck className="h-4 w-4" />;
+    case "task_spawned":
+      return <RefreshCw className="h-4 w-4" />;
+    case "task_reminder":
+      return <Clock className="h-4 w-4" />;
+    default:
+      return <Bell className="h-4 w-4" />;
+  }
+}
+
+export function NotificationBell() {
+  const [notifications, setNotifications] = useState<Notification[]>([]);
+  const [unreadCount, setUnreadCount] = useState(0);
+  const [isOpen, setIsOpen] = useState(false);
+  const [isLoading, setIsLoading] = useState(false);
+
+  // Fetch unread count (on mount and when dropdown closes)
+  const fetchUnreadCount = useCallback(async () => {
+    try {
+      const result = await api.getUnreadNotificationCount();
+      setUnreadCount(result.count);
+    } catch (error) {
+      console.error("Failed to fetch notification count:", error);
+    }
+  }, []);
+
+  // Fetch notifications (when dropdown opens)
+  const fetchNotifications = useCallback(async () => {
+    setIsLoading(true);
+    try {
+      const result = await api.getNotifications(false, 20);
+      setNotifications(result);
+      // Also update unread count
+      const countResult = await api.getUnreadNotificationCount();
+      setUnreadCount(countResult.count);
+    } catch (error) {
+      console.error("Failed to fetch notifications:", error);
+    } finally {
+      setIsLoading(false);
+    }
+  }, []);
+
+  // Mark single notification as read
+  const handleMarkRead = async (notificationId: number) => {
+    try {
+      await api.markNotificationRead(notificationId, true);
+      // Update local state
+      setNotifications((prev) =>
+        prev.map((n) => (n.id === notificationId ? { ...n, read: true } : n))
+      );
+      setUnreadCount((prev) => Math.max(0, prev - 1));
+    } catch (error) {
+      console.error("Failed to mark notification read:", error);
+    }
+  };
+
+  // Mark all notifications as read
+  const handleMarkAllRead = async () => {
+    try {
+      await api.markAllNotificationsRead();
+      setNotifications((prev) => prev.map((n) => ({ ...n, read: true })));
+      setUnreadCount(0);
+    } catch (error) {
+      console.error("Failed to mark all notifications read:", error);
+    }
+  };
+
+  // Fetch unread count on mount
+  useEffect(() => {
+    fetchUnreadCount();
+  }, [fetchUnreadCount]);
+
+  // Fetch full notifications when dropdown opens
+  useEffect(() => {
+    if (isOpen) {
+      fetchNotifications();
+    }
+  }, [isOpen, fetchNotifications]);
+
+  return (
+    <DropdownMenu open={isOpen} onOpenChange={setIsOpen}>
+      <DropdownMenuTrigger asChild>
+        <Button variant="ghost" size="icon" className="relative">
+          <Bell className="h-5 w-5" />
+          {unreadCount > 0 && (
+            <Badge className="absolute -top-1 -right-1 h-5 min-w-5 px-1 text-xs">
+              {unreadCount > 9 ? "9+" : unreadCount}
+            </Badge>
+          )}
+        </Button>
+      </DropdownMenuTrigger>
+      <DropdownMenuContent align="end" className="w-80">
+        {/* Header */}
+        <div className="flex items-center justify-between px-3 py-2">
+          <span className="text-sm font-semibold">Notifications</span>
+          {unreadCount > 0 && (
+            <Button variant="ghost" size="sm" onClick={handleMarkAllRead}>
+              <CheckCheck className="mr-1 h-4 w-4" />
+              Mark all read
+            </Button>
+          )}
+        </div>
+        <DropdownMenuSeparator />
+
+        {/* Notification list */}
+        <div className="max-h-96 overflow-y-auto">
+          {isLoading ? (
+            <div className="px-3 py-6 text-center text-sm text-muted-foreground">
+              Loading...
+            </div>
+          ) : notifications.length === 0 ? (
+            <div className="px-3 py-6 text-center text-sm text-muted-foreground">
+              No notifications
+            </div>
+          ) : (
+            notifications.map((notification) => (
+              <div
+                key={notification.id}
+                className={`flex items-start gap-2 px-3 py-2 ${notification.read ? "" : "bg-muted/50"}`}
+                onClick={() => {
+                  if (!notification.read) {
+                    handleMarkRead(notification.id);
+                  }
+                  // TODO: Navigate to task if task_id exists
+                }}
+              >
+                <div className="mt-0.5">{getNotificationIcon(notification.type)}</div>
+                <div className="min-w-0 flex-1">
+                  <div className="flex items-center gap-1">
+                    <span className="truncate text-sm font-medium">
+                      {notification.title}
+                    </span>
+                    {!notification.read && (
+                      <span className="h-2 w-2 shrink-0 rounded-full bg-blue-500" />
+                    )}
+                  </div>
+                  <p className="text-xs text-muted-foreground">
+                    {notification.body}
+                  </p>
+                  <p className="text-xs text-muted-foreground">
+                    {formatDistanceToNow(
+                      new Date(
+                        notification.created_at.endsWith("Z")
+                          ? notification.created_at
+                          : notification.created_at + "Z"
+                      ),
+                      { addSuffix: true }
+                    )}
+                  </p>
+                </div>
+                {!notification.read && (
+                  <Button
+                    variant="ghost"
+                    size="icon"
+                    className="h-6 w-6 shrink-0"
+                    onClick={(event) => {
+                      event.stopPropagation();
+                      handleMarkRead(notification.id);
+                    }}
+                  >
+                    <Check className="h-3 w-3" />
+                  </Button>
+                )}
+              </div>
+            ))
+          )}
+        </div>
+
+        {/* Footer */}
+        {notifications.length > 0 && (
+          <>
+            <DropdownMenuSeparator />
+            <div className="px-3 py-2">
+              <Button
+                variant="ghost"
+                size="sm"
+                className="w-full"
+                onClick={fetchNotifications}
+              >
+                <RefreshCw className="mr-1 h-4 w-4" />
+                Refresh
+              </Button>
+            </div>
+          </>
+        )}
+      </DropdownMenuContent>
+    </DropdownMenu>
+  );
+}
diff --git a/web-dashboard/src/components/layout/header.tsx b/web-dashboard/src/components/layout/header.tsx
index 3503583..e6b9945 100644
--- a/web-dashboard/src/components/layout/header.tsx
+++ b/web-dashboard/src/components/layout/header.tsx
@@ -13,6 +13,7 @@ import {
 import { Avatar, AvatarFallback } from "@/components/ui/avatar"
 import { Badge } from "@/components/ui/badge"
 import { OrgSwitcher } from "@/components/OrgSwitcher"
+import { NotificationBell } from "@/components/NotificationBell"
 import { LogOut, User, Settings, Moon, Sun } from "lucide-react"
 import { useState, useEffect } from "react"
@@ -59,6 +60,9 @@ export function Header() {
         {/* Organization Switcher */}
         <OrgSwitcher />
 
+        {/* Notification Bell */}
+        <NotificationBell />
+
         {/* Theme toggle */}