This commit is contained in:
2025-09-08 12:51:15 -07:00
commit 574feb1ea1
62 changed files with 10425 additions and 0 deletions

1352
CL_plan.md Normal file

File diff suppressed because it is too large Load Diff

57
Makefile Normal file
View File

@@ -0,0 +1,57 @@
# Docker-based workflow targets; all commands run inside containers.
# Every target is phony (including test/db-shell) so a file with the same
# name can never shadow the target.
.PHONY: up down build start stop restart logs backend-logs frontend-logs db-logs init-db migration test db-shell

# Start all services in detached mode
up:
	docker-compose up -d

# Stop and remove all containers
down:
	docker-compose down

# Rebuild all Docker images
build:
	docker-compose build --no-cache

# Start services if not running, otherwise bring the stack up
start:
	docker-compose start || docker-compose up -d

# Stop running services
stop:
	docker-compose stop

# Restart all services
restart:
	docker-compose restart

# Show logs for all services
logs:
	docker-compose logs -f

# Show backend logs
backend-logs:
	docker-compose logs -f backend

# Show frontend logs
frontend-logs:
	docker-compose logs -f frontend

# Show database logs
db-logs:
	docker-compose logs -f db

# Initialize database and run migrations
init-db:
	docker-compose run --rm backend alembic upgrade head

# Create new database migration: make migration m="add users table"
migration:
	docker-compose run --rm backend alembic revision --autogenerate -m "$(m)"

# Run tests
test:
	docker-compose run --rm backend pytest

# Open database shell
db-shell:
	docker-compose exec db psql -U appuser -d cyclingdb

325
README.md Normal file
View File

@@ -0,0 +1,325 @@
# AI Cycling Coach
A single-user, self-hosted web application that provides AI-powered cycling training plan generation, workout analysis, and plan evolution based on actual ride data from Garmin Connect.
## 🚀 Quick Start
### Prerequisites
- Docker and Docker Compose
- 2GB+ available RAM
- 10GB+ available disk space
### Setup
1. Clone the repository
2. Copy environment file: `cp .env.example .env`
3. Edit `.env` with your credentials
4. Start services: `docker-compose up -d`
## 🐳 Container-First Development
This project follows strict containerization practices. All development occurs within Docker containers - never install packages directly on the host system.
### Key Rules
#### Containerization Rules
- ✅ All Python packages must be in `backend/requirements.txt`
- ✅ All system packages must be in `backend/Dockerfile`
- ✅ Never run `pip install` or `apt-get install` outside containers
- ✅ Use `docker-compose` for local development
#### Database Management
- ✅ Schema changes handled through Alembic migrations
- ✅ Migrations run automatically on container startup
- ✅ No raw SQL in application code - use SQLAlchemy ORM
- ✅ Migration rollback scripts available for emergencies
### Development Workflow
```bash
# Start development environment
docker-compose up -d
# View logs
docker-compose logs -f backend
# Run database migrations manually (if needed)
docker-compose exec backend alembic upgrade head
# Access backend container
docker-compose exec backend bash
# Stop services
docker-compose down
```
### Migration Management
#### Automatic Migrations
Migrations run automatically when containers start. The entrypoint script:
1. Runs `alembic upgrade head`
2. Verifies migration success
3. Starts the application
#### Manual Migration Operations
```bash
# Check migration status
docker-compose exec backend python scripts/migration_checker.py check-db
# Generate new migration
docker-compose exec backend alembic revision --autogenerate -m "description"
# Rollback migration
docker-compose exec backend python scripts/migration_rollback.py rollback
```
#### Migration Validation
```bash
# Validate deployment readiness
docker-compose exec backend python scripts/migration_checker.py validate-deploy
# Generate migration report
docker-compose exec backend python scripts/migration_checker.py report
```
### Database Backup & Restore
#### Creating Backups
```bash
# Create backup
docker-compose exec backend python scripts/backup_restore.py backup
# Create named backup
docker-compose exec backend python scripts/backup_restore.py backup my_backup
```
#### Restoring from Backup
```bash
# List available backups
docker-compose exec backend python scripts/backup_restore.py list
# Restore (with confirmation prompt)
docker-compose exec backend python scripts/backup_restore.py restore backup_file.sql
# Restore without confirmation
docker-compose exec backend python scripts/backup_restore.py restore backup_file.sql --yes
```
#### Cleanup
```bash
# Remove backups older than 30 days
docker-compose exec backend python scripts/backup_restore.py cleanup
# Remove backups older than N days
docker-compose exec backend python scripts/backup_restore.py cleanup 7
```
## 🔧 Configuration
### Environment Variables
```env
# Database (must match alembic.ini and docker-compose: appuser / cyclingdb)
DATABASE_URL=postgresql+asyncpg://appuser:password@db:5432/cyclingdb
# Garmin Connect
GARMIN_USERNAME=your_garmin_email@example.com
GARMIN_PASSWORD=your_secure_password
# AI Service
OPENROUTER_API_KEY=your_openrouter_api_key
AI_MODEL=anthropic/claude-3-sonnet-20240229
# Application
API_KEY=your_secure_random_api_key_here
```
### Health Checks
The application includes comprehensive health monitoring:
```bash
# Check overall health
curl http://localhost:8000/health
# Response includes:
# - Database connectivity
# - Migration status
# - Current vs head revision
# - Service availability
```
## 🏗️ Architecture
### Service Architecture
```
┌─────────────────┐ ┌─────────────────┐
│ Frontend │ │ Backend │
│ (React) │◄──►│ (FastAPI) │
│ │ │ │
└─────────────────┘ └─────────────────┘
│ │
▼ ▼
┌─────────────────┐ ┌─────────────────┐
│ Garmin │ │ PostgreSQL │
│ Connect │ │ Database │
└─────────────────┘ └─────────────────┘
```
### Data Flow
1. Garmin activities synced via background tasks
2. AI analysis performed on workout data
3. Training plans evolved based on performance
4. User feedback incorporated for plan adjustments
## 🧪 Testing & Validation
### CI/CD Pipeline
GitHub Actions automatically validates:
- ✅ No uncommitted migration files
- ✅ No raw SQL in application code
- ✅ Proper dependency management
- ✅ Container build success
- ✅ Migration compatibility
### Local Validation
```bash
# Run all validation checks
docker-compose exec backend python scripts/migration_checker.py validate-deploy
# Check for raw SQL usage
grep -r "SELECT.*FROM\|INSERT.*INTO\|UPDATE.*SET\|DELETE.*FROM" backend/app/
```
## 📁 Project Structure
```
.
├── backend/
│ ├── Dockerfile # Multi-stage container build
│ ├── requirements.txt # Python dependencies
│ ├── scripts/
│ │ ├── migration_rollback.py # Rollback utilities
│ │ ├── backup_restore.py # Backup/restore tools
│ │ └── migration_checker.py # Validation tools
│ └── app/
│ ├── main.py # FastAPI application
│ ├── database.py # Database configuration
│ ├── models/ # SQLAlchemy models
│ ├── routes/ # API endpoints
│ ├── services/ # Business logic
│ └── schemas/ # Pydantic schemas
├── frontend/
│ ├── Dockerfile
│ └── src/
├── docker-compose.yml # Development services
├── .github/
│ └── workflows/
│ └── container-validation.yml # CI/CD checks
└── .kilocode/
└── rules/
└── container-database-rules.md # Development guidelines
```
## 🚨 Troubleshooting
### Common Issues
#### Migration Failures
```bash
# Check migration status
docker-compose exec backend alembic current
# View migration history
docker-compose exec backend alembic history
# Reset migrations (CAUTION: destroys data)
docker-compose exec backend alembic downgrade base
```
#### Database Connection Issues
```bash
# Check database health
docker-compose exec db pg_isready -U appuser -d cyclingdb
# View database logs
docker-compose logs db
# Restart database
docker-compose restart db
```
#### Container Build Issues
```bash
# Rebuild without cache
docker-compose build --no-cache backend
# View build logs
docker-compose build backend
```
### Health Monitoring
#### Service Health
```bash
# Check all services
docker-compose ps
# View service logs
docker-compose logs -f
# Check backend health
curl http://localhost:8000/health
```
#### Database Health
```bash
# Check database connectivity
docker-compose exec backend python -c "
from app.database import AsyncSessionLocal
from sqlalchemy import text
import asyncio
async def test():
    async with AsyncSessionLocal() as session:
        await session.execute(text('SELECT 1'))
        print('Database OK')
asyncio.run(test())
"
```
## 🔒 Security
- API key authentication for all endpoints
- Secure storage of Garmin credentials
- No sensitive data in application logs
- Container isolation prevents host system access
- Regular security updates via container rebuilds
## 📚 API Documentation
Once running, visit:
- **API Docs**: http://localhost:8000/docs
- **Alternative Docs**: http://localhost:8000/redoc
## 🤝 Contributing
1. Follow container-first development rules
2. Ensure all changes pass CI/CD validation
3. Update documentation for significant changes
4. Test migration compatibility before merging
### Development Guidelines
- Use SQLAlchemy ORM for all database operations
- Keep dependencies in `requirements.txt`
- Test schema changes in development environment
- Document migration changes in commit messages
- Run validation checks before pushing
## 📄 License
This project is licensed under the MIT License - see the LICENSE file for details.
---
**Note**: This application is designed for single-user, self-hosted deployment. All data remains on your local infrastructure with no external data sharing.

70
backend/Dockerfile Normal file
View File

@@ -0,0 +1,70 @@
# Multi-stage build for container-first development.
# builder: compiles Python wheels with gcc + libpq headers.
# runtime: slim image carrying only the libpq client library.
FROM python:3.11-slim-bullseye AS builder

# `ENV key value` is legacy (deprecated) syntax; use `ENV key=value`.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Build-time system dependencies for compiling DB drivers.
RUN apt-get update && \
    apt-get install -y --no-install-recommends gcc libpq-dev && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install Python dependencies in the builder layer only.
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# ---------- Runtime stage ----------
FROM python:3.11-slim-bullseye AS runtime

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Runtime system dependencies only (PostgreSQL client library).
RUN apt-get update && \
    apt-get install -y --no-install-recommends libpq5 && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy installed packages and console scripts from the builder stage.
COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin

# Copy application code.
COPY backend/ .

# Entrypoint script: run migrations, verify, then exec the CMD.
# printf is used instead of `echo '...\n...'` because echo's backslash
# escape handling is shell-dependent.
RUN printf '%s\n' \
    '#!/bin/bash' \
    'set -e' \
    '' \
    '# Run database migrations' \
    'echo "Running database migrations..."' \
    'alembic upgrade head' \
    '' \
    '# Verify migration success' \
    'echo "Verifying migration status..."' \
    'alembic current' \
    '' \
    '# Start the application' \
    'echo "Starting application..."' \
    'exec "$@"' > /app/entrypoint.sh && \
    chmod +x /app/entrypoint.sh

# Run as a non-root user.
RUN useradd -m appuser && chown -R appuser:appuser /app
USER appuser

# Expose application port
EXPOSE 8000

# Entrypoint handles migrations, then executes CMD via exec "$@".
ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

24
backend/alembic.ini Normal file
View File

@@ -0,0 +1,24 @@
# Alembic configuration for the backend service.
# NOTE(review): env.py passes app.database.DATABASE_URL to the engine, which
# takes precedence over the URL below — keep the two in sync.
[alembic]
script_location = alembic
sqlalchemy.url = postgresql+asyncpg://appuser:password@db:5432/cyclingdb

# Standard alembic logging boilerplate below: warnings and above to stderr.
[loggers]
keys = root
[handlers]
keys = console
[logger_root]
level = WARN
handlers = console
qualname =
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

56
backend/alembic/env.py Normal file
View File

@@ -0,0 +1,56 @@
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
import asyncio
import sys
import os

# Make the application package importable when alembic runs from /app.
sys.path.append(os.getcwd())

# Import the models' metadata and the runtime database URL.
from app.models import Base
from app.database import DATABASE_URL

config = context.config
fileConfig(config.config_file_name)

# Make both offline and online modes use the application's URL, overriding
# whatever is baked into alembic.ini.
config.set_main_option("sqlalchemy.url", DATABASE_URL)

target_metadata = Base.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode: emit SQL without connecting."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection):
    """Configure the migration context on a sync-wrapped connection and run.

    Must be a plain (sync) function: it is invoked via AsyncConnection.run_sync.
    The original declared it async and nested another run_sync call, which
    could never execute.
    """
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations():
    """Create an async engine from config and drive the sync migration fn."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()


def run_migrations_online():
    """Run migrations in 'online' mode.

    The original used `async with` inside this sync function — a
    SyntaxError; the async work now lives in run_async_migrations().
    """
    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

11
backend/app/config.py Normal file
View File

@@ -0,0 +1,11 @@
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application settings loaded from the environment / .env file."""

    DATABASE_URL: str
    GPX_STORAGE_PATH: str
    AI_MODEL: str = "openrouter/auto"
    # Required by the API-key middleware in app.main (settings.API_KEY);
    # without this field every authenticated request raised AttributeError.
    API_KEY: str = ""
    # Integration credentials documented in README / .env.example.
    GARMIN_USERNAME: str = ""
    GARMIN_PASSWORD: str = ""
    OPENROUTER_API_KEY: str = ""

    class Config:
        env_file = ".env"


settings = Settings()

17
backend/app/database.py Normal file
View File

@@ -0,0 +1,17 @@
import os

from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import declarative_base, sessionmaker

# Development fallback; docker-compose normally injects DATABASE_URL.
_DEFAULT_DATABASE_URL = "postgresql+asyncpg://appuser:password@db:5432/cyclingdb"


def get_database_url() -> str:
    """Return the database URL from the environment, else the dev default.

    app.main imports this name for its migration checks; it previously did
    not exist, so `from .database import get_db, get_database_url` failed
    at import time.
    """
    return os.environ.get("DATABASE_URL", _DEFAULT_DATABASE_URL)


DATABASE_URL = get_database_url()

# echo=True logs every SQL statement — appropriate for development only.
engine = create_async_engine(DATABASE_URL, echo=True)

AsyncSessionLocal = sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False
)

Base = declarative_base()


async def get_db() -> AsyncSession:
    """FastAPI dependency that yields one AsyncSession per request."""
    async with AsyncSessionLocal() as session:
        yield session

107
backend/app/main.py Normal file
View File

@@ -0,0 +1,107 @@
from fastapi import FastAPI, Depends, Request, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from .database import get_db, get_database_url
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from alembic.config import Config
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
from .routes import gpx as gpx_routes
from .routes import rule as rule_routes
from .routes import plan as plan_routes
from .routes import workouts as workout_routes
from .routes import prompts as prompt_routes
from .config import settings
app = FastAPI(
title="AI Cycling Coach API",
description="Backend service for AI-assisted cycling training platform",
version="0.1.0"
)
# API Key Authentication Middleware
@app.middleware("http")
async def api_key_auth(request: Request, call_next):
    """Reject requests lacking a valid X-API-KEY header.

    Docs, the OpenAPI schema (needed for /docs to render), and /health stay
    open so browsing and monitoring work without credentials.
    """
    open_prefixes = ("/docs", "/redoc", "/openapi.json")
    if request.url.path.startswith(open_prefixes) or request.url.path == "/health":
        return await call_next(request)
    if request.headers.get("X-API-KEY") != settings.API_KEY:
        # Raising HTTPException inside Starlette middleware bypasses
        # FastAPI's exception handlers and surfaces as a 500; return the
        # 401 response directly instead.
        from fastapi.responses import JSONResponse
        return JSONResponse(status_code=401, content={"detail": "Invalid API Key"})
    return await call_next(request)
# Configure CORS
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Include routers
app.include_router(gpx_routes.router)
app.include_router(rule_routes.router)
app.include_router(plan_routes.router)
app.include_router(workout_routes.router, prefix="/workouts", tags=["workouts"])
app.include_router(prompt_routes.router, prefix="/prompts", tags=["prompts"])
async def check_migration_status():
    """Compare the database's current Alembic revision with the script head.

    Returns a dict with current/head revisions and an up-to-date flag.
    Errors are reported inside the payload rather than raised so /health
    can always respond.
    """
    try:
        # Get Alembic configuration
        config = Config("alembic.ini")
        config.set_main_option("sqlalchemy.url", get_database_url())
        script = ScriptDirectory.from_config(config)

        # MigrationContext needs a synchronous connection; strip the
        # asyncpg driver marker — a "+asyncpg" URL makes the sync
        # create_engine() fail.
        from sqlalchemy import create_engine
        sync_url = get_database_url().replace("+asyncpg", "")
        engine = create_engine(sync_url)
        try:
            with engine.connect() as conn:
                context = MigrationContext.configure(conn)
                current_rev = context.get_current_revision()
        finally:
            engine.dispose()  # don't leak the ad-hoc connection pool

        head_rev = script.get_current_head()
        return {
            "current_revision": current_rev,
            "head_revision": head_rev,
            "migrations_up_to_date": current_rev == head_rev
        }
    except Exception as e:
        return {
            "error": str(e),
            "migrations_up_to_date": False
        }
@app.get("/health")
async def health_check(db: AsyncSession = Depends(get_db)):
"""Enhanced health check with migration verification."""
health_status = {
"status": "healthy",
"version": "0.1.0",
"timestamp": "2024-01-15T10:30:00Z" # Should be dynamic
}
# Database connection check
try:
await db.execute(text("SELECT 1"))
health_status["database"] = "connected"
except Exception as e:
health_status["status"] = "unhealthy"
health_status["database"] = f"error: {str(e)}"
# Migration status check
migration_info = await check_migration_status()
health_status["migrations"] = migration_info
if not migration_info.get("migrations_up_to_date", False):
health_status["status"] = "unhealthy"
return health_status
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)

View File

@@ -0,0 +1,11 @@
# Re-export the declarative Base alongside the models so that
# `from app.models import Base` (used by alembic/env.py) works;
# previously only BaseModel was exported, breaking that import.
from .base import Base, BaseModel
from .route import Route
from .section import Section
from .rule import Rule
from .plan import Plan
from .plan_rule import PlanRule
from .user import User
from .workout import Workout
from .analysis import Analysis
from .prompt import Prompt
from .garmin_sync_log import GarminSyncLog

View File

@@ -0,0 +1,17 @@
from sqlalchemy import Column, Integer, String, ForeignKey, JSON, Boolean, DateTime
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel


class Analysis(BaseModel):
    """AI-generated feedback for a single workout."""

    __tablename__ = "analyses"

    # UUID foreign key: BaseModel primary keys are UUIDs, so the original
    # Integer column could never reference workouts.id.
    workout_id = Column(PG_UUID(as_uuid=True), ForeignKey("workouts.id"), nullable=False)
    # Kind of analysis; currently only workout reviews are produced.
    analysis_type = Column(String(50), default='workout_review')
    jsonb_feedback = Column(JSON)  # AI-generated feedback payload
    suggestions = Column(JSON)  # AI-generated suggestions payload
    approved = Column(Boolean, default=False)  # whether the user accepted them

    # Relationships
    workout = relationship("Workout", back_populates="analyses")

View File

@@ -0,0 +1,17 @@
from datetime import datetime
from uuid import UUID, uuid4
from sqlalchemy import Column, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.dialects.postgresql import UUID as PG_UUID

# Declarative base shared by every model in app.models.
# NOTE(review): app.database creates its own, separate Base; alembic's
# env.py imports this one, so only tables registered here are
# autogenerated — confirm the duplication is intentional.
Base = declarative_base()


class BaseModel(Base):
    """Abstract base: UUID primary key plus created/updated timestamps."""

    __abstract__ = True

    # Client-side UUID primary key, generated in Python via uuid4.
    id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid4)
    created_at = Column(DateTime, default=datetime.utcnow)
    # Refreshed automatically on every UPDATE.
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    def __repr__(self):
        return f"<{self.__class__.__name__} {self.id}>"

View File

@@ -0,0 +1,12 @@
from sqlalchemy import Column, Integer, DateTime, String, Text
from .base import BaseModel


class GarminSyncLog(BaseModel):
    """One row per Garmin sync attempt, used for status reporting."""

    __tablename__ = "garmin_sync_log"

    last_sync_time = Column(DateTime)  # when the sync run finished
    activities_synced = Column(Integer, default=0)  # activities pulled in this run
    status = Column(String(20))  # success, error, in_progress
    error_message = Column(Text)  # populated when status == "error"

View File

@@ -0,0 +1,14 @@
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.dialects.postgresql import JSONB, UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel


class Plan(BaseModel):
    """A versioned training plan; new versions link back to their parent."""

    __tablename__ = "plans"

    # Owning user. Required so User.plans(back_populates="user") can
    # resolve — without it SQLAlchemy raises at mapper configuration time.
    user_id = Column(PG_UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    jsonb_plan = Column(JSONB, nullable=False)  # full plan document
    version = Column(Integer, nullable=False)
    # UUID foreign key: BaseModel primary keys are UUIDs, not Integers.
    parent_plan_id = Column(PG_UUID(as_uuid=True), ForeignKey('plans.id'), nullable=True)

    user = relationship("User", back_populates="plans")
    parent_plan = relationship("Plan", remote_side="Plan.id", backref="child_plans")
    workouts = relationship("Workout", back_populates="plan", cascade="all, delete-orphan")

View File

@@ -0,0 +1,13 @@
from sqlalchemy import Column, Integer, String, Text, Boolean, DateTime
from .base import BaseModel


class Prompt(BaseModel):
    """Versioned AI prompt; one active version per action type."""

    __tablename__ = "prompts"

    # One of: plan_generation, workout_analysis, rule_parsing, suggestions.
    action_type = Column(String(50), nullable=False)
    model = Column(String(100))  # AI model identifier
    prompt_text = Column(Text, nullable=False)  # full prompt body
    version = Column(Integer, default=1)  # increments per action_type
    active = Column(Boolean, default=True)  # whether this version is served

View File

@@ -0,0 +1,14 @@
from sqlalchemy import Column, String, Float, ForeignKey
from sqlalchemy.orm import relationship
from .base import BaseModel


class Route(BaseModel):
    """A stored GPX route with aggregate distance/elevation statistics."""

    __tablename__ = "routes"

    name = Column(String(100), nullable=False)
    description = Column(String(500))
    total_distance = Column(Float, nullable=False)  # assumes meters — TODO confirm
    elevation_gain = Column(Float, nullable=False)  # assumes meters — TODO confirm
    gpx_file_path = Column(String(255), nullable=False)  # path to the stored GPX file

    # Sections are owned by the route and removed with it.
    sections = relationship("Section", back_populates="route", cascade="all, delete-orphan")

View File

@@ -0,0 +1,14 @@
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean
from sqlalchemy.dialects.postgresql import JSONB, UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel


class Rule(BaseModel):
    """User- or system-defined training rule, versioned like plans."""

    __tablename__ = "rules"

    # String and relationship were used without being imported, so this
    # module raised NameError at import time — both are imported above now.
    name = Column(String(100), nullable=False)
    user_defined = Column(Boolean, default=True)  # False for built-in rules
    jsonb_rules = Column(JSONB, nullable=False)  # structured rule definition
    version = Column(Integer, default=1)
    # UUID foreign key to match BaseModel's UUID primary keys.
    parent_rule_id = Column(PG_UUID(as_uuid=True), ForeignKey('rules.id'), nullable=True)

    parent_rule = relationship("Rule", remote_side="Rule.id")

View File

@@ -0,0 +1,15 @@
from sqlalchemy import Column, String, Float, ForeignKey
from sqlalchemy.orm import relationship
from .base import BaseModel


class Section(BaseModel):
    """A segment of a route with its own GPX extract and statistics."""

    __tablename__ = "sections"

    route_id = Column(ForeignKey("routes.id"), nullable=False)  # owning route
    gpx_file_path = Column(String(255), nullable=False)  # stored GPX extract
    distance_m = Column(Float, nullable=False)  # section length in meters
    grade_avg = Column(Float)  # average grade — assumes percent, TODO confirm
    min_gear = Column(String(50))  # recommended minimum gearing
    est_time_minutes = Column(Float)  # estimated riding time

    route = relationship("Route", back_populates="sections")

View File

@@ -0,0 +1,7 @@
from .base import BaseModel
from sqlalchemy.orm import relationship


class User(BaseModel):
    """Single application user (single-user, self-hosted deployment)."""

    __tablename__ = "users"

    # NOTE(review): back_populates="user" requires a matching `user`
    # relationship on Plan — confirm Plan defines one.
    plans = relationship("Plan", back_populates="user")

View File

@@ -0,0 +1,26 @@
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, JSON, Boolean
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel


class Workout(BaseModel):
    """A synced Garmin activity, optionally linked to a training plan."""

    __tablename__ = "workouts"

    # UUID foreign key: plans.id is a UUID (from BaseModel), so the
    # original Integer column could never reference it.
    plan_id = Column(PG_UUID(as_uuid=True), ForeignKey("plans.id"), nullable=True)
    garmin_activity_id = Column(String(255), unique=True, nullable=False)
    activity_type = Column(String(50))  # e.g. cycling — TODO confirm values
    start_time = Column(DateTime, nullable=False)
    duration_seconds = Column(Integer)
    distance_m = Column(Float)
    avg_hr = Column(Integer)
    max_hr = Column(Integer)
    avg_power = Column(Float)
    max_power = Column(Float)
    avg_cadence = Column(Float)
    elevation_gain_m = Column(Float)
    metrics = Column(JSON)  # full Garmin payload

    # Relationships
    plan = relationship("Plan", back_populates="workouts")
    analyses = relationship("Analysis", back_populates="workout", cascade="all, delete-orphan")

35
backend/app/routes/gpx.py Normal file
View File

@@ -0,0 +1,35 @@
from fastapi import APIRouter, UploadFile, File, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.services.gpx import parse_gpx, store_gpx_file
from app.schemas.gpx import RouteCreate, Route as RouteSchema
from app.models import Route
import os

router = APIRouter(prefix="/gpx", tags=["GPX Routes"])


@router.post("/upload", response_model=RouteSchema)
async def upload_gpx_route(
    file: UploadFile = File(...),
    db: AsyncSession = Depends(get_db)
):
    """Persist an uploaded GPX file, parse it, and create a Route row."""
    # Save the raw upload first so parsing works from the stored copy.
    stored_path = await store_gpx_file(file)
    parsed = await parse_gpx(stored_path)

    # Build the ORM row from the validated schema payload.
    payload = RouteCreate(
        name=file.filename,
        description=f"Uploaded from {file.filename}",
        total_distance=parsed['total_distance'],
        elevation_gain=parsed['elevation_gain'],
        gpx_file_path=stored_path
    )
    new_route = Route(**payload.dict())
    db.add(new_route)
    await db.commit()
    await db.refresh(new_route)
    return new_route

View File

@@ -0,0 +1,89 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
from app.database import get_db
from app.models import Plan, PlanRule, Rule
from app.schemas.plan import PlanCreate, Plan as PlanSchema
from uuid import UUID

router = APIRouter(prefix="/plans", tags=["Training Plans"])


@router.post("/", response_model=PlanSchema)
async def create_plan(
    plan: PlanCreate,
    db: AsyncSession = Depends(get_db)
):
    """Create a plan and attach the requested rules to it."""
    # NOTE(review): assumes Plan defines user_id/start_date/end_date/goal
    # columns — confirm against app/models/plan.py.
    db_plan = Plan(
        user_id=plan.user_id,
        start_date=plan.start_date,
        end_date=plan.end_date,
        goal=plan.goal
    )
    db.add(db_plan)
    await db.flush()  # flush so db_plan.id exists before the join rows

    # Add rules to plan
    for rule_id in plan.rule_ids:
        db.add(PlanRule(plan_id=db_plan.id, rule_id=rule_id))

    await db.commit()
    await db.refresh(db_plan)
    return db_plan


@router.get("/{plan_id}", response_model=PlanSchema)
async def read_plan(
    plan_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Fetch a single plan by UUID."""
    plan = await db.get(Plan, plan_id)
    if not plan:
        raise HTTPException(status_code=404, detail="Plan not found")
    return plan


@router.get("/", response_model=list[PlanSchema])
async def read_plans(
    db: AsyncSession = Depends(get_db)
):
    """List all plans."""
    result = await db.execute(select(Plan))
    return result.scalars().all()


@router.put("/{plan_id}", response_model=PlanSchema)
async def update_plan(
    plan_id: UUID,
    plan: PlanCreate,
    db: AsyncSession = Depends(get_db)
):
    """Replace a plan's fields and its rule associations."""
    db_plan = await db.get(Plan, plan_id)
    if not db_plan:
        raise HTTPException(status_code=404, detail="Plan not found")

    # Update plan fields
    db_plan.user_id = plan.user_id
    db_plan.start_date = plan.start_date
    db_plan.end_date = plan.end_date
    db_plan.goal = plan.goal

    # `PlanRule.delete()` is not a valid SQLAlchemy call (ORM classes have
    # no .delete() classmethod); use a core DELETE statement instead.
    await db.execute(delete(PlanRule).where(PlanRule.plan_id == plan_id))
    for rule_id in plan.rule_ids:
        db.add(PlanRule(plan_id=plan_id, rule_id=rule_id))

    await db.commit()
    await db.refresh(db_plan)
    return db_plan


@router.delete("/{plan_id}")
async def delete_plan(
    plan_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Delete a plan by UUID."""
    plan = await db.get(Plan, plan_id)
    if not plan:
        raise HTTPException(status_code=404, detail="Plan not found")
    await db.delete(plan)
    await db.commit()
    return {"detail": "Plan deleted"}

View File

@@ -0,0 +1,79 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import List
from uuid import UUID
from app.database import get_db
from app.models.prompt import Prompt
from app.schemas.prompt import Prompt as PromptSchema, PromptCreate, PromptUpdate
from app.services.prompt_manager import PromptManager

router = APIRouter()


@router.get("/", response_model=List[PromptSchema])
async def read_prompts(db: AsyncSession = Depends(get_db)):
    """Get all prompts."""
    result = await db.execute(select(Prompt))
    return result.scalars().all()


# Static-prefix routes are registered before "/{prompt_id}" so they can
# never be shadowed by the dynamic route.
@router.get("/active/{action_type}")
async def get_active_prompt(
    action_type: str,
    db: AsyncSession = Depends(get_db)
):
    """Get the active prompt text for a specific action type."""
    prompt_manager = PromptManager(db)
    prompt_text = await prompt_manager.get_active_prompt(action_type)
    if not prompt_text:
        raise HTTPException(status_code=404, detail=f"No active prompt found for {action_type}")
    return {"action_type": action_type, "prompt_text": prompt_text}


@router.get("/history/{action_type}", response_model=List[PromptSchema])
async def get_prompt_history(
    action_type: str,
    db: AsyncSession = Depends(get_db)
):
    """Get the version history for a specific action type."""
    prompt_manager = PromptManager(db)
    prompts = await prompt_manager.get_prompt_history(action_type)
    return prompts


@router.get("/{prompt_id}", response_model=PromptSchema)
async def read_prompt(prompt_id: UUID, db: AsyncSession = Depends(get_db)):
    """Get a specific prompt by ID.

    prompt_id is a UUID: Prompt inherits BaseModel's UUID primary key, so
    the previous `int` parameter could never match a stored row.
    """
    prompt = await db.get(Prompt, prompt_id)
    if not prompt:
        raise HTTPException(status_code=404, detail="Prompt not found")
    return prompt


@router.post("/", response_model=PromptSchema)
async def create_prompt(
    prompt: PromptCreate,
    db: AsyncSession = Depends(get_db)
):
    """Create a new prompt version."""
    prompt_manager = PromptManager(db)
    new_prompt = await prompt_manager.create_prompt_version(
        action_type=prompt.action_type,
        prompt_text=prompt.prompt_text,
        model=prompt.model
    )
    return new_prompt


@router.post("/{prompt_id}/activate")
async def activate_prompt_version(
    prompt_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Activate a specific prompt version (deactivating its siblings)."""
    prompt_manager = PromptManager(db)
    success = await prompt_manager.activate_prompt_version(prompt_id)
    if not success:
        raise HTTPException(status_code=404, detail="Prompt not found")
    return {"message": "Prompt version activated successfully"}

View File

@@ -0,0 +1,66 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.database import get_db
from app.models import Rule
from app.schemas.rule import RuleCreate, Rule as RuleSchema
from uuid import UUID

router = APIRouter(prefix="/rules", tags=["Rules"])


@router.post("/", response_model=RuleSchema)
async def create_rule(
    rule: RuleCreate,
    db: AsyncSession = Depends(get_db)
):
    """Create a new rule from the request payload."""
    db_rule = Rule(**rule.dict())
    db.add(db_rule)
    await db.commit()
    await db.refresh(db_rule)
    return db_rule


@router.get("/{rule_id}", response_model=RuleSchema)
async def read_rule(
    rule_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Fetch a single rule by UUID."""
    rule = await db.get(Rule, rule_id)
    if not rule:
        raise HTTPException(status_code=404, detail="Rule not found")
    return rule


@router.get("/", response_model=list[RuleSchema])
async def read_rules(
    db: AsyncSession = Depends(get_db)
):
    """List all rules."""
    # The original called `sa.select(Rule)` without ever importing
    # sqlalchemy as sa (NameError); `select` is imported directly above.
    result = await db.execute(select(Rule))
    return result.scalars().all()


@router.put("/{rule_id}", response_model=RuleSchema)
async def update_rule(
    rule_id: UUID,
    rule: RuleCreate,
    db: AsyncSession = Depends(get_db)
):
    """Overwrite all mutable fields of an existing rule."""
    db_rule = await db.get(Rule, rule_id)
    if not db_rule:
        raise HTTPException(status_code=404, detail="Rule not found")
    for key, value in rule.dict().items():
        setattr(db_rule, key, value)
    await db.commit()
    await db.refresh(db_rule)
    return db_rule


@router.delete("/{rule_id}")
async def delete_rule(
    rule_id: UUID,
    db: AsyncSession = Depends(get_db)
):
    """Delete a rule by UUID."""
    rule = await db.get(Rule, rule_id)
    if not rule:
        raise HTTPException(status_code=404, detail="Rule not found")
    await db.delete(rule)
    await db.commit()
    return {"detail": "Rule deleted"}

View File

@@ -0,0 +1,138 @@
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import List
from app.database import get_db
from app.models.workout import Workout
from app.models.analysis import Analysis
from app.models.garmin_sync_log import GarminSyncLog
from app.models.plan import Plan
from app.schemas.workout import Workout as WorkoutSchema, WorkoutSyncStatus
from app.schemas.analysis import Analysis as AnalysisSchema
from app.services.workout_sync import WorkoutSyncService
from app.services.ai_service import AIService
from app.services.plan_evolution import PlanEvolutionService
router = APIRouter()
@router.get("/", response_model=List[WorkoutSchema])
async def read_workouts(db: AsyncSession = Depends(get_db)):
"""Get all workouts."""
result = await db.execute(select(Workout))
return result.scalars().all()
@router.get("/{workout_id}", response_model=WorkoutSchema)
async def read_workout(workout_id: int, db: AsyncSession = Depends(get_db)):
"""Get a specific workout by ID."""
workout = await db.get(Workout, workout_id)
if not workout:
raise HTTPException(status_code=404, detail="Workout not found")
return workout
@router.post("/sync")
async def trigger_garmin_sync(
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Trigger background sync of recent Garmin activities."""
    # NOTE(review): the request-scoped session `db` is captured by a task
    # that runs after the response is sent; depending on how get_db manages
    # session lifetime it may already be closed by then — verify, and
    # consider creating a fresh session inside the task instead.
    sync_service = WorkoutSyncService(db)
    # Sync window is fixed at the last 14 days.
    background_tasks.add_task(sync_service.sync_recent_activities, days_back=14)
    return {"message": "Garmin sync started"}
@router.get("/sync-status", response_model=WorkoutSyncStatus)
async def get_sync_status(db: AsyncSession = Depends(get_db)):
    """Get the latest sync status."""
    newest_first = (
        select(GarminSyncLog)
        .order_by(GarminSyncLog.created_at.desc())
        .limit(1)
    )
    latest = (await db.execute(newest_first)).scalar_one_or_none()
    if latest is None:
        # No sync has ever been recorded.
        return WorkoutSyncStatus(status="never_synced")
    return latest
@router.post("/{workout_id}/analyze")
async def analyze_workout(
    workout_id: int,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Trigger AI analysis of a specific workout.

    Responds 404 when the workout id is unknown; the analysis itself runs
    in a background task after the response is sent.
    """
    workout = await db.get(Workout, workout_id)
    if not workout:
        raise HTTPException(status_code=404, detail="Workout not found")
    ai_service = AIService(db)
    # NOTE(review): `db` is the request-scoped session; the background task
    # runs after the response, when the session may already be closed
    # (depends on get_db's lifetime) — verify before relying on this.
    background_tasks.add_task(
        analyze_and_store_workout,
        db, workout, ai_service
    )
    return {"message": "Analysis started", "workout_id": workout_id}
async def analyze_and_store_workout(db: AsyncSession, workout: Workout, ai_service: AIService):
    """Background task: run AI analysis for one workout and persist it.

    Errors are logged, not raised, so a failing analysis cannot crash the
    background-task runner; the session is rolled back so it stays usable.
    """
    import logging
    logger = logging.getLogger(__name__)
    try:
        # Get the current plan if the workout is associated with one
        plan = None
        if workout.plan_id:
            plan = await db.get(Plan, workout.plan_id)
        # Analyze workout against the planned session (if any)
        analysis_result = await ai_service.analyze_workout(
            workout, plan.jsonb_plan if plan else None
        )
        # Store analysis
        analysis = Analysis(
            workout_id=workout.id,
            jsonb_feedback=analysis_result.get("feedback", {}),
            suggestions=analysis_result.get("suggestions", {})
        )
        db.add(analysis)
        await db.commit()
    except Exception:
        # Roll back so the shared session isn't left in a failed state,
        # and log with traceback instead of the original bare print().
        await db.rollback()
        logger.exception("Error analyzing workout %s", workout.id)
@router.get("/{workout_id}/analyses", response_model=List[AnalysisSchema])
async def read_workout_analyses(workout_id: int, db: AsyncSession = Depends(get_db)):
    """Get all analyses for a specific workout (404 if the workout is unknown)."""
    workout = await db.get(Workout, workout_id)
    if not workout:
        raise HTTPException(status_code=404, detail="Workout not found")
    # Query explicitly rather than returning workout.analyses: lazy
    # relationship access raises (MissingGreenlet) under AsyncSession
    # unless the relationship is eagerly loaded.
    result = await db.execute(
        select(Analysis).where(Analysis.workout_id == workout_id)
    )
    return result.scalars().all()
@router.post("/analyses/{analysis_id}/approve")
async def approve_analysis(
    analysis_id: int,
    db: AsyncSession = Depends(get_db)
):
    """Approve analysis suggestions and, when possible, evolve the plan.

    Returns the new plan version's id when plan evolution produced one.
    Responds 404 when the analysis id is unknown.
    """
    analysis = await db.get(Analysis, analysis_id)
    if not analysis:
        raise HTTPException(status_code=404, detail="Analysis not found")
    analysis.approved = True
    new_plan = None
    # Load the workout explicitly instead of touching analysis.workout:
    # lazy relationship access raises under AsyncSession.
    workout = await db.get(Workout, analysis.workout_id)
    if analysis.suggestions and workout and workout.plan_id:
        current_plan = await db.get(Plan, workout.plan_id)
        if current_plan:
            evolution_service = PlanEvolutionService(db)
            new_plan = await evolution_service.evolve_plan_from_analysis(
                analysis, current_plan
            )
    # Single commit covers the approval flag and any evolution side effects
    # (the original had two divergent commit/return paths).
    await db.commit()
    if new_plan:
        return {"message": "Analysis approved", "new_plan_id": new_plan.id}
    return {"message": "Analysis approved"}

View File

@@ -0,0 +1,25 @@
from pydantic import BaseModel
from typing import Optional, Dict, Any
class AnalysisBase(BaseModel):
    """Shared fields for workout-analysis payloads."""
    workout_id: int
    analysis_type: str = 'workout_review'
    jsonb_feedback: Optional[Dict[str, Any]] = None
    suggestions: Optional[Dict[str, Any]] = None
    approved: bool = False
class AnalysisCreate(AnalysisBase):
    """Payload for creating an analysis (no extra fields)."""
    pass
class Analysis(AnalysisBase):
    """Analysis as returned by the API, including its database id."""
    id: int
    class Config:
        # pydantic v2 spelling; `orm_mode` is the removed v1 name and the
        # installed stack (pydantic-settings 2.x) is pydantic v2.
        from_attributes = True
class AnalysisUpdate(BaseModel):
    """Partial update: only the approval flag may change."""
    approved: bool

View File

@@ -0,0 +1,25 @@
from pydantic import BaseModel
from typing import Optional, List
class GPXData(BaseModel):
    """Parsed GPX metrics: distance (m), climb (m), and raw track points."""
    total_distance: float
    elevation_gain: float
    points: List[dict]
class RouteCreate(BaseModel):
    """Payload for registering a route from an uploaded GPX file."""
    name: str
    description: Optional[str] = None
    total_distance: float
    elevation_gain: float
    gpx_file_path: str
class Route(BaseModel):
    """Route as returned by the API, including its database id."""
    id: str
    name: str
    description: Optional[str] = None
    total_distance: float
    elevation_gain: float
    gpx_file_path: str
    class Config:
        # pydantic v2 spelling of the v1 `orm_mode` flag
        from_attributes = True

View File

@@ -0,0 +1,19 @@
from pydantic import BaseModel
from datetime import datetime
from typing import List, Optional
from uuid import UUID
class PlanBase(BaseModel):
    """Shared fields for training-plan payloads."""
    user_id: UUID
    start_date: datetime
    end_date: datetime
    goal: str
class PlanCreate(PlanBase):
    """Plan creation payload: the rules the generator must respect."""
    rule_ids: List[UUID]
class Plan(PlanBase):
    """Plan as returned by the API, including its database id."""
    id: UUID
    class Config:
        # pydantic v2 spelling of the v1 `orm_mode` flag
        from_attributes = True

View File

@@ -0,0 +1,30 @@
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
class PromptBase(BaseModel):
    """Shared fields for AI prompt records (versioned per action type)."""
    action_type: str
    model: Optional[str] = None
    prompt_text: str
    version: int = 1
    active: bool = True
class PromptCreate(BaseModel):
    """Payload for creating a new prompt version."""
    action_type: str
    prompt_text: str
    model: Optional[str] = None
class PromptUpdate(BaseModel):
    """Partial update of a prompt's text and/or active flag."""
    prompt_text: Optional[str] = None
    active: Optional[bool] = None
class Prompt(PromptBase):
    """Prompt as returned by the API, including id and creation time."""
    id: int
    created_at: datetime
    class Config:
        # pydantic v2 spelling of the v1 `orm_mode` flag
        from_attributes = True

View File

@@ -0,0 +1,17 @@
from pydantic import BaseModel
from typing import Optional
class RuleBase(BaseModel):
    """Shared fields for training-rule payloads."""
    name: str
    description: Optional[str] = None
    condition: str
    priority: int = 0
class RuleCreate(RuleBase):
    """Payload for creating a rule (no extra fields)."""
    pass
class Rule(RuleBase):
    """Rule as returned by the API, including its database id."""
    id: str
    class Config:
        # pydantic v2 spelling of the v1 `orm_mode` flag
        from_attributes = True

View File

@@ -0,0 +1,40 @@
from pydantic import BaseModel
from typing import Optional, Dict, Any
from datetime import datetime
class WorkoutBase(BaseModel):
    """Shared fields for a synced Garmin activity (SI units: m, seconds)."""
    garmin_activity_id: str
    activity_type: Optional[str] = None
    start_time: datetime
    duration_seconds: Optional[int] = None
    distance_m: Optional[float] = None
    avg_hr: Optional[int] = None
    max_hr: Optional[int] = None
    avg_power: Optional[float] = None
    max_power: Optional[float] = None
    avg_cadence: Optional[float] = None
    elevation_gain_m: Optional[float] = None
    # Full raw Garmin payload, stored as JSONB
    metrics: Optional[Dict[str, Any]] = None
class WorkoutCreate(WorkoutBase):
    """Creation payload; optionally tied to a training plan."""
    plan_id: Optional[int] = None
class Workout(WorkoutBase):
    """Workout as returned by the API, including its database id."""
    id: int
    plan_id: Optional[int] = None
    class Config:
        # pydantic v2 spelling of the v1 `orm_mode` flag
        from_attributes = True
class WorkoutSyncStatus(BaseModel):
    """Status of the most recent Garmin sync run."""
    status: str
    last_sync_time: Optional[datetime] = None
    activities_synced: int = 0
    error_message: Optional[str] = None
    class Config:
        # pydantic v2 spelling of the v1 `orm_mode` flag
        from_attributes = True

View File

@@ -0,0 +1,130 @@
import os
import asyncio
from typing import Dict, Any, List, Optional
import httpx
import json
from app.services.prompt_manager import PromptManager
from app.models.workout import Workout
import logging
logger = logging.getLogger(__name__)
class AIService:
    """Service for AI-powered analysis and plan generation via OpenRouter."""
    def __init__(self, db_session):
        self.db = db_session
        self.prompt_manager = PromptManager(db_session)
        # Credentials/model come from the environment; the model falls back
        # to a Claude 3 Sonnet identifier.
        self.api_key = os.getenv("OPENROUTER_API_KEY")
        self.model = os.getenv("AI_MODEL", "anthropic/claude-3-sonnet-20240229")
        self.base_url = "https://openrouter.ai/api/v1"
    async def analyze_workout(self, workout: Workout, plan: Optional[Dict] = None) -> Dict[str, Any]:
        """Analyze a workout using AI and generate feedback.

        Raises ValueError when no active 'workout_analysis' prompt exists.
        """
        prompt_template = await self.prompt_manager.get_active_prompt("workout_analysis")
        if not prompt_template:
            raise ValueError("No active workout analysis prompt found")
        # Build context from workout data (defensive against NULL metrics)
        workout_context = {
            "activity_type": workout.activity_type,
            "duration_minutes": workout.duration_seconds / 60 if workout.duration_seconds else 0,
            "distance_km": workout.distance_m / 1000 if workout.distance_m else 0,
            "avg_hr": workout.avg_hr,
            "avg_power": workout.avg_power,
            "elevation_gain": workout.elevation_gain_m,
            "planned_workout": plan
        }
        prompt = prompt_template.format(**workout_context)
        response = await self._make_ai_request(prompt)
        return self._parse_workout_analysis(response)
    async def generate_plan(self, rules: List[Dict], goals: Dict[str, Any]) -> Dict[str, Any]:
        """Generate a training plan using AI.

        Raises ValueError when no active 'plan_generation' prompt exists
        (the original crashed with AttributeError on None.format instead).
        """
        prompt_template = await self.prompt_manager.get_active_prompt("plan_generation")
        if not prompt_template:
            raise ValueError("No active plan generation prompt found")
        context = {
            "rules": rules,
            "goals": goals,
            "current_fitness_level": goals.get("fitness_level", "intermediate")
        }
        prompt = prompt_template.format(**context)
        response = await self._make_ai_request(prompt)
        return self._parse_plan_response(response)
    async def parse_rules_from_natural_language(self, natural_language: str) -> Dict[str, Any]:
        """Parse natural language rules into structured format.

        Raises ValueError when no active 'rule_parsing' prompt exists.
        """
        prompt_template = await self.prompt_manager.get_active_prompt("rule_parsing")
        if not prompt_template:
            raise ValueError("No active rule parsing prompt found")
        prompt = prompt_template.format(user_rules=natural_language)
        response = await self._make_ai_request(prompt)
        return self._parse_rules_response(response)
    async def _make_ai_request(self, prompt: str) -> str:
        """POST the prompt to OpenRouter; retry up to 3 times with backoff.

        Raises AIServiceError after the final failed attempt.
        """
        async with httpx.AsyncClient() as client:
            for attempt in range(3):  # Simple retry logic
                try:
                    response = await client.post(
                        f"{self.base_url}/chat/completions",
                        headers={
                            "Authorization": f"Bearer {self.api_key}",
                            "Content-Type": "application/json",
                        },
                        json={
                            "model": self.model,
                            "messages": [{"role": "user", "content": prompt}],
                            "max_tokens": 2000,
                        },
                        timeout=30.0
                    )
                    response.raise_for_status()
                    data = response.json()
                    return data["choices"][0]["message"]["content"]
                except Exception as e:
                    if attempt == 2:  # Last attempt
                        logger.error(f"AI request failed after 3 attempts: {str(e)}")
                        raise AIServiceError(f"AI request failed after 3 attempts: {str(e)}")
                    await asyncio.sleep(2 ** attempt)  # Exponential backoff
    @staticmethod
    def _strip_json_fences(response: str) -> str:
        """Strip a surrounding ```json ... ``` markdown fence, if present.

        Shared by all three parsers below (the logic was triplicated), and
        tolerates a missing closing fence — the original sliced [7:-3]
        unconditionally, corrupting unfenced tails.
        """
        clean = response.strip()
        if clean.startswith("```json"):
            clean = clean[len("```json"):]
            if clean.endswith("```"):
                clean = clean[:-3]
        return clean
    def _parse_workout_analysis(self, response: str) -> Dict[str, Any]:
        """Parse AI response for workout analysis; fall back to raw text."""
        try:
            return json.loads(self._strip_json_fences(response))
        except json.JSONDecodeError:
            return {"raw_analysis": response, "structured": False}
    def _parse_plan_response(self, response: str) -> Dict[str, Any]:
        """Parse AI response for plan generation; fall back to raw text."""
        try:
            return json.loads(self._strip_json_fences(response))
        except json.JSONDecodeError:
            return {"raw_plan": response, "structured": False}
    def _parse_rules_response(self, response: str) -> Dict[str, Any]:
        """Parse AI response for rule parsing; fall back to raw text."""
        try:
            return json.loads(self._strip_json_fences(response))
        except json.JSONDecodeError:
            return {"raw_rules": response, "structured": False}
class AIServiceError(Exception):
    """Raised when AI service requests fail."""
    pass

View File

@@ -0,0 +1,84 @@
import os
import garth
from typing import List, Dict, Any, Optional
from datetime import datetime, timedelta
import logging
logger = logging.getLogger(__name__)
class GarminService:
    """Service for interacting with Garmin Connect via the garth client."""
    def __init__(self):
        self.username = os.getenv("GARMIN_USERNAME")
        self.password = os.getenv("GARMIN_PASSWORD")
        self.client: Optional[garth.Client] = None
        self.session_dir = "/app/data/sessions"
        # Ensure session directory exists
        os.makedirs(self.session_dir, exist_ok=True)
    async def authenticate(self) -> bool:
        """Authenticate with Garmin Connect and persist the session.

        Returns True on success; raises GarminAuthError on failure.
        """
        if self.client:
            # Already authenticated in this process. The original fell
            # through here and implicitly returned None, which reads as
            # False to any caller checking the result.
            return True
        self.client = garth.Client()
        try:
            # Try to load a previously persisted session first
            self.client.load(self.session_dir)
            logger.info("Loaded existing Garmin session")
            return True
        except Exception:
            # Fresh authentication required
            pass
        try:
            # garth is a synchronous (requests-based) client; the original
            # `await self.client.login(...)` would fail at runtime.
            # NOTE(review): confirm Client.load/save/login names against the
            # pinned garth==0.4.46 API.
            self.client.login(self.username, self.password)
            self.client.save(self.session_dir)
            logger.info("Successfully authenticated with Garmin Connect")
            return True
        except Exception as e:
            logger.error(f"Garmin authentication failed: {str(e)}")
            raise GarminAuthError(f"Authentication failed: {str(e)}")
    async def get_activities(self, limit: int = 10, start_date: datetime = None) -> List[Dict[str, Any]]:
        """Fetch recent activities from Garmin Connect.

        Defaults to the last 7 days when start_date is omitted.
        Raises GarminAPIError on any fetch failure.
        """
        if not self.client:
            await self.authenticate()
        if not start_date:
            start_date = datetime.now() - timedelta(days=7)
        try:
            activities = self.client.get_activities(limit=limit, start=start_date)
            logger.info(f"Fetched {len(activities)} activities from Garmin")
            return activities
        except Exception as e:
            logger.error(f"Failed to fetch activities: {str(e)}")
            raise GarminAPIError(f"Failed to fetch activities: {str(e)}")
    async def get_activity_details(self, activity_id: str) -> Dict[str, Any]:
        """Get detailed activity data including metrics.

        Raises GarminAPIError on any fetch failure.
        """
        if not self.client:
            await self.authenticate()
        try:
            details = self.client.get_activity(activity_id)
            logger.info(f"Fetched details for activity {activity_id}")
            return details
        except Exception as e:
            logger.error(f"Failed to fetch activity details for {activity_id}: {str(e)}")
            raise GarminAPIError(f"Failed to fetch activity details: {str(e)}")
    def is_authenticated(self) -> bool:
        """Check if we have a (presumed) authenticated client instance."""
        return self.client is not None
class GarminAuthError(Exception):
    """Raised when Garmin authentication fails."""
    pass
class GarminAPIError(Exception):
    """Raised when Garmin API calls fail."""
    pass

View File

@@ -0,0 +1,62 @@
import os
import uuid
import logging
from fastapi import UploadFile, HTTPException
import gpxpy
from app.config import settings
logger = logging.getLogger(__name__)
async def store_gpx_file(file: UploadFile) -> str:
    """Store an uploaded GPX file on disk and return its path.

    Raises 400 for non-.gpx uploads and 500 for storage failures.
    """
    try:
        file_ext = os.path.splitext(file.filename)[1]
        if file_ext.lower() != '.gpx':
            raise HTTPException(status_code=400, detail="Invalid file type")
        # Random name prevents collisions and path traversal via filename
        file_name = f"{uuid.uuid4()}{file_ext}"
        file_path = os.path.join(settings.GPX_STORAGE_PATH, file_name)
        # Ensure storage directory exists
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        # Save file
        with open(file_path, "wb") as f:
            f.write(await file.read())
        return file_path
    except HTTPException:
        # Don't rewrap deliberate HTTP errors (e.g. the 400 above) as 500s —
        # the original except Exception swallowed them.
        raise
    except Exception as e:
        logger.error(f"Error storing GPX file: {e}")
        raise HTTPException(status_code=500, detail="Error storing file")
async def parse_gpx(file_path: str) -> dict:
    """Parse a GPX file and extract distance, climb, and track points.

    Returns total_distance (m), elevation_gain (m), and a flat list of
    points across ALL tracks/segments — the original rebound `points`
    inside the loop, keeping only the last segment's points.
    """
    try:
        with open(file_path, 'r') as f:
            gpx = gpxpy.parse(f)
        total_distance = 0.0
        elevation_gain = 0.0
        points = []
        for track in gpx.tracks:
            for segment in track.segments:
                total_distance += segment.length_3d()
                for i in range(1, len(segment.points)):
                    prev_ele = segment.points[i - 1].elevation
                    cur_ele = segment.points[i].elevation
                    # GPX points may omit elevation; skip those pairs
                    # instead of crashing on None arithmetic.
                    if prev_ele is not None and cur_ele is not None:
                        elevation_gain += max(0, cur_ele - prev_ele)
                points.extend({
                    'lat': point.latitude,
                    'lon': point.longitude,
                    'ele': point.elevation,
                    'time': point.time.isoformat() if point.time else None
                } for point in segment.points)
        return {
            'total_distance': total_distance,
            'elevation_gain': elevation_gain,
            'points': points
        }
    except Exception as e:
        logger.error(f"Error parsing GPX file: {e}")
        raise HTTPException(status_code=500, detail="Error parsing GPX file")

View File

@@ -0,0 +1,74 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.services.ai_service import AIService
from app.models.analysis import Analysis
from app.models.plan import Plan
import logging
logger = logging.getLogger(__name__)
class PlanEvolutionService:
    """Service for evolving training plans based on workout analysis."""
    def __init__(self, db: AsyncSession):
        self.db = db
        self.ai_service = AIService(db)
    async def evolve_plan_from_analysis(
        self,
        analysis: Analysis,
        current_plan: Plan
    ) -> Plan:
        """Create a new plan version based on workout analysis.

        Returns the new Plan, or None when the analysis is unapproved or
        has no suggestions (despite the non-Optional return annotation —
        callers must handle None).
        """
        if not analysis.approved:
            return None
        suggestions = analysis.suggestions
        if not suggestions:
            return None
        # Generate new plan incorporating suggestions
        evolution_context = {
            "current_plan": current_plan.jsonb_plan,
            "workout_analysis": analysis.jsonb_feedback,
            "suggestions": suggestions,
            "evolution_type": "workout_feedback"
        }
        # NOTE(review): AIService in this snapshot defines analyze_workout /
        # generate_plan / parse_rules_from_natural_language but no
        # evolve_plan — this call will raise AttributeError; confirm.
        new_plan_data = await self.ai_service.evolve_plan(evolution_context)
        # Create new plan version, linked to its predecessor
        # NOTE(review): Plan payloads elsewhere carry user_id/start/end/goal;
        # verify those columns are nullable or defaulted for evolved rows.
        new_plan = Plan(
            jsonb_plan=new_plan_data,
            version=current_plan.version + 1,
            parent_plan_id=current_plan.id
        )
        self.db.add(new_plan)
        await self.db.commit()
        await self.db.refresh(new_plan)
        logger.info(f"Created new plan version {new_plan.version} from analysis {analysis.id}")
        return new_plan
    async def get_plan_evolution_history(self, plan_id: int) -> list[Plan]:
        """Get the evolution history for a plan.

        Matches the plan itself plus its direct children only (one level —
        grandchildren with a different parent_plan_id are not included).
        """
        result = await self.db.execute(
            select(Plan)
            .where(
                (Plan.id == plan_id) |
                (Plan.parent_plan_id == plan_id)
            )
            .order_by(Plan.version)
        )
        return result.scalars().all()
    async def get_current_active_plan(self) -> Plan:
        """Get the most recent active plan.

        Highest version across ALL plans (not per lineage); None when the
        table is empty.
        """
        result = await self.db.execute(
            select(Plan)
            .order_by(Plan.version.desc())
            .limit(1)
        )
        return result.scalar_one_or_none()

View File

@@ -0,0 +1,92 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update, func
from app.models.prompt import Prompt
import logging
logger = logging.getLogger(__name__)
class PromptManager:
    """Service for managing AI prompts with versioning."""
    def __init__(self, db: AsyncSession):
        self.db = db
    async def get_active_prompt(self, action_type: str, model: str = None) -> str:
        """Return the newest active prompt text for an action type, or None.

        Uses limit(1)/.first(): several rows can be active at once (e.g.
        model-specific variants), and the original scalar_one_or_none()
        raised MultipleResultsFound in that case; the version-desc ordering
        shows the intent was "take the newest".
        """
        query = select(Prompt).where(
            Prompt.action_type == action_type,
            Prompt.active == True
        )
        if model:
            query = query.where(Prompt.model == model)
        result = await self.db.execute(
            query.order_by(Prompt.version.desc()).limit(1)
        )
        prompt = result.scalars().first()
        return prompt.prompt_text if prompt else None
    async def create_prompt_version(
        self,
        action_type: str,
        prompt_text: str,
        model: str = None
    ) -> Prompt:
        """Create and activate a new prompt version for an action type.

        NOTE(review): this deactivates ALL prompts for the action type —
        including other models' variants — even when `model` is given;
        confirm that is intended.
        """
        # Deactivate previous versions
        await self.db.execute(
            update(Prompt)
            .where(Prompt.action_type == action_type)
            .values(active=False)
        )
        # Get next version number
        result = await self.db.execute(
            select(func.max(Prompt.version))
            .where(Prompt.action_type == action_type)
        )
        max_version = result.scalar() or 0
        # Create new prompt
        new_prompt = Prompt(
            action_type=action_type,
            model=model,
            prompt_text=prompt_text,
            version=max_version + 1,
            active=True
        )
        self.db.add(new_prompt)
        await self.db.commit()
        await self.db.refresh(new_prompt)
        logger.info(f"Created new prompt version {new_prompt.version} for {action_type}")
        return new_prompt
    async def get_prompt_history(self, action_type: str) -> list[Prompt]:
        """Get all prompt versions for an action type, newest first."""
        result = await self.db.execute(
            select(Prompt)
            .where(Prompt.action_type == action_type)
            .order_by(Prompt.version.desc())
        )
        return result.scalars().all()
    async def activate_prompt_version(self, prompt_id: int) -> bool:
        """Activate a specific prompt version, deactivating its siblings.

        Returns False when the prompt id is unknown.
        """
        prompt = await self.db.get(Prompt, prompt_id)
        if not prompt:
            return False
        # Deactivate every prompt for this action type, then re-activate
        # the requested one (the UPDATE above also cleared its flag).
        await self.db.execute(
            update(Prompt)
            .where(Prompt.action_type == prompt.action_type)
            .values(active=False)
        )
        prompt.active = True
        await self.db.commit()
        logger.info(f"Activated prompt version {prompt.version} for {prompt.action_type}")
        return True

View File

@@ -0,0 +1,90 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.services.garmin import GarminService, GarminAPIError
from app.models.workout import Workout
from app.models.garmin_sync_log import GarminSyncLog
from datetime import datetime, timedelta
import logging
logger = logging.getLogger(__name__)
class WorkoutSyncService:
    """Service for syncing Garmin activities to database."""
    def __init__(self, db: AsyncSession):
        self.db = db
        self.garmin_service = GarminService()
    async def sync_recent_activities(self, days_back: int = 7) -> int:
        """Sync recent Garmin activities into the workouts table.

        Returns the number of newly created workouts. Progress and errors
        are recorded in a GarminSyncLog row; exceptions are re-raised after
        being logged.
        """
        # Record the run before doing any remote work
        sync_log = GarminSyncLog(status="in_progress")
        self.db.add(sync_log)
        await self.db.commit()
        try:
            start_date = datetime.now() - timedelta(days=days_back)
            activities = await self.garmin_service.get_activities(
                limit=50, start_date=start_date
            )
            synced_count = 0
            for activity in activities:
                # Skip activities that were imported on a previous run
                if await self.activity_exists(activity['activityId']):
                    continue
                workout_data = await self.parse_activity_data(activity)
                self.db.add(Workout(**workout_data))
                synced_count += 1
            sync_log.status = "success"
            sync_log.activities_synced = synced_count
            sync_log.last_sync_time = datetime.now()
            await self.db.commit()
            logger.info(f"Successfully synced {synced_count} activities")
            return synced_count
        except GarminAPIError as e:
            # Roll back the failed transaction so the error status can be
            # committed on a clean session.
            await self.db.rollback()
            sync_log.status = "error"
            sync_log.error_message = str(e)
            await self.db.commit()
            logger.error(f"Garmin API error during sync: {str(e)}")
            raise
        except Exception as e:
            await self.db.rollback()
            sync_log.status = "error"
            sync_log.error_message = str(e)
            await self.db.commit()
            logger.error(f"Unexpected error during sync: {str(e)}")
            raise
    async def activity_exists(self, garmin_activity_id: str) -> bool:
        """Check if activity already exists in database."""
        result = await self.db.execute(
            select(Workout).where(Workout.garmin_activity_id == garmin_activity_id)
        )
        return result.scalar_one_or_none() is not None
    async def parse_activity_data(self, activity: dict) -> dict:
        """Parse a raw Garmin activity payload into Workout column values.

        Uses builtin `dict` annotations: the original `Dict[str, Any]`
        referenced typing names the module never imported, so importing
        this module raised NameError.
        """
        return {
            "garmin_activity_id": activity['activityId'],
            "activity_type": activity.get('activityType', {}).get('typeKey'),
            "start_time": datetime.fromisoformat(activity['startTimeLocal'].replace('Z', '+00:00')),
            "duration_seconds": activity.get('duration'),
            "distance_m": activity.get('distance'),
            "avg_hr": activity.get('averageHR'),
            "max_hr": activity.get('maxHR'),
            "avg_power": activity.get('avgPower'),
            "max_power": activity.get('maxPower'),
            "avg_cadence": activity.get('averageBikingCadenceInRevPerMinute'),
            "elevation_gain_m": activity.get('elevationGain'),
            "metrics": activity  # Store full Garmin data as JSONB
        }

5
backend/pytest.ini Normal file
View File

@@ -0,0 +1,5 @@
[pytest]
testpaths = tests
addopts = -p no:warnings --verbose
python_files = test_*.py
log_cli = true

11
backend/requirements.txt Normal file
View File

@@ -0,0 +1,11 @@
fastapi==0.110.0
uvicorn[standard]==0.29.0
python-dotenv==1.0.1
sqlalchemy==2.0.29
psycopg2-binary==2.9.9
alembic==1.13.1
pydantic-settings==2.2.1
python-multipart==0.0.9
gpxpy==1.6.2  # GPX parsing library (pinned for reproducible builds, like the other deps)
garth==0.4.46 # Garmin Connect API client
httpx==0.25.2 # Async HTTP client for OpenRouter API

View File

@@ -0,0 +1,240 @@
#!/usr/bin/env python3
"""
Database backup and restore utilities for containerized deployments.
Ensures safe backup/restore operations with migration compatibility checks.
"""
import sys
import os
import subprocess
import shutil
from pathlib import Path
from datetime import datetime
from typing import Optional
# Add backend directory to path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))
from app.database import get_database_url
class DatabaseManager:
    """Handles PostgreSQL backup and restore via pg_dump/pg_restore."""
    def __init__(self, backup_dir: str = "/app/data/backups"):
        self.backup_dir = Path(backup_dir)
        self.backup_dir.mkdir(parents=True, exist_ok=True)
    def get_db_connection_params(self):
        """Extract host/port/user/password/database from the database URL."""
        from urllib.parse import urlparse
        db_url = get_database_url()
        parsed = urlparse(db_url)
        return {
            'host': parsed.hostname,
            'port': parsed.port or 5432,
            'user': parsed.username,
            'password': parsed.password,
            'database': parsed.path.lstrip('/')
        }
    def create_backup(self, name: Optional[str] = None) -> str:
        """Create a database backup; returns the backup file path.

        NOTE(review): the dump uses --format=custom but the file gets a
        .sql extension (and list/cleanup glob *.sql) — misleading but
        internally consistent; consider .dump.
        """
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_name = name or f"backup_{timestamp}"
        backup_file = self.backup_dir / f"{backup_name}.sql"
        params = self.get_db_connection_params()
        # Use pg_dump for backup
        cmd = [
            "pg_dump",
            "-h", params['host'],
            "-p", str(params['port']),
            "-U", params['user'],
            "-d", params['database'],
            "-f", str(backup_file),
            "--no-password",
            "--format=custom",  # Custom format for better compression
            "--compress=9"
        ]
        # Password is passed via the environment, never on the command line
        env = os.environ.copy()
        env['PGPASSWORD'] = params['password']
        try:
            print(f"Creating backup: {backup_file}")
            result = subprocess.run(cmd, env=env, capture_output=True, text=True)
            if result.returncode == 0:
                print(f"✅ Backup created successfully: {backup_file}")
                return str(backup_file)
            else:
                print(f"❌ Backup failed: {result.stderr}")
                raise Exception(f"Backup failed: {result.stderr}")
        except FileNotFoundError:
            print("❌ pg_dump not found. Ensure PostgreSQL client tools are installed.")
            raise
    def restore_backup(self, backup_file: str, confirm: bool = False) -> None:
        """Restore the database from a backup file.

        Prompts interactively unless confirm=True. Raises on failure.
        """
        backup_path = Path(backup_file)
        if not backup_path.exists():
            raise FileNotFoundError(f"Backup file not found: {backup_file}")
        if not confirm:
            print(f"⚠️ This will overwrite the current database!")
            response = input("Are you sure you want to continue? (yes/no): ")
            if response.lower() != 'yes':
                print("Restore cancelled.")
                return
        params = self.get_db_connection_params()
        # Drop and recreate database to ensure clean restore
        self._recreate_database()
        # NOTE(review): --clean/--create combined with the manual recreate
        # above is redundant and --create changes which database pg_restore
        # connects to — verify against a real backup before relying on this.
        cmd = [
            "pg_restore",
            "-h", params['host'],
            "-p", str(params['port']),
            "-U", params['user'],
            "-d", params['database'],
            "--no-password",
            "--clean",
            "--if-exists",
            "--create",
            str(backup_path)
        ]
        env = os.environ.copy()
        env['PGPASSWORD'] = params['password']
        try:
            print(f"Restoring from backup: {backup_file}")
            result = subprocess.run(cmd, env=env, capture_output=True, text=True)
            if result.returncode == 0:
                print("✅ Database restored successfully")
            else:
                print(f"❌ Restore failed: {result.stderr}")
                raise Exception(f"Restore failed: {result.stderr}")
        except FileNotFoundError:
            print("❌ pg_restore not found. Ensure PostgreSQL client tools are installed.")
            raise
    def _recreate_database(self):
        """Drop and recreate the target database via the postgres database."""
        params = self.get_db_connection_params()
        # Connect to the maintenance "postgres" DB to drop/recreate target
        postgres_params = params.copy()
        postgres_params['database'] = 'postgres'
        drop_cmd = [
            "psql",
            "-h", postgres_params['host'],
            "-p", str(postgres_params['port']),
            "-U", postgres_params['user'],
            "-d", postgres_params['database'],
            "-c", f"DROP DATABASE IF EXISTS {params['database']};"
        ]
        create_cmd = [
            "psql",
            "-h", postgres_params['host'],
            "-p", str(postgres_params['port']),
            "-U", postgres_params['user'],
            "-d", postgres_params['database'],
            "-c", f"CREATE DATABASE {params['database']};"
        ]
        env = os.environ.copy()
        env['PGPASSWORD'] = params['password']
        for cmd in [drop_cmd, create_cmd]:
            result = subprocess.run(cmd, env=env, capture_output=True, text=True)
            if result.returncode != 0:
                # Best-effort: report but continue (restore may still work)
                print(f"Database recreation step failed: {result.stderr}")
    def list_backups(self):
        """List available backup files, newest first, with size and mtime."""
        backups = list(self.backup_dir.glob("*.sql"))
        backups.sort(key=lambda x: x.stat().st_mtime, reverse=True)
        if not backups:
            print("No backup files found.")
            return
        print("Available backups:")
        for backup in backups:
            size = backup.stat().st_size / (1024 * 1024)  # Size in MB
            mtime = datetime.fromtimestamp(backup.stat().st_mtime)
            # The original line was a mangled format string: print(".2f")
            print(f"  {backup.name}  {size:.2f} MB  {mtime:%Y-%m-%d %H:%M:%S}")
    def cleanup_old_backups(self, keep_days: int = 30):
        """Remove backups whose mtime is older than keep_days days."""
        from datetime import timedelta
        cutoff = datetime.now() - timedelta(days=keep_days)
        removed = []
        for backup in self.backup_dir.glob("*.sql"):
            if datetime.fromtimestamp(backup.stat().st_mtime) < cutoff:
                backup.unlink()
                removed.append(backup.name)
        if removed:
            print(f"Removed {len(removed)} old backups: {', '.join(removed)}")
        else:
            print("No old backups to remove.")
def main():
    """CLI dispatcher for the backup/restore/list/cleanup commands."""
    argv = sys.argv
    if len(argv) < 2:
        print("Usage: python backup_restore.py <command> [options]")
        print("Commands:")
        print(" backup [name] - Create a new backup")
        print(" restore <file> [--yes] - Restore from backup")
        print(" list - List available backups")
        print(" cleanup [days] - Remove backups older than N days (default: 30)")
        sys.exit(1)
    db_manager = DatabaseManager()
    cmd = argv[1]
    try:
        if cmd == "backup":
            db_manager.create_backup(argv[2] if len(argv) > 2 else None)
        elif cmd == "restore":
            if len(argv) < 3:
                print("Error: Please specify backup file to restore from")
                sys.exit(1)
            db_manager.restore_backup(argv[2], "--yes" in argv)
        elif cmd == "list":
            db_manager.list_backups()
        elif cmd == "cleanup":
            db_manager.cleanup_old_backups(int(argv[2]) if len(argv) > 2 else 30)
        else:
            print(f"Unknown command: {cmd}")
            sys.exit(1)
    except Exception as e:
        print(f"Error: {e}")
        sys.exit(1)
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,259 @@
#!/usr/bin/env python3
"""
Migration compatibility and version checker for containerized deployments.
Validates migration integrity and compatibility before deployments.
"""
import sys
import os
from pathlib import Path
from typing import Dict, List, Tuple
# Add backend directory to path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))
from alembic.config import Config
from alembic import command
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
from sqlalchemy import create_engine, text
from app.database import get_database_url
class MigrationChecker:
"""Validates migration compatibility and integrity."""
def __init__(self):
self.config = self._get_alembic_config()
self.script = ScriptDirectory.from_config(self.config)
def _get_alembic_config(self):
"""Get Alembic configuration."""
config = Config("alembic.ini")
config.set_main_option("sqlalchemy.url", get_database_url())
return config
def check_migration_files(self) -> Dict[str, bool]:
"""Check integrity of migration files."""
results = {
"files_exist": False,
"proper_ordering": False,
"no_duplicates": False,
"valid_syntax": False
}
try:
# Check if migration directory exists
versions_dir = Path("alembic/versions")
if not versions_dir.exists():
print("❌ Migration versions directory not found")
return results
# Get all migration files
migration_files = list(versions_dir.glob("*.py"))
if not migration_files:
print("⚠️ No migration files found")
results["files_exist"] = True # Empty is valid
return results
results["files_exist"] = True
# Check for duplicate revision numbers
revisions = []
for file_path in migration_files:
with open(file_path, 'r') as f:
content = f.read()
# Extract revision from file
if "revision = " in content:
rev_line = [line for line in content.split('\n') if "revision = " in line]
if rev_line:
rev = rev_line[0].split("'")[1]
if rev in revisions:
print(f"❌ Duplicate revision found: {rev}")
return results
revisions.append(rev)
results["no_duplicates"] = True
# Validate migration ordering
try:
# Get ordered revisions from script directory
ordered_revisions = []
for rev in self.script.walk_revisions():
ordered_revisions.append(rev.revision)
# Check if our files match the ordering
if set(revisions) == set(ordered_revisions):
results["proper_ordering"] = True
else:
print("❌ Migration ordering mismatch")
return results
except Exception as e:
print(f"❌ Error checking migration ordering: {e}")
return results
# Basic syntax validation
for file_path in migration_files:
try:
compile(open(file_path).read(), file_path, 'exec')
except SyntaxError as e:
print(f"❌ Syntax error in {file_path}: {e}")
return results
results["valid_syntax"] = True
print("✅ All migration files are valid")
except Exception as e:
print(f"❌ Error checking migration files: {e}")
return results
def check_database_state(self) -> Dict[str, any]:
"""Check current database migration state."""
results = {
"connected": False,
"current_revision": None,
"head_revision": None,
"up_to_date": False,
"pending_migrations": []
}
try:
engine = create_engine(get_database_url())
with engine.connect() as conn:
results["connected"] = True
# Get current revision
context = MigrationContext.configure(conn)
current_rev = context.get_current_revision()
results["current_revision"] = current_rev
# Get head revision
head_rev = self.script.get_current_head()
results["head_revision"] = head_rev
# Check if up to date
results["up_to_date"] = current_rev == head_rev
# Get pending migrations
if not results["up_to_date"]:
pending = []
for rev in self.script.walk_revisions():
if rev.revision > current_rev:
pending.append(rev.revision)
results["pending_migrations"] = pending
except Exception as e:
print(f"❌ Database connection error: {e}")
return results
def validate_deployment_readiness(self) -> bool:
    """Report whether a deployment can proceed safely.

    Runs the migration-file and database-state checks; a database that is
    merely behind head is still deployable because migrations are applied
    during deployment.
    """
    print("🔍 Checking deployment readiness...")
    # Run both checks up front (each prints its own diagnostics).
    file_status = self.check_migration_files()
    db_status = self.check_database_state()
    if not all(file_status.values()):
        print("❌ Migration files have issues")
        return False
    if not db_status["connected"]:
        print("❌ Cannot connect to database")
        return False
    if not db_status["up_to_date"]:
        print(f"⚠️ Database not up to date. Current: {db_status['current_revision']}, Head: {db_status['head_revision']}")
        print(f"Pending migrations: {db_status['pending_migrations']}")
        # For deployment, we might want to allow this if migrations will be run
        print(" This is acceptable if migrations will be run during deployment")
        return True
    print("✅ Deployment readiness check passed")
    return True
def generate_migration_report(self) -> str:
    """Build a markdown-formatted migration status report.

    Sections: migration file integrity, database state, and the overall
    deployment-readiness verdict.
    """
    lines = ["# Migration Status Report", ""]
    # File-integrity section: one bullet per check with a pass/fail icon.
    lines.append("## Migration Files")
    for check, status in self.check_migration_files().items():
        lines.append(f"- {check}: {'✅' if status else '❌'}")
    # Database-state section: lists are flattened to a comma-joined string.
    lines.extend(["", "## Database State"])
    for check, value in self.check_database_state().items():
        if isinstance(value, list):
            value = ", ".join(value) if value else "None"
        lines.append(f"- {check}: {value}")
    # Overall verdict (this call prints its own progress output too).
    lines.extend(["", "## Deployment Readiness"])
    ready = self.validate_deployment_readiness()
    lines.append(f"- Ready for deployment: {'✅' if ready else '❌'}")
    return "\n".join(lines)
def main():
    """CLI entry point: dispatch one of the migration-checker subcommands.

    Exits non-zero on usage errors, failed checks, or unexpected exceptions.
    """
    if len(sys.argv) < 2:
        print("Usage: python migration_checker.py <command>")
        print("Commands:")
        print(" check-files - Check migration file integrity")
        print(" check-db - Check database migration state")
        print(" validate-deploy - Validate deployment readiness")
        print(" report - Generate detailed migration report")
        sys.exit(1)
    command = sys.argv[1]
    checker = MigrationChecker()
    try:
        if command == "check-files":
            outcome = checker.check_migration_files()
            ok = all(outcome.values())
            print("✅ Files OK" if ok else "❌ Files have issues")
            sys.exit(0 if ok else 1)
        elif command == "check-db":
            state = checker.check_database_state()
            print(f"Connected: {'✅' if state['connected'] else '❌'}")
            print(f"Up to date: {'✅' if state['up_to_date'] else '❌'}")
            print(f"Current: {state['current_revision']}")
            print(f"Head: {state['head_revision']}")
        elif command == "validate-deploy":
            sys.exit(0 if checker.validate_deployment_readiness() else 1)
        elif command == "report":
            print(checker.generate_migration_report())
        else:
            print(f"Unknown command: {command}")
            sys.exit(1)
    except Exception as e:
        print(f"Error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Migration rollback script for containerized deployments.
Provides safe rollback functionality with validation.
"""
import sys
import os
from pathlib import Path
# Add backend directory to path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))
from alembic.config import Config
from alembic import command
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
import sqlalchemy as sa
from app.database import get_database_url
def get_alembic_config():
    """Load alembic.ini and point it at the runtime database URL."""
    cfg = Config("alembic.ini")
    # Override the static ini URL with the environment-derived one.
    cfg.set_main_option("sqlalchemy.url", get_database_url())
    return cfg
def get_current_revision():
    """Return the revision stamped in the database (None if never migrated)."""
    engine = sa.create_engine(get_database_url())
    try:
        with engine.connect() as conn:
            context = MigrationContext.configure(conn)
            return context.get_current_revision()
    finally:
        # The original created a throwaway engine and never disposed it,
        # leaking a connection pool on every call.  (It also built an unused
        # Config/ScriptDirectory pair, removed here.)
        engine.dispose()
def rollback_migration(revision="-1"):
    """
    Rollback to specified revision.

    Args:
        revision: Target revision passed to ``alembic downgrade``.  Defaults
            to "-1", Alembic's relative syntax for "one revision back".  The
            previous default "head:-1" is not a valid downgrade target and
            made the default invocation fail.
    """
    try:
        print(f"Rolling back to revision: {revision}")
        config = get_alembic_config()
        command.downgrade(config, revision)
        print("Rollback completed successfully")
        # Verify rollback by re-reading the stamped revision.
        current = get_current_revision()
        print(f"Current revision after rollback: {current}")
    except Exception as e:
        print(f"Rollback failed: {e}")
        sys.exit(1)
def list_migrations():
    """Print every revision known to the Alembic script directory (head first)."""
    script = ScriptDirectory.from_config(get_alembic_config())
    print("Available migrations:")
    for rev in script.walk_revisions():
        print(f" {rev.revision}: {rev.doc}")
if __name__ == "__main__":
    # CLI dispatcher for rollback / current / list subcommands.
    if len(sys.argv) < 2:
        print("Usage: python migration_rollback.py <command> [revision]")
        print("Commands:")
        print(" rollback [revision] - Rollback to revision (default: -1)")
        print(" current - Show current revision")
        print(" list - List available migrations")
        sys.exit(1)
    command = sys.argv[1]
    if command == "rollback":
        # "-1" is Alembic's relative syntax for one step back; the previous
        # default "head:-1" is not a valid downgrade target and always failed.
        revision = sys.argv[2] if len(sys.argv) > 2 else "-1"
        rollback_migration(revision)
    elif command == "current":
        current = get_current_revision()
        print(f"Current revision: {current}")
    elif command == "list":
        list_migrations()
    else:
        print(f"Unknown command: {command}")
        sys.exit(1)

View File

@@ -0,0 +1 @@
# Empty file to mark tests directory as Python package

36
backend/tests/conftest.py Normal file
View File

@@ -0,0 +1,36 @@
import pytest
from fastapi.testclient import TestClient
from app.main import app
from app.database import get_db, Base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
TEST_DATABASE_URL = "postgresql://postgres:postgres@localhost:5432/test_db"
@pytest.fixture(scope="session")
def test_engine():
    """Session-wide engine against the dedicated test database."""
    eng = create_engine(TEST_DATABASE_URL)
    # Build every ORM table once before the first test runs.
    Base.metadata.create_all(bind=eng)
    yield eng
    # Drop the schema after the entire session finishes.
    Base.metadata.drop_all(bind=eng)
@pytest.fixture
def db_session(test_engine):
    """Per-test session inside an outer transaction that is always rolled back,
    so each test sees a clean database."""
    conn = test_engine.connect()
    outer_tx = conn.begin()
    factory = sessionmaker(autocommit=False, autoflush=False, bind=conn)
    session = factory()
    yield session
    # Discard all test writes and release the connection.
    session.close()
    outer_tx.rollback()
    conn.close()
@pytest.fixture
def client(db_session):
    """TestClient whose get_db dependency yields the transactional test session.

    Fixes over the original:
    - The override no longer closes the session: ``db_session`` owns its
      lifecycle (close + rollback in its own teardown), and closing it here
      between requests detached ORM objects mid-test.
    - The dependency override is removed after the test instead of being left
      on the global ``app`` object, where it leaked into unrelated tests.
    """
    def override_get_db():
        yield db_session

    app.dependency_overrides[get_db] = override_get_db
    try:
        yield TestClient(app)
    finally:
        app.dependency_overrides.pop(get_db, None)

View File

@@ -0,0 +1,78 @@
import pytest
from unittest.mock import AsyncMock, patch
from app.services.garmin import GarminService
from app.models.garmin_sync_log import GarminSyncStatus
from datetime import datetime, timedelta
@pytest.mark.asyncio
async def test_garmin_authentication_success(db_session):
    """Test successful Garmin Connect authentication"""
    with patch('garth.Client') as mock_client:
        # The service instantiates garth.Client internally; stub its login.
        client = mock_client.return_value
        client.login = AsyncMock(return_value=True)
        svc = GarminService(db_session)
        assert await svc.authenticate("test_user", "test_pass") is True
        client.login.assert_awaited_once_with("test_user", "test_pass")
@pytest.mark.asyncio
async def test_garmin_authentication_failure(db_session):
    """Test authentication failure handling"""
    # Fix: GarminSyncLog was referenced without any import (NameError at
    # runtime).  Imported locally; path mirrors the GarminSyncStatus import
    # at the top of this file — TODO confirm the model lives there.
    from app.models.garmin_sync_log import GarminSyncLog
    with patch('garth.Client') as mock_client:
        mock_instance = mock_client.return_value
        mock_instance.login = AsyncMock(side_effect=Exception("Invalid credentials"))
        service = GarminService(db_session)
        result = await service.authenticate("bad_user", "wrong_pass")
        assert result is False
        # A failed login must be recorded as AUTH_FAILED in the sync log.
        log_entry = db_session.query(GarminSyncLog).first()
        assert log_entry.status == GarminSyncStatus.AUTH_FAILED
@pytest.mark.asyncio
async def test_activity_sync(db_session):
    """Test successful activity synchronization"""
    # Fix: Workout and GarminSyncLog were used without imports (NameError at
    # runtime).  Imported locally — TODO confirm both module paths against
    # the actual app.models package layout.
    from app.models.garmin_sync_log import GarminSyncLog
    from app.models.workout import Workout
    with patch('garth.Client') as mock_client:
        mock_instance = mock_client.return_value
        mock_instance.connectapi = AsyncMock(return_value=[
            {"activityId": 123, "startTime": "2024-01-01T08:00:00"}
        ])
        service = GarminService(db_session)
        await service.sync_activities()
        # Verify workout created
        workout = db_session.query(Workout).first()
        assert workout.garmin_activity_id == 123
        # Verify sync log updated
        log_entry = db_session.query(GarminSyncLog).first()
        assert log_entry.status == GarminSyncStatus.COMPLETED
@pytest.mark.asyncio
async def test_rate_limiting_handling(db_session):
    """Test API rate limit error handling"""
    # Fix: GarminSyncLog was referenced without any import (NameError at
    # runtime) — TODO confirm the model's module path.
    from app.models.garmin_sync_log import GarminSyncLog
    with patch('garth.Client') as mock_client:
        mock_instance = mock_client.return_value
        mock_instance.connectapi = AsyncMock(side_effect=Exception("Rate limit exceeded"))
        service = GarminService(db_session)
        result = await service.sync_activities()
        assert result is False
        # The failure and its message must be persisted in the sync log.
        log_entry = db_session.query(GarminSyncLog).first()
        assert log_entry.status == GarminSyncStatus.FAILED
        assert "Rate limit" in log_entry.error_message
@pytest.mark.asyncio
async def test_session_persistence(db_session):
    """Test session cookie persistence"""
    # Fix: Path was used without importing pathlib (NameError at runtime).
    from pathlib import Path
    service = GarminService(db_session)
    # Store session
    await service.store_session({"token": "test123"})
    # Round-trip: what was stored must load back unchanged.
    session = await service.load_session()
    assert session == {"token": "test123"}
    # The session must also land in the mounted sessions volume.
    assert Path("/app/data/sessions/garmin_session.pickle").exists()

244
designdoc.md Normal file
View File

@@ -0,0 +1,244 @@
---
# **AI-Assisted Cycling Coach — Design Document**
## **1. Architecture Overview**
**Goal:** Web-based cycling coach that plans workouts, analyzes Garmin rides, and integrates AI while enforcing strict user-defined rules.
### **Components**
| Component | Tech | Purpose |
| ---------------- | -------------------------- | ------------------------------------------------------------------ |
| Frontend | React/Next.js | UI for routes, plans, analysis, file uploads |
| Backend | Python (FastAPI, async) | API layer, AI integration, Garmin sync, DB access |
| Database | PostgreSQL | Stores routes, sections, plans, rules, workouts, prompts, analyses |
| File Storage | Mounted folder `/app/data/gpx` | Store GPX files for sections/routes |
| AI Integration | OpenRouter via backend | Plan generation, workout analysis, suggestions |
| Containerization | Docker + docker-compose | Encapsulate frontend, backend, database with persistent storage |
**Workflow Overview**
1. Upload/import GPX → backend saves to mounted folder + metadata in DB
2. Define rules (natural language → AI parses → JSON → DB)
3. Generate plan → AI creates JSON plan → DB versioned
4. Ride recorded on Garmin → backend syncs activity metrics → stores in DB
5. AI analyzes workout → feedback & suggestions stored → user approves → new plan version created
---
## **2. Backend Design (Python, Async)**
**Framework:** FastAPI (async-first, non-blocking I/O)
**Tasks:**
* **Route/Section Management:** Upload GPX, store metadata, read GPX files for visualization
* **Rule Management:** CRUD rules, hierarchical parsing (AI-assisted)
* **Plan Management:** Generate plans (AI), store versions
* **Workout Analysis:** Fetch Garmin activity, run AI analysis, store reports
* **AI Integration:** Async calls to OpenRouter
* **Database Interaction:** Async Postgres client (e.g., `asyncpg` or `SQLAlchemy Async`)
**Endpoints (examples)**
| Method | Endpoint | Description |
| ------ | ------------------- | ------------------------------------------------ |
| POST | `/routes/upload` | Upload GPX file for route/section |
| GET | `/routes` | List routes and sections |
| POST | `/rules` | Create new rule set (with AI parse) |
| POST | `/plans/generate` | Generate new plan using rules & goals |
| GET | `/plans/{plan_id}` | Fetch plan JSON & version info |
| POST | `/workouts/analyze` | Trigger AI analysis for a synced Garmin activity |
| POST | `/workouts/approve` | Approve AI suggestions → create new plan version |
**Async Patterns:**
* File I/O → async reading/writing GPX
* AI API calls → async HTTP requests
* Garmin sync → async polling/scheduled jobs
---
## **3. Database Design (Postgres)**
**Tables:**
```sql
-- Routes & Sections
CREATE TABLE routes (
id SERIAL PRIMARY KEY,
name TEXT NOT NULL,
created_at TIMESTAMP DEFAULT now()
);
CREATE TABLE sections (
id SERIAL PRIMARY KEY,
route_id INT REFERENCES routes(id),
gpx_file_path TEXT NOT NULL,
distance_m NUMERIC,
grade_avg NUMERIC,
min_gear TEXT,
est_time_minutes NUMERIC,
created_at TIMESTAMP DEFAULT now()
);
-- Rules (hierarchical JSON)
CREATE TABLE rules (
id SERIAL PRIMARY KEY,
name TEXT NOT NULL,
user_defined BOOLEAN DEFAULT true,
jsonb_rules JSONB NOT NULL,
version INT DEFAULT 1,
created_at TIMESTAMP DEFAULT now()
);
-- Plans (versioned)
CREATE TABLE plans (
id SERIAL PRIMARY KEY,
jsonb_plan JSONB NOT NULL,
version INT NOT NULL,
created_at TIMESTAMP DEFAULT now()
);
-- Workouts
CREATE TABLE workouts (
id SERIAL PRIMARY KEY,
plan_id INT REFERENCES plans(id),
garmin_activity_id TEXT NOT NULL,
metrics JSONB,
created_at TIMESTAMP DEFAULT now()
);
-- Analyses
CREATE TABLE analyses (
id SERIAL PRIMARY KEY,
workout_id INT REFERENCES workouts(id),
jsonb_feedback JSONB,
created_at TIMESTAMP DEFAULT now()
);
-- AI Prompts
CREATE TABLE prompts (
id SERIAL PRIMARY KEY,
action_type TEXT, -- plan, analysis, suggestion
model TEXT,
prompt_text TEXT,
version INT DEFAULT 1,
created_at TIMESTAMP DEFAULT now()
);
```
---
## **4. Containerization (Docker Compose)**
```yaml
version: '3.9'
services:
backend:
build: ./backend
ports:
- "8000:8000"
volumes:
- gpx-data:/app/data/gpx
environment:
- DATABASE_URL=postgresql://postgres:password@db:5432/cycling
depends_on:
- db
frontend:
build: ./frontend
ports:
- "3000:3000"
db:
image: postgres:15
restart: always
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
POSTGRES_DB: cycling
volumes:
- postgres-data:/var/lib/postgresql/data
volumes:
gpx-data:
driver: local
postgres-data:
driver: local
```
**Notes:**
* `/app/data/gpx` inside backend container is persisted on host via `gpx-data` volume.
* Postgres data persisted via `postgres-data`.
* Backend talks to DB via async client.
---
## **5. Frontend UI Layouts & Flows**
### **5.1 Layout**
* **Navbar:** Routes | Rules | Plans | Workouts | Analysis | Export/Import
* **Sidebar:** Filters (date, type, difficulty)
* **Main Area:** Dynamic content depending on selection
### **5.2 Key Screens**
1. **Routes**
* Upload/import GPX
* View route map + section metadata
2. **Rules**
* Natural language editor
* AI parse → preview JSON → save
* Switch between rule sets
3. **Plan**
* Select goal + rule set → generate plan
* View plan timeline & weekly workouts
4. **Workout Analysis**
* List synced Garmin activities
* Select activity → AI generates report
* Visualizations: HR, cadence, power vs planned
* Approve suggestions → new plan version
5. **Export/Import**
* Export JSON/ZIP of routes, rules, plans
* Import JSON/GPX
### **5.3 User Flow Example**
1. Upload GPX → backend saves file + DB metadata
2. Define rule set → AI parses → user confirms → DB versioned
3. Generate plan → AI → store plan version in DB
4. Sync Garmin activity → backend fetches metrics → store workout
5. AI analyzes → report displayed → user approves → new plan version
6. Export plan or route as needed
---
## **6. AI Integration**
* Each **action type** (plan generation, analysis, suggestion) has:
* Stored prompt template in DB
* Configurable model per action
* Async calls to OpenRouter
* Store raw AI output + processed structured result in DB
---
## ✅ **Next Steps**
1. Implement **Python FastAPI backend** with async patterns.
2. Build **Postgres DB schema** and migration scripts.
3. Setup **Docker Compose** with mounted GPX folder.
4. Design frontend UI based on the flows above.
5. Integrate AI endpoints and Garmin sync.
---

54
docker-compose.yml Normal file
View File

@@ -0,0 +1,54 @@
# Docker Compose stack for the AI Cycling Coach: FastAPI backend, Next.js
# frontend, and Postgres 15, with named volumes for GPX files, Garmin session
# cookies, and database data.
version: '3.9'
services:
  backend:
    build:
      context: ./backend
    volumes:
      - gpx-data:/app/data/gpx
      - garmin-sessions:/app/data/sessions
    ports:
      - "8000:8000"
    environment:
      # NOTE(review): credentials are hardcoded here and in the db service
      # below — move POSTGRES_PASSWORD into .env for anything beyond local dev.
      - DATABASE_URL=postgresql://postgres:password@db:5432/cycling
      - GARMIN_USERNAME=${GARMIN_USERNAME}
      - GARMIN_PASSWORD=${GARMIN_PASSWORD}
      - OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
      - AI_MODEL=${AI_MODEL:-claude-3-sonnet-20240229}
      - API_KEY=${API_KEY}
    depends_on:
      db:
        condition: service_healthy
    healthcheck:
      # NOTE(review): requires curl inside the backend image — confirm it is
      # installed, otherwise the container will report unhealthy.
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 40s
  frontend:
    build: ./frontend
    ports:
      - "3000:3000"
    environment:
      - REACT_APP_API_URL=http://localhost:8000
      - REACT_APP_API_KEY=${API_KEY}
  db:
    image: postgres:15
    restart: always
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: password
      POSTGRES_DB: cycling
    volumes:
      - postgres-data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres -d cycling"]
      interval: 10s
      timeout: 5s
      retries: 5
volumes:
  gpx-data:
  garmin-sessions:
  postgres-data:

39
frontend/Dockerfile Normal file
View File

@@ -0,0 +1,39 @@
# Build stage
FROM node:20-alpine AS build

# Set working directory
WORKDIR /app

# Copy package.json and package-lock.json.
# docker-compose builds this image with context ./frontend, so COPY paths are
# relative to the frontend directory itself; the previous "frontend/..."
# prefix pointed outside the build context and broke the build.
COPY package*.json ./

# Install dependencies
RUN npm install

# Copy source code
COPY . .

# Build application
RUN npm run build

# Production stage
FROM node:20-alpine AS production

# Set working directory
WORKDIR /app

# Copy build artifacts and dependencies
COPY --from=build /app/package*.json ./
COPY --from=build /app/.next ./.next
COPY --from=build /app/node_modules ./node_modules
COPY --from=build /app/public ./public

# Create non-root user
RUN addgroup -S appgroup && adduser -S appuser -G appgroup
USER appuser

# Expose application port
EXPOSE 3000

# Run application
CMD ["npm", "start"]

10
frontend/babel.config.js Normal file
View File

@@ -0,0 +1,10 @@
// Babel configuration for the Next.js frontend: extends next/babel and
// switches React to the automatic JSX runtime with '@emotion/react' as the
// JSX import source (presumably for Emotion's css prop — verify usage).
// NOTE(review): '@emotion/react' is not listed in package.json dependencies
// — confirm it is installed or drop the importSource override.
module.exports = {
  presets: [
    ['next/babel', {
      'preset-react': {
        runtime: 'automatic',
        importSource: '@emotion/react'
      }
    }]
  ]
}

4942
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

32
frontend/package.json Normal file
View File

@@ -0,0 +1,32 @@
{
"name": "aic-frontend",
"version": "1.0.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint",
"test": "jest",
"test:watch": "jest --watch"
},
"dependencies": {
"next": "14.2.3",
"react": "18.2.0",
"react-dom": "18.2.0",
"recharts": "3.4.2"
},
"devDependencies": {
"@types/node": "20.11.5",
"@types/react": "18.2.60",
"@types/react-dom": "18.2.22",
"@testing-library/jest-dom": "6.4.2",
"@testing-library/react": "14.2.1",
"@testing-library/user-event": "14.5.2",
"jest": "29.7.0",
"jest-environment-jsdom": "29.7.0",
"eslint": "8.57.0",
"eslint-config-next": "14.2.3",
"typescript": "5.3.3"
}
}

1
frontend/setupTests.js Normal file
View File

@@ -0,0 +1 @@
// jest-dom v6 (package.json pins 6.4.2) removed the "extend-expect" entry
// point; the matchers are registered by importing the package root.
import '@testing-library/jest-dom';

View File

@@ -0,0 +1,66 @@
// Generic React error boundary: catches render-time errors in its subtree and
// shows a recovery UI (with a dev-only details pane) instead of unmounting the
// whole app.
import React, { Component } from 'react';

class ErrorBoundary extends Component {
  constructor(props) {
    super(props);
    // hasError drives the fallback UI; error/errorInfo feed the details pane.
    this.state = { hasError: false, error: null, errorInfo: null };
  }

  // Render-phase hook: switch to the fallback UI on the next render.
  static getDerivedStateFromError(error) {
    return { hasError: true };
  }

  // Commit-phase hook: capture the error object and component stack for display.
  componentDidCatch(error, errorInfo) {
    this.setState({
      error: error,
      errorInfo: errorInfo
    });
    // Log error to analytics service in production
    // NOTE(review): despite the comment above, this only writes to
    // console.error — no analytics service is called; confirm intent.
    if (process.env.NODE_ENV === 'production') {
      console.error('Error caught by boundary:', error, errorInfo);
    }
  }

  render() {
    if (this.state.hasError) {
      // Fallback UI: warning icon, apology, dev-only stack trace, reload button.
      return (
        <div className="error-boundary bg-red-50 border border-red-200 rounded-lg p-6">
          <div className="flex items-start">
            <div className="flex-shrink-0">
              <svg className="h-12 w-12 text-red-400" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
              </svg>
            </div>
            <div className="ml-4">
              <h3 className="text-lg font-medium text-red-800">Something went wrong</h3>
              <div className="mt-2 text-sm text-red-700">
                <p>We're sorry - an unexpected error occurred. Please try refreshing the page.</p>
                {process.env.NODE_ENV === 'development' && (
                  <details className="mt-3">
                    <summary className="font-medium cursor-pointer">Error details</summary>
                    <div className="mt-2 bg-red-100 p-2 rounded-md overflow-auto max-h-48">
                      <p className="font-mono text-xs">{this.state.error && this.state.error.toString()}</p>
                      <pre className="text-xs mt-2">{this.state.errorInfo?.componentStack}</pre>
                    </div>
                  </details>
                )}
              </div>
              <div className="mt-4">
                <button
                  onClick={() => window.location.reload()}
                  className="px-4 py-2 bg-red-600 text-white rounded-md text-sm font-medium hover:bg-red-700 transition-colors"
                >
                  Reload Page
                </button>
              </div>
            </div>
          </div>
        </div>
      );
    }
    // No error: render children untouched.
    return this.props.children;
  }
}

export default ErrorBoundary;

View File

@@ -0,0 +1,150 @@
// FileUpload: drag-and-drop / click-to-browse upload widget with client-side
// validation (extension whitelist + 10MB size cap) and an inline text preview
// for .gpx files.  The raw File object is handed to the parent via onUpload.
import React, { useState, useCallback } from 'react';
// NOTE(review): useCallback is imported but never used — either remove the
// import or wrap the handlers below with it.

const FileUpload = ({ onUpload, acceptedTypes = ['.gpx'] }) => {
  const [isDragging, setIsDragging] = useState(false);        // drag-over highlight
  const [previewContent, setPreviewContent] = useState(null); // raw text of the previewed file
  const [error, setError] = useState(null);                   // validation/processing error message
  const [isLoading, setIsLoading] = useState(false);          // true while reading the file

  const handleDragOver = (e) => {
    // preventDefault is required so the browser allows dropping here.
    e.preventDefault();
    setIsDragging(true);
  };

  const handleDragLeave = () => {
    setIsDragging(false);
  };

  const handleDrop = (e) => {
    e.preventDefault();
    setIsDragging(false);
    // Only the first dropped file is processed.
    const files = e.dataTransfer.files;
    if (files.length > 0) {
      handleFile(files[0]);
    }
  };

  const handleFile = async (file) => {
    setError(null);
    // Validate file type
    // NOTE(review): for a name with no dot, pop() returns the whole name,
    // which then fails the whitelist check — acceptable, but worth knowing.
    const fileExt = file.name.split('.').pop().toLowerCase();
    if (!acceptedTypes.includes(`.${fileExt}`)) {
      setError(`Invalid file type. Supported types: ${acceptedTypes.join(', ')}`);
      return;
    }
    // Validate file size (max 10MB)
    if (file.size > 10 * 1024 * 1024) {
      setError('File size exceeds 10MB limit');
      return;
    }
    try {
      setIsLoading(true);
      // Preview GPX content
      if (fileExt === 'gpx') {
        const content = await file.text();
        setPreviewContent(content);
      } else {
        setPreviewContent(null);
      }
      // Pass file to parent component for upload
      if (onUpload) {
        onUpload(file);
      }
    } catch (err) {
      console.error('File processing error:', err);
      setError('Failed to process file');
    } finally {
      setIsLoading(false);
    }
  };

  const handleFileInput = (e) => {
    // Mirror of handleDrop for the hidden <input type="file">.
    const files = e.target.files;
    if (files.length > 0) {
      handleFile(files[0]);
    }
  };

  const clearPreview = () => {
    setPreviewContent(null);
  };

  return (
    <div className="file-upload">
      <div
        className={`border-2 border-dashed rounded-lg p-6 text-center cursor-pointer transition-colors ${
          isDragging ? 'border-blue-500 bg-blue-50' : 'border-gray-300 hover:border-blue-400'
        }`}
        onDragOver={handleDragOver}
        onDragLeave={handleDragLeave}
        onDrop={handleDrop}
        onClick={() => document.getElementById('file-input').click()}
      >
        <input
          id="file-input"
          type="file"
          className="hidden"
          accept={acceptedTypes.join(',')}
          onChange={handleFileInput}
        />
        <div className="flex flex-col items-center justify-center">
          {isLoading ? (
            <>
              <svg className="animate-spin h-10 w-10 text-blue-500 mx-auto" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
                <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
                <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
              </svg>
              <p className="mt-2 text-gray-600">Processing file...</p>
            </>
          ) : (
            <>
              <svg className="h-10 w-10 text-gray-400 mx-auto" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12" />
              </svg>
              <p className="mt-2 text-sm text-gray-600">
                <span className="font-medium text-blue-600">Click to upload</span> or drag and drop
              </p>
              <p className="text-xs text-gray-500 mt-1">
                {acceptedTypes.join(', ')} files, max 10MB
              </p>
            </>
          )}
        </div>
      </div>
      {error && (
        <div className="mt-2 p-2 bg-red-50 text-red-700 text-sm rounded-md">
          {error}
        </div>
      )}
      {previewContent && (
        <div className="mt-4">
          <div className="flex justify-between items-center mb-2">
            <h3 className="font-medium text-gray-800">File Preview</h3>
            <button
              onClick={clearPreview}
              className="text-sm text-gray-500 hover:text-gray-700"
            >
              Clear preview
            </button>
          </div>
          <div className="bg-gray-50 p-3 rounded-md border border-gray-200 max-h-60 overflow-auto">
            <pre className="text-xs text-gray-700 whitespace-pre-wrap">
              {previewContent}
            </pre>
          </div>
        </div>
      )}
    </div>
  );
};

export default FileUpload;

View File

@@ -0,0 +1,121 @@
import { useState, useEffect } from 'react';
const GarminSync = () => {
const [syncStatus, setSyncStatus] = useState(null);
const [syncing, setSyncing] = useState(false);
const [error, setError] = useState(null);
const triggerSync = async () => {
setSyncing(true);
setError(null);
try {
const response = await fetch('/api/workouts/sync', {
method: 'POST',
headers: {
'X-API-Key': process.env.REACT_APP_API_KEY
}
});
if (!response.ok) {
throw new Error(`Sync failed: ${response.statusText}`);
}
// Start polling for status updates
pollSyncStatus();
} catch (err) {
console.error('Garmin sync failed:', err);
setError(err.message);
setSyncing(false);
}
};
const pollSyncStatus = () => {
const interval = setInterval(async () => {
try {
const response = await fetch('/api/workouts/sync-status');
const status = await response.json();
setSyncStatus(status);
// Stop polling when sync is no longer in progress
if (status.status !== 'in_progress') {
setSyncing(false);
clearInterval(interval);
}
} catch (err) {
console.error('Error fetching sync status:', err);
setError('Failed to get sync status');
setSyncing(false);
clearInterval(interval);
}
}, 2000);
};
return (
<div className="garmin-sync bg-gray-50 p-4 rounded-lg shadow">
<h3 className="text-lg font-medium text-gray-800 mb-3">Garmin Connect Sync</h3>
<button
onClick={triggerSync}
disabled={syncing}
className={`px-4 py-2 rounded-md font-medium ${
syncing ? 'bg-gray-400 cursor-not-allowed' : 'bg-blue-600 hover:bg-blue-700'
} text-white transition-colors`}
>
{syncing ? (
<span className="flex items-center">
<svg className="animate-spin -ml-1 mr-2 h-4 w-4 text-white" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
</svg>
Syncing...
</span>
) : 'Sync Recent Activities'}
</button>
{error && (
<div className="mt-3 p-2 bg-red-50 text-red-700 rounded-md">
Error: {error}
</div>
)}
{syncStatus && (
<div className="mt-4 p-3 bg-white rounded-md border border-gray-200">
<h4 className="font-medium text-gray-700 mb-2">Sync Status</h4>
<div className="grid grid-cols-2 gap-2 text-sm">
<div className="text-gray-600">Last sync:</div>
<div className="text-gray-800">
{syncStatus.last_sync_time
? new Date(syncStatus.last_sync_time).toLocaleString()
: 'Never'}
</div>
<div className="text-gray-600">Status:</div>
<div className={`font-medium ${
syncStatus.status === 'success' ? 'text-green-600' :
syncStatus.status === 'error' ? 'text-red-600' : 'text-blue-600'
}`}>
{syncStatus.status}
</div>
{syncStatus.activities_synced > 0 && (
<>
<div className="text-gray-600">Activities synced:</div>
<div className="text-gray-800">{syncStatus.activities_synced}</div>
</>
)}
{syncStatus.error_message && (
<>
<div className="text-gray-600">Error:</div>
<div className="text-red-600">{syncStatus.error_message}</div>
</>
)}
</div>
</div>
)}
</div>
);
};
export default GarminSync;

View File

@@ -0,0 +1,152 @@
// PlanTimeline: renders a versioned training plan — header, optional version
// history table, overview metric cards, and a collapsible weekly schedule.
// Assumes plan.jsonb_plan = { overview: { focus, duration_weeks,
// total_weekly_hours }, weeks: [{ week_number, focus, total_hours,
// workouts: [...] }] } — as read below; confirm against the backend schema.
import React, { useState } from 'react';

const PlanTimeline = ({ plan, versions }) => {
  // Map of week array-index -> expanded flag for the collapsible sections.
  // NOTE(review): keyed by array index while the header displays
  // week.week_number — fine as long as weeks are ordered; verify.
  const [expandedWeeks, setExpandedWeeks] = useState({});

  const toggleWeek = (weekNumber) => {
    setExpandedWeeks(prev => ({
      ...prev,
      [weekNumber]: !prev[weekNumber]
    }));
  };

  return (
    <div className="plan-timeline bg-white rounded-lg shadow-md p-5">
      <div className="header flex justify-between items-center mb-6">
        <div>
          <h2 className="text-xl font-bold text-gray-800">{plan.name || 'Training Plan'}</h2>
          <p className="text-gray-600">Version {plan.version} Created {new Date(plan.created_at).toLocaleDateString()}</p>
        </div>
        <div className="bg-blue-100 text-blue-800 px-3 py-1 rounded-full text-sm">
          {plan.jsonb_plan.overview.focus.replace(/_/g, ' ')}
        </div>
      </div>
      {/* Version history only shows when more than one plan version exists;
          the currently viewed version's row is highlighted. */}
      {versions.length > 1 && (
        <div className="version-history mb-8">
          <h3 className="text-lg font-medium text-gray-800 mb-3">Version History</h3>
          <div className="overflow-x-auto">
            <table className="min-w-full divide-y divide-gray-200">
              <thead className="bg-gray-50">
                <tr>
                  <th className="px-4 py-2 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Version</th>
                  <th className="px-4 py-2 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Created</th>
                  <th className="px-4 py-2 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Trigger</th>
                  <th className="px-4 py-2 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Changes</th>
                </tr>
              </thead>
              <tbody className="bg-white divide-y divide-gray-200">
                {versions.map(version => (
                  <tr key={version.id} className={version.id === plan.id ? 'bg-blue-50' : ''}>
                    <td className="px-4 py-3 whitespace-nowrap text-sm font-medium text-gray-900">
                      v{version.version}
                    </td>
                    <td className="px-4 py-3 whitespace-nowrap text-sm text-gray-500">
                      {new Date(version.created_at).toLocaleDateString()}
                    </td>
                    <td className="px-4 py-3 whitespace-nowrap text-sm text-gray-500 capitalize">
                      {version.evolution_trigger?.replace(/_/g, ' ') || 'initial'}
                    </td>
                    <td className="px-4 py-3 text-sm text-gray-500">
                      {version.changes_summary || 'Initial version'}
                    </td>
                  </tr>
                ))}
              </tbody>
            </table>
          </div>
        </div>
      )}
      <div className="plan-overview bg-gray-50 p-4 rounded-md mb-6">
        <h3 className="text-lg font-medium text-gray-800 mb-2">Plan Overview</h3>
        <div className="grid grid-cols-1 md:grid-cols-3 gap-4">
          <div className="metric-card">
            <span className="text-gray-500">Duration</span>
            <span className="text-xl font-bold text-gray-800">
              {plan.jsonb_plan.overview.duration_weeks} weeks
            </span>
          </div>
          <div className="metric-card">
            <span className="text-gray-500">Weekly Hours</span>
            <span className="text-xl font-bold text-gray-800">
              {plan.jsonb_plan.overview.total_weekly_hours} hours
            </span>
          </div>
          <div className="metric-card">
            <span className="text-gray-500">Focus</span>
            <span className="text-xl font-bold text-gray-800 capitalize">
              {plan.jsonb_plan.overview.focus.replace(/_/g, ' ')}
            </span>
          </div>
        </div>
      </div>
      <div className="weekly-schedule">
        <h3 className="text-lg font-medium text-gray-800 mb-4">Weekly Schedule</h3>
        {plan.jsonb_plan.weeks.map((week, weekIndex) => (
          <div key={weekIndex} className="week-card border border-gray-200 rounded-md mb-4 overflow-hidden">
            <div
              className="week-header bg-gray-100 p-3 flex justify-between items-center cursor-pointer hover:bg-gray-200"
              onClick={() => toggleWeek(weekIndex)}
            >
              <h4 className="font-medium text-gray-800">Week {week.week_number} {week.focus.replace(/_/g, ' ')}</h4>
              <div className="flex items-center">
                <span className="text-sm text-gray-600 mr-2">
                  {week.total_hours} hours {week.workouts.length} workouts
                </span>
                <svg
                  className={`w-5 h-5 text-gray-500 transform transition-transform ${
                    expandedWeeks[weekIndex] ? 'rotate-180' : ''
                  }`}
                  fill="none"
                  viewBox="0 0 24 24"
                  stroke="currentColor"
                >
                  <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 9l-7 7-7-7" />
                </svg>
              </div>
            </div>
            {expandedWeeks[weekIndex] && (
              <div className="workouts-list p-4 bg-white">
                {week.workouts.map((workout, workoutIndex) => (
                  <div key={workoutIndex} className="workout-item border-b border-gray-100 py-3 last:border-0">
                    <div className="flex justify-between">
                      <div>
                        <span className="font-medium text-gray-800 capitalize">{workout.type.replace(/_/g, ' ')}</span>
                        <span className="text-gray-600 ml-2"> {workout.day}</span>
                      </div>
                      <span className="text-gray-600">{workout.duration_minutes} min</span>
                    </div>
                    <div className="mt-1 flex flex-wrap gap-2">
                      <span className="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded-full capitalize">
                        {workout.intensity.replace(/_/g, ' ')}
                      </span>
                      {workout.route_id && (
                        <span className="px-2 py-1 bg-green-100 text-green-800 text-xs rounded-full">
                          Route: {workout.route_name || workout.route_id}
                        </span>
                      )}
                      <span className="px-2 py-1 bg-purple-100 text-purple-800 text-xs rounded-full">
                        TSS: {workout.tss_target || 'N/A'}
                      </span>
                    </div>
                    {workout.description && (
                      <p className="mt-2 text-gray-700 text-sm">{workout.description}</p>
                    )}
                  </div>
                ))}
              </div>
            )}
          </div>
        ))}
      </div>
    </div>
  );
};

export default PlanTimeline;

View File

@@ -0,0 +1,162 @@
import { useState } from 'react';
import { useNavigate } from 'react-router-dom';
// Displays one workout's summary metrics plus its AI-generated analysis, and
// lets the user approve the analysis (which may evolve the training plan).
//
// Props:
//   workout  - workout record; reads activity_type, start_time,
//              duration_seconds, distance_m, avg_power, avg_hr.
//   analysis - analysis record; reads id, approved, suggestions, and
//              jsonb_feedback.{summary, strengths, areas_for_improvement}.
//              NOTE(review): the nested jsonb_feedback fields are accessed
//              without guards — confirm the API always populates them.
const WorkoutAnalysis = ({ workout, analysis }) => {
  // approving disables the button while the POST is in flight.
  const [approving, setApproving] = useState(false);
  const [error, setError] = useState(null);
  const navigate = useNavigate();
  // POST approval for this analysis. If the backend responds with a
  // new_plan_id, the plan was regenerated and we navigate straight to it;
  // otherwise we just confirm success.
  const approveAnalysis = async () => {
    setApproving(true);
    setError(null);
    try {
      const response = await fetch(`/api/analyses/${analysis.id}/approve`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          // NOTE(review): REACT_APP_* values are compiled into the client
          // bundle, so this API key is visible in the browser — presumably
          // acceptable for a single-user self-hosted app; confirm.
          'X-API-Key': process.env.REACT_APP_API_KEY
        }
      });
      if (!response.ok) {
        // Prefer the server's message when the error body provides one.
        const errorData = await response.json();
        throw new Error(errorData.message || 'Approval failed');
      }
      const result = await response.json();
      if (result.new_plan_id) {
        // Navigate to the new plan
        navigate(`/plans/${result.new_plan_id}`);
      } else {
        // Show success message
        setApproving(false);
        alert('Analysis approved successfully!');
      }
    } catch (err) {
      console.error('Approval failed:', err);
      setError(err.message);
      setApproving(false);
    }
  };
  return (
    <div className="workout-analysis bg-white rounded-lg shadow-md p-5">
      {/* Header: workout type, date, and a small grid of summary metrics. */}
      <div className="workout-summary border-b border-gray-200 pb-4 mb-4">
        <h3 className="text-xl font-semibold text-gray-800">
          {workout.activity_type || 'Cycling'} - {new Date(workout.start_time).toLocaleDateString()}
        </h3>
        <div className="grid grid-cols-2 md:grid-cols-4 gap-3 mt-3 text-sm">
          <div className="metric-card">
            <span className="text-gray-500">Duration</span>
            <span className="font-medium">
              {Math.round(workout.duration_seconds / 60)} min
            </span>
          </div>
          <div className="metric-card">
            <span className="text-gray-500">Distance</span>
            <span className="font-medium">
              {(workout.distance_m / 1000).toFixed(1)} km
            </span>
          </div>
          {/* Power and HR cards only render when the ride recorded them. */}
          {workout.avg_power && (
            <div className="metric-card">
              <span className="text-gray-500">Avg Power</span>
              <span className="font-medium">
                {Math.round(workout.avg_power)}W
              </span>
            </div>
          )}
          {workout.avg_hr && (
            <div className="metric-card">
              <span className="text-gray-500">Avg HR</span>
              <span className="font-medium">
                {Math.round(workout.avg_hr)} bpm
              </span>
            </div>
          )}
        </div>
      </div>
      {analysis && (
        <div className="analysis-content">
          <h4 className="text-lg font-medium text-gray-800 mb-3">AI Analysis</h4>
          <div className="feedback-box bg-blue-50 p-4 rounded-md mb-5">
            <p className="text-gray-700">{analysis.jsonb_feedback.summary}</p>
          </div>
          <div className="strengths-improvement grid grid-cols-1 md:grid-cols-2 gap-4 mb-6">
            <div className="strengths">
              <h5 className="font-medium text-green-700 mb-2">Strengths</h5>
              <ul className="list-disc pl-5 space-y-1">
                {analysis.jsonb_feedback.strengths.map((strength, index) => (
                  <li key={index} className="text-gray-700">{strength}</li>
                ))}
              </ul>
            </div>
            <div className="improvements">
              <h5 className="font-medium text-orange-600 mb-2">Areas for Improvement</h5>
              <ul className="list-disc pl-5 space-y-1">
                {analysis.jsonb_feedback.areas_for_improvement.map((area, index) => (
                  <li key={index} className="text-gray-700">{area}</li>
                ))}
              </ul>
            </div>
          </div>
          {analysis.suggestions && analysis.suggestions.length > 0 && (
            <div className="suggestions bg-yellow-50 p-4 rounded-md mb-5">
              <h5 className="font-medium text-gray-800 mb-3">Training Suggestions</h5>
              <ul className="space-y-2">
                {analysis.suggestions.map((suggestion, index) => (
                  <li key={index} className="flex items-start">
                    <span className="inline-block w-6 h-6 bg-yellow-100 text-yellow-800 rounded-full text-center mr-2 flex-shrink-0">
                      {index + 1}
                    </span>
                    <span className="text-gray-700">{suggestion}</span>
                  </li>
                ))}
              </ul>
              {/* NOTE(review): the approve button is nested inside the
                  suggestions block, so an analysis with no suggestions can
                  never be approved from this view — confirm that is intended. */}
              {!analysis.approved && (
                <div className="mt-4">
                  <button
                    onClick={approveAnalysis}
                    disabled={approving}
                    className={`px-4 py-2 rounded-md font-medium ${
                      approving ? 'bg-gray-400 cursor-not-allowed' : 'bg-green-600 hover:bg-green-700'
                    } text-white transition-colors flex items-center`}
                  >
                    {approving ? (
                      <>
                        <svg className="animate-spin -ml-1 mr-2 h-4 w-4 text-white" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
                          <circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4"></circle>
                          <path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
                        </svg>
                        Applying suggestions...
                      </>
                    ) : 'Approve & Update Training Plan'}
                  </button>
                  {error && (
                    <div className="mt-2 text-red-600 text-sm">
                      Error: {error}
                    </div>
                  )}
                </div>
              )}
            </div>
          )}
        </div>
      )}
    </div>
  );
};
export default WorkoutAnalysis;

View File

@@ -0,0 +1,98 @@
import React from 'react';
import {
LineChart, Line, XAxis, YAxis, CartesianGrid,
Tooltip, Legend, ResponsiveContainer
} from 'recharts';
const WorkoutCharts = ({ timeSeries }) => {
// Transform timestamp to minutes from start for X-axis
const formatTimeSeries = (data) => {
if (!data || data.length === 0) return [];
const startTime = new Date(data[0].timestamp);
return data.map(point => ({
...point,
time: (new Date(point.timestamp) - startTime) / 60000, // Convert to minutes
heart_rate: point.heart_rate || null,
power: point.power || null,
cadence: point.cadence || null
}));
};
const formattedData = formatTimeSeries(timeSeries);
return (
<div className="workout-charts bg-white p-4 rounded-lg shadow-md">
<h3 className="text-lg font-medium text-gray-800 mb-4">Workout Metrics</h3>
<ResponsiveContainer width="100%" height={300}>
<LineChart
data={formattedData}
margin={{ top: 5, right: 30, left: 20, bottom: 5 }}
>
<CartesianGrid strokeDasharray="3 3" stroke="#f0f0f0" />
<XAxis
dataKey="time"
label={{
value: 'Time (minutes)',
position: 'insideBottomRight',
offset: -5
}}
domain={['dataMin', 'dataMax']}
tickCount={6}
/>
<YAxis yAxisId="left" orientation="left" stroke="#8884d8">
<Label value="HR (bpm) / Cadence (rpm)" angle={-90} position="insideLeft" />
</YAxis>
<YAxis yAxisId="right" orientation="right" stroke="#82ca9d">
<Label value="Power (W)" angle={90} position="insideRight" />
</YAxis>
<Tooltip
formatter={(value, name) => [`${value} ${name === 'power' ? 'W' : name === 'heart_rate' ? 'bpm' : 'rpm'}`, name]}
labelFormatter={(value) => `Time: ${value.toFixed(1)} min`}
/>
<Legend />
<Line
yAxisId="left"
type="monotone"
dataKey="heart_rate"
name="Heart Rate"
stroke="#8884d8"
strokeWidth={2}
dot={false}
activeDot={{ r: 6 }}
isAnimationActive={false}
/>
<Line
yAxisId="right"
type="monotone"
dataKey="power"
name="Power"
stroke="#82ca9d"
strokeWidth={2}
dot={false}
activeDot={{ r: 6 }}
isAnimationActive={false}
/>
<Line
yAxisId="left"
type="monotone"
dataKey="cadence"
name="Cadence"
stroke="#ffc658"
strokeWidth={2}
dot={false}
activeDot={{ r: 6 }}
isAnimationActive={false}
/>
</LineChart>
</ResponsiveContainer>
<div className="mt-4 text-sm text-gray-500">
<p>Note: Charts show metrics over time during the workout. Hover over points to see exact values.</p>
</div>
</div>
);
};
export default WorkoutCharts;

View File

@@ -0,0 +1,34 @@
import { render, screen, fireEvent } from '@testing-library/react'
import FileUpload from '../FileUpload'
describe('FileUpload Component', () => {
test('renders upload button', () => {
render(<FileUpload onUpload={() => {}} />)
expect(screen.getByText('Upload GPX File')).toBeInTheDocument()
expect(screen.getByTestId('file-input')).toBeInTheDocument()
})
test('handles file selection', () => {
const mockFile = new File(['test content'], 'test.gpx', { type: 'application/gpx+xml' })
const mockOnUpload = jest.fn()
render(<FileUpload onUpload={mockOnUpload} />)
const input = screen.getByTestId('file-input')
fireEvent.change(input, { target: { files: [mockFile] } })
expect(mockOnUpload).toHaveBeenCalledWith(mockFile)
expect(screen.getByText('Selected file: test.gpx')).toBeInTheDocument()
})
test('shows error for invalid file type', () => {
const invalidFile = new File(['test'], 'test.txt', { type: 'text/plain' })
const { container } = render(<FileUpload onUpload={() => {}} />)
const input = screen.getByTestId('file-input')
fireEvent.change(input, { target: { files: [invalidFile] } })
expect(screen.getByText('Invalid file type. Please upload a GPX file.')).toBeInTheDocument()
expect(container.querySelector('.error-message')).toBeVisible()
})
})

View File

@@ -0,0 +1,253 @@
import React, { useState, useEffect } from 'react';
import GarminSync from '../components/GarminSync';
import WorkoutCharts from '../components/WorkoutCharts';
import PlanTimeline from '../components/PlanTimeline';
import WorkoutAnalysis from '../components/WorkoutAnalysis';
// Top-level dashboard page. Fetches one aggregate payload from /api/dashboard
// on mount and renders: stat cards, the Garmin sync widget, the current
// training plan, the latest workout analysis, and upcoming/recent workouts.
const Dashboard = () => {
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState(null);
  // Shape mirrors the /api/dashboard response; the defaults keep the first
  // render safe before the fetch resolves.
  const [dashboardData, setDashboardData] = useState({
    recentWorkouts: [],
    upcomingWorkouts: [],
    currentPlan: null,
    planVersions: [],
    lastAnalysis: null,
    syncStatus: null,
    metrics: {}
  });
  useEffect(() => {
    // Single fetch for everything the dashboard needs, run once on mount.
    const fetchDashboardData = async () => {
      try {
        setLoading(true);
        const response = await fetch('/api/dashboard', {
          headers: {
            // NOTE(review): REACT_APP_* values are compiled into the client
            // bundle, so this API key is browser-visible — presumably fine
            // for a single-user self-hosted deployment; confirm.
            'X-API-Key': process.env.REACT_APP_API_KEY
          }
        });
        if (!response.ok) {
          throw new Error(`Failed to load dashboard: ${response.statusText}`);
        }
        const data = await response.json();
        setDashboardData(data);
        setError(null);
      } catch (err) {
        console.error('Dashboard load error:', err);
        setError(err.message);
      } finally {
        // Always clear the spinner, whether the fetch succeeded or failed.
        setLoading(false);
      }
    };
    fetchDashboardData();
  }, []);
  // Callback for GarminSync: refreshes only the sync status; the other
  // dashboard sections keep their last-fetched data.
  const handleSyncComplete = (newSyncStatus) => {
    setDashboardData(prev => ({
      ...prev,
      syncStatus: newSyncStatus
    }));
  };
  // Full-page spinner while the initial fetch is in flight.
  if (loading) {
    return (
      <div className="min-h-screen flex items-center justify-center bg-gray-50">
        <div className="text-center">
          <div className="animate-spin rounded-full h-12 w-12 border-t-2 border-b-2 border-blue-500 mx-auto"></div>
          <p className="mt-4 text-gray-600">Loading your training dashboard...</p>
        </div>
      </div>
    );
  }
  // Full-page error card with a hard-reload retry if the fetch failed.
  if (error) {
    return (
      <div className="min-h-screen flex items-center justify-center bg-gray-50">
        <div className="max-w-md p-6 bg-white rounded-lg shadow-md">
          <div className="text-red-500 text-center mb-4">
            <svg xmlns="http://www.w3.org/2000/svg" className="h-12 w-12 mx-auto" fill="none" viewBox="0 0 24 24" stroke="currentColor">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
            </svg>
          </div>
          <h2 className="text-xl font-bold text-gray-800 mb-2">Dashboard Error</h2>
          <p className="text-gray-600 mb-4">{error}</p>
          <button
            onClick={() => window.location.reload()}
            className="px-4 py-2 bg-blue-600 text-white rounded-md hover:bg-blue-700 transition-colors"
          >
            Try Again
          </button>
        </div>
      </div>
    );
  }
  return (
    <div className="dashboard bg-gray-50 min-h-screen p-4 md:p-6">
      <div className="max-w-7xl mx-auto">
        <div className="mb-6">
          <h1 className="text-2xl md:text-3xl font-bold text-gray-900">Training Dashboard</h1>
          <p className="text-gray-600">Your personalized cycling training overview</p>
        </div>
        {/* Stats Overview */}
        <div className="grid grid-cols-1 md:grid-cols-4 gap-4 mb-6">
          <div className="bg-white p-4 rounded-lg shadow">
            <h3 className="text-gray-500 text-sm font-medium">Weekly Hours</h3>
            <p className="text-2xl font-bold text-gray-900">
              {dashboardData.metrics.weekly_hours || '0'}h
            </p>
          </div>
          <div className="bg-white p-4 rounded-lg shadow">
            <h3 className="text-gray-500 text-sm font-medium">Workouts This Week</h3>
            <p className="text-2xl font-bold text-gray-900">
              {dashboardData.metrics.workouts_this_week || '0'}
            </p>
          </div>
          <div className="bg-white p-4 rounded-lg shadow">
            <h3 className="text-gray-500 text-sm font-medium">Plan Progress</h3>
            <p className="text-2xl font-bold text-gray-900">
              {dashboardData.metrics.plan_progress || '0'}%
            </p>
          </div>
          <div className="bg-white p-4 rounded-lg shadow">
            <h3 className="text-gray-500 text-sm font-medium">Fitness Level</h3>
            <p className="text-2xl font-bold text-gray-900 capitalize">
              {dashboardData.metrics.fitness_level || 'N/A'}
            </p>
          </div>
        </div>
        <div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
          {/* Left Column */}
          <div className="lg:col-span-2 space-y-6">
            {/* Garmin Sync */}
            <div className="bg-white rounded-lg shadow-md p-5">
              <GarminSync onSyncComplete={handleSyncComplete} />
            </div>
            {/* Current Plan */}
            {dashboardData.currentPlan && (
              <div className="bg-white rounded-lg shadow-md p-5">
                <h2 className="text-xl font-bold text-gray-800 mb-4">Current Training Plan</h2>
                <PlanTimeline
                  plan={dashboardData.currentPlan}
                  versions={dashboardData.planVersions}
                />
              </div>
            )}
            {/* Recent Analysis */}
            {dashboardData.lastAnalysis && (
              <div className="bg-white rounded-lg shadow-md p-5">
                <h2 className="text-xl font-bold text-gray-800 mb-4">Latest Workout Analysis</h2>
                <WorkoutAnalysis
                  workout={dashboardData.lastAnalysis.workout}
                  analysis={dashboardData.lastAnalysis}
                />
              </div>
            )}
          </div>
          {/* Right Column */}
          <div className="space-y-6">
            {/* Upcoming Workouts */}
            <div className="bg-white rounded-lg shadow-md p-5">
              <h2 className="text-xl font-bold text-gray-800 mb-4">Upcoming Workouts</h2>
              {dashboardData.upcomingWorkouts.length > 0 ? (
                <div className="space-y-3">
                  {dashboardData.upcomingWorkouts.map(workout => (
                    <div key={workout.id} className="border-b border-gray-100 pb-3 last:border-0">
                      <div className="flex justify-between items-start">
                        <div>
                          <h3 className="font-medium text-gray-800 capitalize">
                            {workout.type.replace(/_/g, ' ')}
                          </h3>
                          <p className="text-sm text-gray-600">
                            {new Date(workout.scheduled_date).toLocaleDateString()} {workout.duration_minutes} min
                          </p>
                        </div>
                        <span className="px-2 py-1 bg-blue-100 text-blue-800 text-xs rounded-full capitalize">
                          {workout.intensity.replace(/_/g, ' ')}
                        </span>
                      </div>
                      {workout.description && (
                        <p className="mt-1 text-sm text-gray-700">{workout.description}</p>
                      )}
                    </div>
                  ))}
                </div>
              ) : (
                <p className="text-gray-500 italic">No upcoming workouts scheduled</p>
              )}
            </div>
            {/* Recent Workouts */}
            <div className="bg-white rounded-lg shadow-md p-5">
              <h2 className="text-xl font-bold text-gray-800 mb-4">Recent Workouts</h2>
              {dashboardData.recentWorkouts.length > 0 ? (
                <div className="space-y-3">
                  {dashboardData.recentWorkouts.map(workout => (
                    <div key={workout.id} className="border-b border-gray-100 pb-3 last:border-0">
                      <div className="flex justify-between">
                        <div>
                          <h3 className="font-medium text-gray-800 capitalize">
                            {workout.activity_type || 'Cycling'}
                          </h3>
                          <p className="text-sm text-gray-600">
                            {new Date(workout.start_time).toLocaleDateString()} {Math.round(workout.duration_seconds / 60)} min
                          </p>
                        </div>
                        <div className="text-right">
                          <span className="block text-sm font-medium">
                            {workout.distance_m ? `${(workout.distance_m / 1000).toFixed(1)} km` : ''}
                          </span>
                          {workout.analysis && workout.analysis.performance_score && (
                            <span className="text-xs px-2 py-0.5 bg-green-100 text-green-800 rounded-full">
                              Score: {workout.analysis.performance_score}/10
                            </span>
                          )}
                        </div>
                      </div>
                      {workout.analysis && workout.analysis.performance_summary && (
                        <p className="mt-1 text-sm text-gray-700 line-clamp-2">
                          {workout.analysis.performance_summary}
                        </p>
                      )}
                    </div>
                  ))}
                </div>
              ) : (
                <p className="text-gray-500 italic">No recent workouts recorded</p>
              )}
            </div>
            {/* Quick Actions */}
            {/* NOTE(review): these buttons have no onClick handlers, so they do
                nothing when clicked — presumably placeholders; confirm. */}
            <div className="bg-white rounded-lg shadow-md p-5">
              <h2 className="text-xl font-bold text-gray-800 mb-4">Quick Actions</h2>
              <div className="grid grid-cols-2 gap-3">
                <button className="px-3 py-2 bg-blue-600 text-white rounded-md text-sm font-medium hover:bg-blue-700 transition-colors">
                  Generate New Plan
                </button>
                <button className="px-3 py-2 bg-green-600 text-white rounded-md text-sm font-medium hover:bg-green-700 transition-colors">
                  Add Custom Workout
                </button>
                <button className="px-3 py-2 bg-purple-600 text-white rounded-md text-sm font-medium hover:bg-purple-700 transition-colors">
                  View All Routes
                </button>
                <button className="px-3 py-2 bg-yellow-600 text-white rounded-md text-sm font-medium hover:bg-yellow-700 transition-colors">
                  Update Rules
                </button>
              </div>
            </div>
          </div>
        </div>
      </div>
    </div>
  );
};
export default Dashboard;

View File

@@ -0,0 +1,49 @@
import { useEffect, useState } from 'react';
// Landing page: welcome card plus a live backend health indicator.
export default function Home() {
  // Shown until the health probe below resolves one way or the other.
  const [healthStatus, setHealthStatus] = useState<string>('checking...');
  useEffect(() => {
    // Probe the backend's /health endpoint once on mount.
    const checkBackendHealth = async () => {
      try {
        // NOTE(review): 'backend' is the docker-compose service hostname and
        // only resolves inside the compose network. This fetch runs in the
        // user's browser (useEffect is client-side), where the name will not
        // resolve — confirm whether a relative URL behind a proxy was intended.
        const response = await fetch('http://backend:8000/health');
        const data = await response.json();
        setHealthStatus(data.status);
      } catch (error) {
        // Any network/parse failure is surfaced as 'unavailable'.
        setHealthStatus('unavailable');
        console.error('Error checking backend health:', error);
      }
    };
    checkBackendHealth();
  }, []);
  return (
    <div className="min-h-screen flex flex-col items-center justify-center bg-gray-50">
      <div className="max-w-2xl w-full p-8 bg-white rounded-lg shadow-md">
        <h1 className="text-3xl font-bold text-center text-gray-800 mb-6">
          Welcome to AI Cycling Coach
        </h1>
        <p className="text-lg text-gray-600 mb-8 text-center">
          Your AI-powered training companion for cyclists
        </p>
        <div className="bg-blue-50 p-4 rounded-lg border border-blue-200">
          <h2 className="text-lg font-semibold text-blue-800 mb-2">
            System Status
          </h2>
          <div className="flex items-center">
            {/* Green dot only for an exact 'healthy' status; anything else
                (including 'checking...') renders red. */}
            <div className={`h-3 w-3 rounded-full mr-2 ${healthStatus === 'healthy' ? 'bg-green-500' : 'bg-red-500'}`}></div>
            <span className="text-gray-700">
              Backend service: {healthStatus}
            </span>
          </div>
        </div>
        <p className="mt-8 text-center text-gray-500">
          Development in progress - more features coming soon!
        </p>
      </div>
    </div>
  );
}