diff --git a/CL_plan.md b/CL_plan.md
index 722badd..c6cc08b 100644
--- a/CL_plan.md
+++ b/CL_plan.md
@@ -1,822 +1,64 @@
-# AI-Assisted Cycling Coach - Updated Implementation Plan
-
-## Overview
-This document outlines the implementation plan for a single-user, self-hosted AI-assisted cycling coach application with Python backend, PostgreSQL database, GPX file storage, and web frontend.
-
-## Architecture Components
-- **Backend**: Python FastAPI (async)
-- **Database**: PostgreSQL with versioning support
-- **File Storage**: Local directory for GPX files (up to 200 files)
-- **Frontend**: React/Next.js
-- **AI Integration**: OpenRouter API
-- **Garmin Integration**: garth or garmin-connect Python modules
-- **Authentication**: Simple API key for single-user setup
-- **Containerization**: Docker + Docker Compose (self-hosted)
-
-## Implementation Phases
-
-### Phase 1: Project Setup and Foundation ✅ (Week 1-2)
-**Status: Complete**
-
-1. **Initialize Project Structure**
- ```
- /
- ├── backend/
- │ ├── app/
- │ │ ├── __init__.py
- │ │ ├── main.py
- │ │ ├── models/
- │ │ ├── routes/
- │ │ ├── services/
- │ │ └── utils/
- │ ├── requirements.txt
- │ └── Dockerfile
- ├── frontend/
- │ ├── src/
- │ ├── public/
- │ ├── package.json
- │ └── Dockerfile
- ├── docker-compose.yml
- ├── .env.example
- └── README.md
- ```
-
-2. **Docker Environment Setup**
- ```yaml
- version: '3.9'
- services:
- backend:
- build: ./backend
- ports:
- - "8000:8000"
- volumes:
- - gpx-data:/app/data/gpx
- - garmin-sessions:/app/data/sessions
- environment:
- - DATABASE_URL=postgresql://postgres:password@db:5432/cycling
- - GARMIN_USERNAME=${GARMIN_USERNAME}
- - GARMIN_PASSWORD=${GARMIN_PASSWORD}
- - OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
- - AI_MODEL=${AI_MODEL:-claude-3-sonnet-20240229}
- - API_KEY=${API_KEY}
- depends_on:
- - db
-
- frontend:
- build: ./frontend
- ports:
- - "3000:3000"
- environment:
- - REACT_APP_API_URL=http://localhost:8000
- - REACT_APP_API_KEY=${API_KEY}
-
- db:
- image: postgres:15
- restart: always
- environment:
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: password
- POSTGRES_DB: cycling
- volumes:
- - postgres-data:/var/lib/postgresql/data
-
- volumes:
- gpx-data:
- driver: local
- garmin-sessions:
- driver: local
- postgres-data:
- driver: local
- ```
-
-3. **Database Schema with Enhanced Versioning**
- ```sql
- -- Routes & Sections
- CREATE TABLE routes (
- id SERIAL PRIMARY KEY,
- name TEXT NOT NULL,
- created_at TIMESTAMP DEFAULT now()
- );
-
- CREATE TABLE sections (
- id SERIAL PRIMARY KEY,
- route_id INT REFERENCES routes(id),
- gpx_file_path TEXT NOT NULL,
- distance_m NUMERIC,
- grade_avg NUMERIC,
- min_gear TEXT,
- est_time_minutes NUMERIC,
- created_at TIMESTAMP DEFAULT now()
- );
-
- -- Rules with versioning and evolution tracking
- CREATE TABLE rules (
- id SERIAL PRIMARY KEY,
- name TEXT NOT NULL,
- user_defined BOOLEAN DEFAULT true,
- jsonb_rules JSONB NOT NULL,
- version INT DEFAULT 1,
- parent_rule_id INT REFERENCES rules(id),
- created_at TIMESTAMP DEFAULT now()
- );
-
- -- Plans with versioning and evolution tracking
- CREATE TABLE plans (
- id SERIAL PRIMARY KEY,
- jsonb_plan JSONB NOT NULL,
- version INT NOT NULL,
- parent_plan_id INT REFERENCES plans(id),
- created_at TIMESTAMP DEFAULT now()
- );
-
- -- Workouts with Garmin integration
- CREATE TABLE workouts (
- id SERIAL PRIMARY KEY,
- plan_id INT REFERENCES plans(id),
- garmin_activity_id TEXT UNIQUE NOT NULL,
- activity_type TEXT,
- start_time TIMESTAMP,
- duration_seconds INT,
- distance_m NUMERIC,
- avg_hr INT,
- max_hr INT,
- avg_power NUMERIC,
- max_power NUMERIC,
- avg_cadence NUMERIC,
- elevation_gain_m NUMERIC,
- metrics JSONB, -- Additional Garmin data
- created_at TIMESTAMP DEFAULT now()
- );
-
- -- Analyses with enhanced feedback structure
- CREATE TABLE analyses (
- id SERIAL PRIMARY KEY,
- workout_id INT REFERENCES workouts(id),
- analysis_type TEXT DEFAULT 'workout_review',
- jsonb_feedback JSONB,
- suggestions JSONB,
- approved BOOLEAN DEFAULT false,
- created_at TIMESTAMP DEFAULT now()
- );
-
- -- AI Prompts with versioning
- CREATE TABLE prompts (
- id SERIAL PRIMARY KEY,
- action_type TEXT, -- plan_generation, workout_analysis, rule_parsing, suggestions
- model TEXT,
- prompt_text TEXT,
- version INT DEFAULT 1,
- active BOOLEAN DEFAULT true,
- created_at TIMESTAMP DEFAULT now()
- );
-
- -- Garmin sync status tracking
- CREATE TABLE garmin_sync_log (
- id SERIAL PRIMARY KEY,
- last_sync_time TIMESTAMP,
- activities_synced INT DEFAULT 0,
- status TEXT, -- success, error, in_progress
- error_message TEXT,
- created_at TIMESTAMP DEFAULT now()
- );
- ```
-
-### Phase 2: Core Backend Implementation ✅ (Week 3-5)
-**Status: Complete**
-
-1. **Database Models with SQLAlchemy**
-2. **Basic API Endpoints**
-3. **GPX File Handling**
-4. **Basic Authentication Middleware**
-
-### Phase 3: Enhanced Backend + Garmin Integration (Week 6-8)
-
-#### Week 6: Garmin Integration
-1. **Garmin Service Implementation**
- ```python
- # backend/app/services/garmin.py
- import os
- import garth
- from typing import List, Dict, Any, Optional
- from datetime import datetime, timedelta
-
- class GarminService:
- def __init__(self):
- self.username = os.getenv("GARMIN_USERNAME")
- self.password = os.getenv("GARMIN_PASSWORD")
- self.client: Optional[garth.Client] = None
- self.session_dir = "/app/data/sessions"
-
- async def authenticate(self):
- """Authenticate with Garmin Connect and persist session."""
- if not self.client:
- self.client = garth.Client()
-
- try:
- # Try to load existing session
- self.client.load(self.session_dir)
- except Exception:
- # Fresh authentication
- await self.client.login(self.username, self.password)
- self.client.save(self.session_dir)
-
- async def get_activities(self, limit: int = 10, start_date: datetime = None) -> List[Dict[str, Any]]:
- """Fetch recent activities from Garmin Connect."""
- if not self.client:
- await self.authenticate()
-
- if not start_date:
- start_date = datetime.now() - timedelta(days=7)
-
- activities = self.client.get_activities(limit=limit, start=start_date)
- return activities
-
- async def get_activity_details(self, activity_id: str) -> Dict[str, Any]:
- """Get detailed activity data including metrics."""
- if not self.client:
- await self.authenticate()
-
- details = self.client.get_activity(activity_id)
- return details
- ```
-
-2. **Workout Sync Service**
- ```python
- # backend/app/services/workout_sync.py
- from sqlalchemy.ext.asyncio import AsyncSession
- from app.services.garmin import GarminService
- from app.models.workout import Workout
- from app.models.garmin_sync_log import GarminSyncLog
-
- class WorkoutSyncService:
- def __init__(self, db: AsyncSession):
- self.db = db
- self.garmin_service = GarminService()
-
- async def sync_recent_activities(self, days_back: int = 7):
- """Sync recent Garmin activities to database."""
- try:
- sync_log = GarminSyncLog(status="in_progress")
- self.db.add(sync_log)
- await self.db.commit()
-
- start_date = datetime.now() - timedelta(days=days_back)
- activities = await self.garmin_service.get_activities(
- limit=50, start_date=start_date
- )
-
- synced_count = 0
- for activity in activities:
- if await self.activity_exists(activity['activityId']):
- continue
-
- workout_data = await self.parse_activity_data(activity)
- workout = Workout(**workout_data)
- self.db.add(workout)
- synced_count += 1
-
- sync_log.status = "success"
- sync_log.activities_synced = synced_count
- sync_log.last_sync_time = datetime.now()
-
- await self.db.commit()
- return synced_count
-
- except Exception as e:
- sync_log.status = "error"
- sync_log.error_message = str(e)
- await self.db.commit()
- raise
-
- async def activity_exists(self, garmin_activity_id: str) -> bool:
- """Check if activity already exists in database."""
- result = await self.db.execute(
- select(Workout).where(Workout.garmin_activity_id == garmin_activity_id)
- )
- return result.scalar_one_or_none() is not None
-
- async def parse_activity_data(self, activity: Dict[str, Any]) -> Dict[str, Any]:
- """Parse Garmin activity data into workout model format."""
- return {
- "garmin_activity_id": activity['activityId'],
- "activity_type": activity.get('activityType', {}).get('typeKey'),
- "start_time": datetime.fromisoformat(activity['startTimeLocal'].replace('Z', '+00:00')),
- "duration_seconds": activity.get('duration'),
- "distance_m": activity.get('distance'),
- "avg_hr": activity.get('averageHR'),
- "max_hr": activity.get('maxHR'),
- "avg_power": activity.get('avgPower'),
- "max_power": activity.get('maxPower'),
- "avg_cadence": activity.get('averageBikingCadenceInRevPerMinute'),
- "elevation_gain_m": activity.get('elevationGain'),
- "metrics": activity # Store full Garmin data as JSONB
- }
- ```
-
-3. **Background Tasks Setup**
- ```python
- # backend/app/main.py
- from fastapi import BackgroundTasks
- from app.services.workout_sync import WorkoutSyncService
-
- @app.post("/api/workouts/sync")
- async def trigger_garmin_sync(
- background_tasks: BackgroundTasks,
- db: AsyncSession = Depends(get_db)
- ):
- """Trigger background sync of recent Garmin activities."""
- sync_service = WorkoutSyncService(db)
- background_tasks.add_task(sync_service.sync_recent_activities, days_back=14)
- return {"message": "Garmin sync started"}
-
- @app.get("/api/workouts/sync-status")
- async def get_sync_status(db: AsyncSession = Depends(get_db)):
- """Get the latest sync status."""
- result = await db.execute(
- select(GarminSyncLog).order_by(GarminSyncLog.created_at.desc()).limit(1)
- )
- sync_log = result.scalar_one_or_none()
- return sync_log
- ```
-
-#### Week 7: Enhanced AI Integration
-1. **Prompt Management System**
- ```python
- # backend/app/services/prompt_manager.py
- class PromptManager:
- def __init__(self, db: AsyncSession):
- self.db = db
-
- async def get_active_prompt(self, action_type: str, model: str = None) -> Optional[str]:
- """Get the active prompt for a specific action type."""
- query = select(Prompt).where(
- Prompt.action_type == action_type,
- Prompt.active == True
- )
- if model:
- query = query.where(Prompt.model == model)
-
- result = await self.db.execute(query.order_by(Prompt.version.desc()))
- prompt = result.scalar_one_or_none()
- return prompt.prompt_text if prompt else None
-
- async def create_prompt_version(
- self,
- action_type: str,
- prompt_text: str,
- model: str = None
- ) -> Prompt:
- """Create a new version of a prompt."""
- # Deactivate previous versions
- await self.db.execute(
- update(Prompt)
- .where(Prompt.action_type == action_type)
- .values(active=False)
- )
-
- # Get next version number
- result = await self.db.execute(
- select(func.max(Prompt.version))
- .where(Prompt.action_type == action_type)
- )
- max_version = result.scalar() or 0
-
- # Create new prompt
- new_prompt = Prompt(
- action_type=action_type,
- model=model,
- prompt_text=prompt_text,
- version=max_version + 1,
- active=True
- )
-
- self.db.add(new_prompt)
- await self.db.commit()
- return new_prompt
- ```
-
-2. **Enhanced AI Service**
- ```python
- # backend/app/services/ai_service.py
- import asyncio
- from typing import Dict, Any, List
- import httpx
- from app.services.prompt_manager import PromptManager
-
- class AIService:
- def __init__(self, db: AsyncSession):
- self.db = db
- self.prompt_manager = PromptManager(db)
- self.api_key = os.getenv("OPENROUTER_API_KEY")
- self.model = os.getenv("AI_MODEL", "anthropic/claude-3-sonnet-20240229")
- self.base_url = "https://openrouter.ai/api/v1"
-
- async def analyze_workout(self, workout: Workout, plan: Optional[Dict] = None) -> Dict[str, Any]:
- """Analyze a workout using AI and generate feedback."""
- prompt_template = await self.prompt_manager.get_active_prompt("workout_analysis")
-
- if not prompt_template:
- raise ValueError("No active workout analysis prompt found")
-
- # Build context from workout data
- workout_context = {
- "activity_type": workout.activity_type,
- "duration_minutes": workout.duration_seconds / 60 if workout.duration_seconds else 0,
- "distance_km": workout.distance_m / 1000 if workout.distance_m else 0,
- "avg_hr": workout.avg_hr,
- "avg_power": workout.avg_power,
- "elevation_gain": workout.elevation_gain_m,
- "planned_workout": plan
- }
-
- prompt = prompt_template.format(**workout_context)
-
- response = await self._make_ai_request(prompt)
- return self._parse_workout_analysis(response)
-
- async def generate_plan(self, rules: List[Dict], goals: Dict[str, Any]) -> Dict[str, Any]:
- """Generate a training plan using AI."""
- prompt_template = await self.prompt_manager.get_active_prompt("plan_generation")
-
- context = {
- "rules": rules,
- "goals": goals,
- "current_fitness_level": goals.get("fitness_level", "intermediate")
- }
-
- prompt = prompt_template.format(**context)
- response = await self._make_ai_request(prompt)
- return self._parse_plan_response(response)
-
- async def parse_rules_from_natural_language(self, natural_language: str) -> Dict[str, Any]:
- """Parse natural language rules into structured format."""
- prompt_template = await self.prompt_manager.get_active_prompt("rule_parsing")
- prompt = prompt_template.format(user_rules=natural_language)
-
- response = await self._make_ai_request(prompt)
- return self._parse_rules_response(response)
-
- async def _make_ai_request(self, prompt: str) -> str:
- """Make async request to OpenRouter API with retry logic."""
- async with httpx.AsyncClient() as client:
- for attempt in range(3): # Simple retry logic
- try:
- response = await client.post(
- f"{self.base_url}/chat/completions",
- headers={
- "Authorization": f"Bearer {self.api_key}",
- "Content-Type": "application/json",
- },
- json={
- "model": self.model,
- "messages": [{"role": "user", "content": prompt}],
- "max_tokens": 2000,
- },
- timeout=30.0
- )
- response.raise_for_status()
- data = response.json()
- return data["choices"][0]["message"]["content"]
-
- except Exception as e:
- if attempt == 2: # Last attempt
- raise AIServiceError(f"AI request failed after 3 attempts: {str(e)}")
- await asyncio.sleep(2 ** attempt) # Exponential backoff
-
- def _parse_workout_analysis(self, response: str) -> Dict[str, Any]:
- """Parse AI response for workout analysis."""
- # Implementation depends on your prompt design
- # This is a simplified example
- try:
- import json
- # Assume AI returns JSON
- clean_response = response.strip()
- if clean_response.startswith("```json"):
- clean_response = clean_response[7:-3]
- return json.loads(clean_response)
- except json.JSONDecodeError:
- return {"raw_analysis": response, "structured": False}
- ```
-
-#### Week 8: Plan Evolution & Analysis Pipeline
-1. **Plan Evolution Service**
- ```python
- # backend/app/services/plan_evolution.py
- class PlanEvolutionService:
- def __init__(self, db: AsyncSession):
- self.db = db
- self.ai_service = AIService(db)
-
- async def evolve_plan_from_analysis(
- self,
- analysis: Analysis,
- current_plan: Plan
- ) -> Optional[Plan]:
- """Create a new plan version based on workout analysis."""
- if not analysis.approved:
- return None
-
- suggestions = analysis.suggestions
- if not suggestions:
- return None
-
- # Generate new plan incorporating suggestions
- evolution_context = {
- "current_plan": current_plan.jsonb_plan,
- "workout_analysis": analysis.jsonb_feedback,
- "suggestions": suggestions,
- "evolution_type": "workout_feedback"
- }
-
- new_plan_data = await self.ai_service.evolve_plan(evolution_context)
-
- # Create new plan version
- new_plan = Plan(
- jsonb_plan=new_plan_data,
- version=current_plan.version + 1,
- parent_plan_id=current_plan.id
- )
-
- self.db.add(new_plan)
- await self.db.commit()
- return new_plan
- ```
-
-2. **Enhanced API Endpoints**
- ```python
- # backend/app/routes/workouts.py
- @router.post("/workouts/{workout_id}/analyze")
- async def analyze_workout(
- workout_id: int,
- background_tasks: BackgroundTasks,
- db: AsyncSession = Depends(get_db)
- ):
- """Trigger AI analysis of a specific workout."""
- workout = await get_workout_by_id(db, workout_id)
- if not workout:
- raise HTTPException(status_code=404, detail="Workout not found")
-
- ai_service = AIService(db)
- background_tasks.add_task(
- analyze_and_store_workout,
- db, workout, ai_service
- )
-
- return {"message": "Analysis started", "workout_id": workout_id}
-
- @router.post("/analyses/{analysis_id}/approve")
- async def approve_analysis(
- analysis_id: int,
- db: AsyncSession = Depends(get_db)
- ):
- """Approve analysis suggestions and trigger plan evolution."""
- analysis = await get_analysis_by_id(db, analysis_id)
- analysis.approved = True
-
- # Trigger plan evolution if suggestions exist
- if analysis.suggestions:
- evolution_service = PlanEvolutionService(db)
- current_plan = await get_current_active_plan(db)
- if current_plan:
- new_plan = await evolution_service.evolve_plan_from_analysis(
- analysis, current_plan
- )
- return {"message": "Analysis approved", "new_plan_id": new_plan.id}
-
- await db.commit()
- return {"message": "Analysis approved"}
- ```
-
-### Phase 4: Frontend Implementation (Week 9-11)
-
-#### Week 9: Core Components
-1. **Garmin Sync Interface**
- ```jsx
- // frontend/src/components/GarminSync.jsx
- import { useState, useEffect } from 'react';
-
- const GarminSync = () => {
- const [syncStatus, setSyncStatus] = useState(null);
- const [syncing, setSyncing] = useState(false);
-
- const triggerSync = async () => {
- setSyncing(true);
- try {
- await fetch('/api/workouts/sync', { method: 'POST' });
- // Poll for status updates
- pollSyncStatus();
- } catch (error) {
- console.error('Sync failed:', error);
- setSyncing(false);
- }
- };
-
- const pollSyncStatus = () => {
- const interval = setInterval(async () => {
- const response = await fetch('/api/workouts/sync-status');
- const status = await response.json();
- setSyncStatus(status);
-
- if (status.status !== 'in_progress') {
- setSyncing(false);
- clearInterval(interval);
- }
- }, 2000);
- };
-
-    return (
-      <div>
-        <h3>Garmin Connect Sync</h3>
-        <button onClick={triggerSync} disabled={syncing}>
-          {syncing ? 'Syncing...' : 'Sync Recent Activities'}
-        </button>
-        {syncStatus && (
-          <div>
-            <p>Last sync: {new Date(syncStatus.last_sync_time).toLocaleString()}</p>
-            <p>Status: {syncStatus.status}</p>
-            {syncStatus.activities_synced > 0 && (
-              <p>Activities synced: {syncStatus.activities_synced}</p>
-            )}
-          </div>
-        )}
-      </div>
-    );
- };
- ```
-
-2. **Workout Analysis Interface**
- ```jsx
- // frontend/src/components/WorkoutAnalysis.jsx
- const WorkoutAnalysis = ({ workout, analysis }) => {
- const [approving, setApproving] = useState(false);
-
- const approveAnalysis = async () => {
- setApproving(true);
- try {
- const response = await fetch(`/api/analyses/${analysis.id}/approve`, {
- method: 'POST'
- });
- const result = await response.json();
-
- if (result.new_plan_id) {
- // Navigate to new plan or show success message
- console.log('New plan created:', result.new_plan_id);
- }
- } catch (error) {
- console.error('Approval failed:', error);
- } finally {
- setApproving(false);
- }
- };
-
-    return (
-      <div>
-        <div>
-          <h3>{workout.activity_type} - {new Date(workout.start_time).toLocaleDateString()}</h3>
-          <p>Duration: {Math.round(workout.duration_seconds / 60)}min</p>
-          <p>Distance: {(workout.distance_m / 1000).toFixed(1)}km</p>
-          {workout.avg_power && <p>Avg Power: {workout.avg_power}W</p>}
-          {workout.avg_hr && <p>Avg HR: {workout.avg_hr}bpm</p>}
-        </div>
-
-        {analysis && (
-          <div>
-            <h4>AI Analysis</h4>
-            <p>{analysis.jsonb_feedback.summary}</p>
-
-            {analysis.suggestions && (
-              <div>
-                <h5>Suggestions</h5>
-                <ul>
-                  {analysis.suggestions.map((suggestion, index) => (
-                    <li key={index}>{suggestion}</li>
-                  ))}
-                </ul>
-
-                {!analysis.approved && (
-                  <button onClick={approveAnalysis} disabled={approving}>
-                    {approving ? 'Approving...' : 'Approve Suggestions'}
-                  </button>
-                )}
-              </div>
-            )}
-          </div>
-        )}
-      </div>
-    );
-  };
- ```
-
-#### Week 10: Data Visualization
-1. **Workout Charts**
- ```jsx
- // Using recharts for workout data visualization
- import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, Legend } from 'recharts';
-
- const WorkoutChart = ({ workoutData }) => {
-    return (
-      <div>
-        <h3>Workout Metrics</h3>
-        <LineChart width={800} height={400} data={workoutData}>
-          <CartesianGrid strokeDasharray="3 3" />
-          <XAxis dataKey="time" />
-          <YAxis />
-          <Tooltip />
-          <Legend />
-          <Line type="monotone" dataKey="power" stroke="#8884d8" />
-          <Line type="monotone" dataKey="heartRate" stroke="#82ca9d" />
-        </LineChart>
-      </div>
- );
- };
- ```
-
-2. **Plan Timeline View**
- ```jsx
- // Plan visualization with version history
- const PlanTimeline = ({ plan, versions }) => {
-    return (
-      <div>
-        <h3>Training Plan - Version {plan.version}</h3>
-
-        {versions.length > 1 && (
-          <div>
-            <h4>Version History</h4>
-            {versions.map(version => (
-              <div key={version.id}>
-                <span>v{version.version}</span>
-                <span>{new Date(version.created_at).toLocaleDateString()}</span>
-                {version.parent_plan_id && <span>→ Evolved from analysis</span>}
-              </div>
-            ))}
-          </div>
-        )}
-
-        {plan.jsonb_plan.weeks.map((week, index) => (
-          <div key={index}>
-            <h4>Week {index + 1}</h4>
-            {week.workouts.map((workout, wIndex) => (
-              <div key={wIndex}>
-                <span>{workout.type}</span>
-                <span>{workout.duration}min</span>
-                <span>{workout.intensity}</span>
-              </div>
-            ))}
-          </div>
-        ))}
-      </div>
-    );
-  };
- ```
-
-#### Week 11: Integration & Polish
-1. **Dashboard Overview**
-2. **File Upload Improvements**
-3. **Error Handling & Loading States**
-4. **Responsive Design**
-
### Phase 5: Testing and Deployment (Week 12-13)
#### Week 12: Testing
1. **Backend Testing**
+ - Implement comprehensive unit tests for critical services:
+ - Garmin sync service (mock API responses)
+ - AI service (mock OpenRouter API)
+ - Workflow services (plan generation, evolution)
+ - API endpoint testing with realistic payloads
+ - Error handling and edge case testing
+ - Database operation tests (including rollback scenarios)
+
+ Example test for Garmin service:
```python
# tests/test_garmin_service.py
import pytest
from unittest.mock import AsyncMock, patch
from app.services.garmin import GarminService
+ from app.exceptions import GarminAuthError
@pytest.mark.asyncio
- async def test_garmin_authentication():
- with patch('garth.Client') as mock_client:
+ async def test_garmin_auth_failure():
+ with patch('garth.Client', side_effect=Exception("Auth failed")):
service = GarminService()
- await service.authenticate()
- mock_client.return_value.login.assert_called_once()
-
- @pytest.mark.asyncio
- async def test_activity_sync(db_session):
- # Test workout sync functionality
- pass
+ with pytest.raises(GarminAuthError):
+ await service.authenticate()
```
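+
+   And a sketch of a rollback test (assumes an async `db_session` fixture
+   bound to a disposable test database):
+   ```python
+   # tests/test_db_rollback.py
+   import pytest
+   from sqlalchemy import select
+   from app.models.workout import Workout
+
+   @pytest.mark.asyncio
+   async def test_insert_rolled_back_on_error(db_session):
+       db_session.add(Workout(garmin_activity_id="test-123", activity_type="cycling"))
+       await db_session.flush()
+
+       # Simulate a failure mid-transaction
+       await db_session.rollback()
+
+       result = await db_session.execute(
+           select(Workout).where(Workout.garmin_activity_id == "test-123")
+       )
+       assert result.scalar_one_or_none() is None
+   ```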
-2. **Integration Tests**
-3. **Frontend Component Tests**
+2. **Integration Testing**
+ - Test full Garmin sync workflow: authentication → activity fetch → storage
+ - Verify AI analysis pipeline: workout → analysis → plan evolution
+ - Database transaction tests across multiple operations
+ - File system integration tests (GPX upload/download)
+
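+   Example integration test for the sync workflow (a sketch: the garth-backed
+   service is mocked so no real Garmin credentials are needed, and a
+   `db_session` fixture is assumed):
+   ```python
+   # tests/test_sync_integration.py
+   import pytest
+   from unittest.mock import AsyncMock, patch
+   from app.services.workout_sync import WorkoutSyncService
+
+   @pytest.mark.asyncio
+   async def test_sync_stores_new_activity(db_session):
+       fake_activity = {
+           "activityId": "12345",
+           "activityType": {"typeKey": "cycling"},
+           "startTimeLocal": "2024-01-15T10:30:00",
+           "duration": 3600,
+           "distance": 30000,
+       }
+       service = WorkoutSyncService(db_session)
+       # Stub the Garmin call so the test never touches the network
+       with patch.object(
+           service.garmin_service, "get_activities",
+           new=AsyncMock(return_value=[fake_activity]),
+       ):
+           synced = await service.sync_recent_activities(days_back=7)
+       assert synced == 1
+   ```
+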
+3. **Frontend Testing**
+ - Component tests using React Testing Library
+ - User workflow tests (upload GPX → generate plan → analyze workout)
+ - API response handling and error display tests
+ - Responsive design verification across devices
+
+ Example component test:
+ ```javascript
+ // frontend/src/components/__tests__/GarminSync.test.jsx
+ import { render, screen, fireEvent } from '@testing-library/react';
+ import GarminSync from '../GarminSync';
+
+   test('shows sync status after triggering', async () => {
+     render(<GarminSync />);
+     fireEvent.click(screen.getByText('Sync Recent Activities'));
+     expect(await screen.findByText('Syncing...')).toBeInTheDocument();
+   });
+ ```
+
+4. **Continuous Integration Setup**
+ - Configure GitHub Actions pipeline:
+ - Backend test suite (Python)
+ - Frontend test suite (Jest)
+ - Security scanning (dependencies, secrets)
+ - Docker image builds on successful tests
+ - Automated database migration checks
+ - Test coverage reporting
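+
+   A minimal pipeline sketch (workflow layout and action versions are
+   illustrative, not final):
+   ```yaml
+   # .github/workflows/ci.yml
+   name: CI
+   on: [push, pull_request]
+
+   jobs:
+     backend:
+       runs-on: ubuntu-latest
+       steps:
+         - uses: actions/checkout@v4
+         - uses: actions/setup-python@v5
+           with:
+             python-version: '3.11'
+         - run: pip install -r backend/requirements.txt pytest pytest-asyncio
+         - run: pytest backend/tests
+
+     frontend:
+       runs-on: ubuntu-latest
+       steps:
+         - uses: actions/checkout@v4
+         - run: npm ci --prefix frontend
+         - run: npm test --prefix frontend -- --watchAll=false
+   ```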
#### Week 13: Deployment Preparation
1. **Environment Configuration**
@@ -830,523 +72,26 @@ This document outlines the implementation plan for a single-user, self-hosted AI
```
2. **Production Docker Setup**
-3. **Backup Strategy for Database and GPX Files**
+ - Optimize Dockerfiles for production:
+ - Multi-stage builds
+ - Minimized image sizes
+ - Proper user permissions
+ - Health checks for all services
+ - Resource limits in docker-compose.prod.yml
+
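+   A sketch of the multi-stage backend image (base image, stage layout, and
+   healthcheck command are assumptions illustrating the pattern):
+   ```dockerfile
+   # backend/Dockerfile.prod (sketch)
+   FROM python:3.11-slim AS builder
+   WORKDIR /app
+   COPY requirements.txt .
+   RUN pip install --prefix=/install --no-cache-dir -r requirements.txt
+
+   FROM python:3.11-slim
+   WORKDIR /app
+   COPY --from=builder /install /usr/local
+   COPY . .
+   # Run as an unprivileged user, mirroring the dev Dockerfile
+   RUN useradd -m appuser && mkdir -p /app/logs && chown -R appuser:appuser /app
+   USER appuser
+   HEALTHCHECK --interval=30s --timeout=10s \
+     CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8000/health')"
+   EXPOSE 8000
+   CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+   ```
+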
+3. **Backup Strategy**
+ - Implement daily automated backups:
+ - Database (pg_dump)
+ - GPX files
+ - Garmin sessions
+ - Backup rotation (keep last 30 days)
+ - Verify restore procedure
+
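+   A sketch of the nightly job (assumes `backup_restore.py` exposes a `backup`
+   subcommand; adjust to the script's real CLI):
+   ```bash
+   #!/bin/bash
+   # scripts/nightly_backup.sh — cron entry e.g.: 0 2 * * * /path/to/nightly_backup.sh
+   set -euo pipefail
+
+   BACKUP_DIR=/home/user/cycling-coach/backups
+
+   # Full backup: database dump + GPX files + Garmin sessions
+   docker compose exec -T backend python scripts/backup_restore.py backup
+
+   # Rotation: keep only the last 30 days
+   find "$BACKUP_DIR" -mindepth 1 -maxdepth 1 -mtime +30 -exec rm -rf {} +
+   ```
+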
4. **Monitoring and Logging**
+ - Structured logging with log rotation
+ - System health dashboard
+ - Error tracking and alerting
+ - Performance monitoring
## Key Technical Decisions
-
-### Single-User Simplifications
-- **Authentication**: Simple API key instead of complex user management
-- **File Storage**: Local filesystem (200 GPX files easily manageable)
-- **Database**: Single tenant, no multi-user complexity
-- **Deployment**: Self-hosted container, no cloud scaling needs
-
-### Garmin Integration Strategy
-- **garth library**: Python library for Garmin Connect API
-- **Session persistence**: Store auth sessions in mounted volume
-- **Background sync**: Async background tasks for activity fetching
-- **Retry logic**: Handle API rate limits and temporary failures
-
-### AI Integration Approach
-- **Prompt versioning**: Database-stored prompts with version control
-- **Async processing**: Non-blocking AI calls with background tasks
-- **Cost management**: Simple retry logic, no complex rate limiting needed for single user
-- **Response parsing**: Flexible parsing for different AI response formats
-
-### Database Design Philosophy
-- **Versioning everywhere**: Plans, rules, and prompts all support evolution
-- **JSONB storage**: Flexible storage for AI responses and complex data
-- **Audit trail**: Track plan evolution and analysis approval history
-
-## Environment Variables
-```bash
-# Required environment variables
-GARMIN_USERNAME=your_garmin_email
-GARMIN_PASSWORD=your_garmin_password
-OPENROUTER_API_KEY=your_openrouter_key
-AI_MODEL=anthropic/claude-3-sonnet-20240229
-API_KEY=your_secure_api_key
-
-# Optional
-DATABASE_URL=postgresql://postgres:password@db:5432/cycling
-REACT_APP_API_URL=http://localhost:8000
-REACT_APP_API_KEY=your_secure_api_key
-```
-
-## Python Standards and Best Practices
-
-### Code Style and Structure
-```python
-# Example: Proper async service implementation
-from typing import Optional, List, Dict, Any
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import select, update
-import logging
-
-logger = logging.getLogger(__name__)
-
-class WorkoutAnalysisService:
- """Service for analyzing workout data with AI assistance."""
-
- def __init__(self, db: AsyncSession):
- self.db = db
- self.ai_service = AIService(db)
-
- async def analyze_workout_performance(
- self,
- workout_id: int,
- comparison_metrics: Optional[Dict[str, Any]] = None
- ) -> Dict[str, Any]:
- """
- Analyze workout performance against planned metrics.
-
- Args:
- workout_id: The workout to analyze
- comparison_metrics: Optional baseline metrics for comparison
-
- Returns:
- Dict containing analysis results and suggestions
-
- Raises:
- WorkoutNotFoundError: If workout doesn't exist
- AIServiceError: If AI analysis fails
- """
- try:
- workout = await self._get_workout(workout_id)
- if not workout:
- raise WorkoutNotFoundError(f"Workout {workout_id} not found")
-
- analysis_data = await self._prepare_analysis_context(workout, comparison_metrics)
- ai_analysis = await self.ai_service.analyze_workout(analysis_data)
-
- # Store analysis results
- analysis_record = await self._store_analysis(workout_id, ai_analysis)
-
- logger.info(f"Successfully analyzed workout {workout_id}")
- return {
- "analysis_id": analysis_record.id,
- "feedback": ai_analysis.get("feedback"),
- "suggestions": ai_analysis.get("suggestions"),
- "performance_score": ai_analysis.get("score")
- }
-
- except Exception as e:
- logger.error(f"Failed to analyze workout {workout_id}: {str(e)}")
- raise
-
- async def _get_workout(self, workout_id: int) -> Optional[Workout]:
- """Retrieve workout by ID."""
- result = await self.db.execute(
- select(Workout).where(Workout.id == workout_id)
- )
- return result.scalar_one_or_none()
-
- async def _prepare_analysis_context(
- self,
- workout: Workout,
- comparison_metrics: Optional[Dict[str, Any]]
- ) -> Dict[str, Any]:
- """Prepare context data for AI analysis."""
- context = {
- "workout_data": {
- "duration_minutes": workout.duration_seconds / 60 if workout.duration_seconds else 0,
- "distance_km": workout.distance_m / 1000 if workout.distance_m else 0,
- "avg_power": workout.avg_power,
- "avg_heart_rate": workout.avg_hr,
- "elevation_gain": workout.elevation_gain_m
- },
- "activity_type": workout.activity_type,
- "date": workout.start_time.isoformat() if workout.start_time else None
- }
-
- if comparison_metrics:
- context["baseline_metrics"] = comparison_metrics
-
- return context
-```
-
-### Error Handling Patterns
-```python
-# Custom exceptions for better error handling
-class CyclingCoachError(Exception):
- """Base exception for cycling coach application."""
- pass
-
-class WorkoutNotFoundError(CyclingCoachError):
- """Raised when a workout cannot be found."""
- pass
-
-class GarminSyncError(CyclingCoachError):
- """Raised when Garmin synchronization fails."""
- pass
-
-class AIServiceError(CyclingCoachError):
- """Raised when AI service requests fail."""
- pass
-
-# Middleware for consistent error responses
-@app.exception_handler(CyclingCoachError)
-async def cycling_coach_exception_handler(request: Request, exc: CyclingCoachError):
- return JSONResponse(
- status_code=400,
- content={
- "error": exc.__class__.__name__,
- "message": str(exc),
- "timestamp": datetime.now().isoformat()
- }
- )
-```
-
-### Database Patterns
-```python
-# Proper async database patterns with context managers
-from contextlib import asynccontextmanager
-
-class DatabaseService:
- """Base service class with database session management."""
-
- def __init__(self, db: AsyncSession):
- self.db = db
-
- @asynccontextmanager
- async def transaction(self):
- """Context manager for database transactions."""
- try:
- yield self.db
- await self.db.commit()
- except Exception:
- await self.db.rollback()
- raise
-
- async def get_or_create(self, model_class, **kwargs):
- """Get existing record or create new one."""
- result = await self.db.execute(
- select(model_class).filter_by(**kwargs)
- )
- instance = result.scalar_one_or_none()
-
- if not instance:
- instance = model_class(**kwargs)
- self.db.add(instance)
- await self.db.flush() # Get ID without committing
-
- return instance
-```
-
-## Sample Prompt Templates
-
-### Workout Analysis Prompt
-```sql
-INSERT INTO prompts (action_type, model, prompt_text, version, active) VALUES (
-'workout_analysis',
-'anthropic/claude-3-sonnet-20240229',
-'Analyze the following cycling workout data and provide structured feedback:
-
-Workout Details:
-- Activity Type: {activity_type}
-- Duration: {duration_minutes} minutes
-- Distance: {distance_km} km
-- Average Power: {avg_power}W
-- Average Heart Rate: {avg_hr} bpm
-- Elevation Gain: {elevation_gain}m
-
-Please provide your analysis in the following JSON format:
-{{
- "performance_summary": "Brief overall assessment",
- "strengths": ["strength 1", "strength 2"],
- "areas_for_improvement": ["area 1", "area 2"],
- "training_suggestions": ["suggestion 1", "suggestion 2"],
- "next_workout_recommendations": {{
- "intensity": "easy/moderate/hard",
- "focus": "endurance/power/recovery",
- "duration_minutes": 60
- }},
- "performance_score": 8.5
-}}
-
-Focus on actionable insights and specific recommendations for improvement.',
-1,
-true
-);
-```
-
-### Plan Generation Prompt
-```sql
-INSERT INTO prompts (action_type, model, prompt_text, version, active) VALUES (
-'plan_generation',
-'anthropic/claude-3-sonnet-20240229',
-'Create a personalized cycling training plan based on the following information:
-
-Training Rules:
-{rules}
-
-Goals:
-{goals}
-
-Generate a 4-week training plan in the following JSON format:
-{{
- "plan_overview": {{
- "duration_weeks": 4,
- "focus": "endurance/power/mixed",
- "weekly_hours": 8
- }},
- "weeks": [
- {{
- "week_number": 1,
- "focus": "base building",
- "workouts": [
- {{
- "day": "monday",
- "type": "easy_ride",
- "duration_minutes": 60,
- "intensity": "zone_1_2",
- "description": "Easy recovery ride"
- }}
- ]
- }}
- ],
- "progression_notes": "How the plan builds over the weeks"
-}}
-
-Ensure all workouts respect the training rules provided.',
-1,
-true
-);
-```
-
-## Deployment Configuration
-
-### Production Docker Compose
-```yaml
-# docker-compose.prod.yml
-version: '3.9'
-services:
- backend:
- build:
- context: ./backend
- dockerfile: Dockerfile.prod
- restart: unless-stopped
- ports:
- - "8000:8000"
- volumes:
- - gpx-data:/app/data/gpx:rw
- - garmin-sessions:/app/data/sessions:rw
- - ./logs:/app/logs:rw
- environment:
- - DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@db:5432/cycling
- - GARMIN_USERNAME=${GARMIN_USERNAME}
- - GARMIN_PASSWORD=${GARMIN_PASSWORD}
- - OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
- - AI_MODEL=${AI_MODEL}
- - API_KEY=${API_KEY}
- - LOG_LEVEL=INFO
- depends_on:
- - db
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
- interval: 30s
- timeout: 10s
- retries: 3
-
- frontend:
- build:
- context: ./frontend
- dockerfile: Dockerfile.prod
- restart: unless-stopped
- ports:
- - "3000:3000"
- environment:
- - REACT_APP_API_URL=http://localhost:8000
- - NODE_ENV=production
- depends_on:
- - backend
-
- db:
- image: postgres:15-alpine
- restart: unless-stopped
- environment:
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
- POSTGRES_DB: cycling
- volumes:
- - postgres-data:/var/lib/postgresql/data:rw
- - ./backups:/backups:rw
- healthcheck:
- test: ["CMD-SHELL", "pg_isready -U postgres"]
- interval: 10s
- timeout: 5s
- retries: 5
-
-volumes:
- gpx-data:
- driver: local
- driver_opts:
- type: none
- o: bind
- device: /home/user/cycling-coach/data/gpx
- garmin-sessions:
- driver: local
- driver_opts:
- type: none
- o: bind
- device: /home/user/cycling-coach/data/sessions
- postgres-data:
- driver: local
- driver_opts:
- type: none
- o: bind
- device: /home/user/cycling-coach/data/postgres
-
-networks:
- default:
- name: cycling-coach
-```
-
-### Backup Script
-```bash
-#!/bin/bash
-# backup.sh - Daily backup script
-
-BACKUP_DIR="/home/user/cycling-coach/backups"
-DATE=$(date +%Y%m%d_%H%M%S)
-
-# Create backup directory
-mkdir -p "$BACKUP_DIR"
-
-# Backup database
-docker exec cycling-coach-db-1 pg_dump -U postgres cycling > "$BACKUP_DIR/db_backup_$DATE.sql"
-
-# Backup GPX files
-tar -czf "$BACKUP_DIR/gpx_backup_$DATE.tar.gz" -C /home/user/cycling-coach/data/gpx .
-
-# Backup Garmin sessions
-tar -czf "$BACKUP_DIR/sessions_backup_$DATE.tar.gz" -C /home/user/cycling-coach/data/sessions .
-
-# Keep only last 30 days of backups
-find "$BACKUP_DIR" -name "*backup*" -type f -mtime +30 -delete
-
-echo "Backup completed: $DATE"
-```
-
-### Health Monitoring
-```python
-# backend/app/routes/health.py
-from fastapi import APIRouter, Depends, HTTPException
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy import text
-from app.database import get_db
-from app.services.garmin import GarminService
-
-router = APIRouter()
-
-@router.get("/health")
-async def health_check(db: AsyncSession = Depends(get_db)):
- """Health check endpoint for monitoring."""
- health_status = {
- "status": "healthy",
- "timestamp": datetime.now().isoformat(),
- "services": {}
- }
-
- # Check database
- try:
- await db.execute(text("SELECT 1"))
- health_status["services"]["database"] = "healthy"
- except Exception as e:
- health_status["services"]["database"] = f"error: {str(e)}"
- health_status["status"] = "unhealthy"
-
- # Check Garmin service
- try:
- garmin_service = GarminService()
- # Simple connectivity check without full auth
- health_status["services"]["garmin"] = "configured"
- except Exception as e:
- health_status["services"]["garmin"] = f"error: {str(e)}"
-
- # Check file system
- try:
- gpx_dir = "/app/data/gpx"
- if os.path.exists(gpx_dir) and os.access(gpx_dir, os.W_OK):
- health_status["services"]["file_storage"] = "healthy"
- else:
- health_status["services"]["file_storage"] = "error: directory not writable"
- health_status["status"] = "unhealthy"
- except Exception as e:
- health_status["services"]["file_storage"] = f"error: {str(e)}"
- health_status["status"] = "unhealthy"
-
- if health_status["status"] == "unhealthy":
- raise HTTPException(status_code=503, detail=health_status)
-
- return health_status
-```
-
-## Post-Deployment Setup
-
-### Initial Data Setup
-```python
-# scripts/init_prompts.py
-"""Initialize default AI prompts in the database."""
-import asyncio
-from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
-from app.models.prompt import Prompt
-
-async def init_default_prompts():
- """Initialize the database with default AI prompts."""
- engine = create_async_engine(DATABASE_URL)
-
- async with AsyncSession(engine) as session:
- # Add default prompts for each action type
- default_prompts = [
- # Workout analysis prompt (from above)
- # Plan generation prompt (from above)
- # Rule parsing prompt
- ]
-
- for prompt_data in default_prompts:
- prompt = Prompt(**prompt_data)
- session.add(prompt)
-
- await session.commit()
-
- print("Default prompts initialized successfully")
-
-if __name__ == "__main__":
- asyncio.run(init_default_prompts())
-```
-
-### Maintenance Tasks
-```python
-# scripts/maintenance.py
-"""Maintenance tasks for the cycling coach application."""
-
-async def cleanup_old_analyses():
- """Remove analyses older than 6 months."""
- cutoff_date = datetime.now() - timedelta(days=180)
-
- async with AsyncSession(engine) as session:
- result = await session.execute(
- delete(Analysis).where(Analysis.created_at < cutoff_date)
- )
- await session.commit()
- print(f"Deleted {result.rowcount} old analyses")
-
-async def optimize_database():
- """Run database maintenance tasks."""
- async with AsyncSession(engine) as session:
- await session.execute(text("VACUUM ANALYZE"))
- await session.commit()
- print("Database optimization completed")
-```
-
-This comprehensive implementation plan addresses all the key requirements for your single-user, self-hosted AI-assisted cycling coach application. The plan includes:
-
-1. **Complete Garmin integration** using environment variables and the garth library
-2. **Enhanced database schema** with proper versioning for plans and rules
-3. **Robust AI integration** with prompt management and error handling
-4. **Production-ready deployment** configuration with health checks and backups
-5. **Comprehensive testing strategy** for both backend and frontend
-6. **Maintenance and monitoring** tools for long-term operation
-
-The plan is well-suited for your scale (single user, 200 GPX files) and deployment target (self-hosted container), with practical simplifications that avoid unnecessary complexity while maintaining professional software engineering standards.
\ No newline at end of file
+...
\ No newline at end of file
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 6cccf8f..77fc28b 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -45,21 +45,23 @@ COPY . .
RUN echo '#!/bin/bash\n\
set -e\n\
\n\
-# Run database migrations\n\
+# Run database migrations synchronously\n\
echo "Running database migrations..."\n\
-alembic upgrade head\n\
+python -m alembic upgrade head\n\
\n\
# Verify migration success\n\
echo "Verifying migration status..."\n\
-alembic current\n\
+python -m alembic current\n\
\n\
# Start the application\n\
echo "Starting application..."\n\
exec "$@"' > /app/entrypoint.sh && \
chmod +x /app/entrypoint.sh
-# Create non-root user
-RUN useradd -m appuser && chown -R appuser:appuser /app
+# Create non-root user and logs directory
+RUN useradd -m appuser && \
+ mkdir -p /app/logs && \
+ chown -R appuser:appuser /app
USER appuser
# Expose application port
diff --git a/backend/alembic.ini b/backend/alembic.ini
index 01ce8f0..c897f38 100644
--- a/backend/alembic.ini
+++ b/backend/alembic.ini
@@ -1,6 +1,6 @@
[alembic]
script_location = alembic
-sqlalchemy.url = postgresql+asyncpg://appuser:password@db:5432/cyclingdb
+sqlalchemy.url = postgresql+asyncpg://postgres:password@db:5432/cycling
[loggers]
keys = root
@@ -8,6 +8,9 @@ keys = root
[handlers]
keys = console
+[formatters]
+keys = generic
+
[logger_root]
level = WARN
handlers = console
diff --git a/backend/alembic/env.py b/backend/alembic/env.py
index 450ebcb..836d473 100644
--- a/backend/alembic/env.py
+++ b/backend/alembic/env.py
@@ -9,8 +9,8 @@ import os
sys.path.append(os.getcwd())
# Import base and models
-from app.models import Base
-from app.database import DATABASE_URL
+from app.models.base import Base
+from app.config import settings
config = context.config
fileConfig(config.config_file_name)
@@ -30,7 +30,7 @@ def run_migrations_offline():
with context.begin_transaction():
context.run_migrations()
-def run_migrations_online():
+async def run_migrations_online():
"""Run migrations in 'online' mode."""
connectable = AsyncEngine(
engine_from_config(
@@ -38,16 +38,17 @@ def run_migrations_online():
prefix="sqlalchemy.",
poolclass=pool.NullPool,
future=True,
- url=DATABASE_URL,
+ url=settings.DATABASE_URL,
)
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
-async def do_run_migrations(connection):
+def do_run_migrations(connection):
context.configure(connection=connection, target_metadata=target_metadata)
- await connection.run_sync(context.run_migrations)
+ with context.begin_transaction():
+ context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
diff --git a/backend/app/config.py b/backend/app/config.py
index 38ac640..ff7505f 100644
--- a/backend/app/config.py
+++ b/backend/app/config.py
@@ -1,11 +1,11 @@
-from pydantic_settings import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
DATABASE_URL: str
GPX_STORAGE_PATH: str
AI_MODEL: str = "openrouter/auto"
+ API_KEY: str
- class Config:
- env_file = ".env"
+ model_config = SettingsConfigDict(env_file=".env", extra="ignore")
settings = Settings()
\ No newline at end of file
diff --git a/backend/app/database.py b/backend/app/database.py
index d72f8f3..87acb74 100644
--- a/backend/app/database.py
+++ b/backend/app/database.py
@@ -1,7 +1,8 @@
+import os
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import declarative_base, sessionmaker
-DATABASE_URL = "postgresql+asyncpg://appuser:password@db:5432/cyclingdb"
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://postgres:password@db:5432/cycling")
engine = create_async_engine(DATABASE_URL, echo=True)
AsyncSessionLocal = sessionmaker(
diff --git a/backend/app/main.py b/backend/app/main.py
index 849b366..5762150 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -1,6 +1,9 @@
+import logging
+import json
+from datetime import datetime
from fastapi import FastAPI, Depends, Request, HTTPException
from fastapi.middleware.cors import CORSMiddleware
-from .database import get_db, get_database_url
+from .database import get_db
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from alembic.config import Config
@@ -14,6 +17,45 @@ from .routes import prompts as prompt_routes
from .routes import dashboard as dashboard_routes
from .config import settings
+# Configure structured JSON logging
+class StructuredJSONFormatter(logging.Formatter):
+ def format(self, record):
+ log_data = {
+ "timestamp": datetime.utcnow().isoformat(),
+ "level": record.levelname,
+ "message": record.getMessage(),
+ "logger": record.name,
+ "module": record.module,
+ "function": record.funcName,
+ "line": record.lineno,
+ "thread": record.threadName,
+ }
+        # logging flattens `extra` kwargs onto the record itself, so collect
+        # any non-standard attributes rather than checking for record.extra
+        standard_attrs = logging.makeLogRecord({}).__dict__.keys() | {"message", "asctime"}
+        for key, value in record.__dict__.items():
+            if key not in standard_attrs:
+                log_data[key] = value
+        if record.exc_info:
+            log_data["exception"] = self.formatException(record.exc_info)
+        return json.dumps(log_data, default=str)
+
+# Set up logging
+logger = logging.getLogger("ai_cycling_coach")
+logger.setLevel(logging.INFO)
+
+# Create console handler with structured JSON format
+console_handler = logging.StreamHandler()
+console_handler.setFormatter(StructuredJSONFormatter())
+logger.addHandler(console_handler)
+
+# Configure rotating file handler
+from logging.handlers import RotatingFileHandler
+file_handler = RotatingFileHandler(
+ filename="/app/logs/app.log",
+ maxBytes=10*1024*1024, # 10 MB
+ backupCount=5,
+ encoding='utf-8'
+)
+file_handler.setFormatter(StructuredJSONFormatter())
+logger.addHandler(file_handler)
+
app = FastAPI(
title="AI Cycling Coach API",
description="Backend service for AI-assisted cycling training platform",
@@ -49,61 +91,16 @@ app.include_router(workout_routes.router, prefix="/workouts", tags=["workouts"])
app.include_router(prompt_routes.router, prefix="/prompts", tags=["prompts"])
app.include_router(dashboard_routes.router, prefix="/api/dashboard", tags=["dashboard"])
-async def check_migration_status():
- """Check if database migrations are up to date."""
- try:
- # Get Alembic configuration
- config = Config("alembic.ini")
- config.set_main_option("sqlalchemy.url", get_database_url())
- script = ScriptDirectory.from_config(config)
-
- # Get current database revision
- from sqlalchemy import create_engine
- engine = create_engine(get_database_url())
- with engine.connect() as conn:
- context = MigrationContext.configure(conn)
- current_rev = context.get_current_revision()
-
- # Get head revision
- head_rev = script.get_current_head()
-
- return {
- "current_revision": current_rev,
- "head_revision": head_rev,
- "migrations_up_to_date": current_rev == head_rev
- }
- except Exception as e:
- return {
- "error": str(e),
- "migrations_up_to_date": False
- }
-
@app.get("/health")
-async def health_check(db: AsyncSession = Depends(get_db)):
- """Enhanced health check with migration verification."""
- health_status = {
+async def health_check():
+ """Simplified health check endpoint."""
+ return {
"status": "healthy",
"version": "0.1.0",
- "timestamp": "2024-01-15T10:30:00Z" # Should be dynamic
+ "timestamp": datetime.utcnow().isoformat()
}
- # Database connection check
- try:
- await db.execute(text("SELECT 1"))
- health_status["database"] = "connected"
- except Exception as e:
- health_status["status"] = "unhealthy"
- health_status["database"] = f"error: {str(e)}"
-
- # Migration status check
- migration_info = await check_migration_status()
- health_status["migrations"] = migration_info
-
- if not migration_info.get("migrations_up_to_date", False):
- health_status["status"] = "unhealthy"
-
- return health_status
-
if __name__ == "__main__":
import uvicorn
- uvicorn.run(app, host="0.0.0.0", port=8000)
\ No newline at end of file
+ logger.info("Starting AI Cycling Coach API server")
+ uvicorn.run(app, host="0.0.0.0", port=8000, log_config=None)
\ No newline at end of file
diff --git a/backend/app/models/analysis.py b/backend/app/models/analysis.py
index 8f8c678..29ce039 100644
--- a/backend/app/models/analysis.py
+++ b/backend/app/models/analysis.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Column, Integer, String, ForeignKey, JSON, Boolean, DateTime
+from sqlalchemy import Column, Integer, String, ForeignKey, JSON, Boolean, DateTime, func
from sqlalchemy.orm import relationship
from .base import BaseModel
diff --git a/backend/app/models/plan_rule.py b/backend/app/models/plan_rule.py
new file mode 100644
index 0000000..5a7a189
--- /dev/null
+++ b/backend/app/models/plan_rule.py
@@ -0,0 +1,12 @@
+from sqlalchemy import Column, Integer, ForeignKey
+from sqlalchemy.orm import relationship
+from .base import BaseModel
+
+class PlanRule(BaseModel):
+ __tablename__ = "plan_rules"
+
+ plan_id = Column(Integer, ForeignKey('plans.id'), primary_key=True)
+ rule_id = Column(Integer, ForeignKey('rules.id'), primary_key=True)
+
+ plan = relationship("Plan", back_populates="rules")
+ rule = relationship("Rule", back_populates="plans")
\ No newline at end of file
diff --git a/backend/app/models/rule.py b/backend/app/models/rule.py
index af27143..b52ac70 100644
--- a/backend/app/models/rule.py
+++ b/backend/app/models/rule.py
@@ -1,5 +1,6 @@
-from sqlalchemy import Column, Integer, ForeignKey, Boolean
+from sqlalchemy import Column, Integer, ForeignKey, Boolean, String
from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import relationship
from .base import BaseModel
class Rule(BaseModel):
diff --git a/backend/app/routes/health.py b/backend/app/routes/health.py
index bf870a7..eff2567 100644
--- a/backend/app/routes/health.py
+++ b/backend/app/routes/health.py
@@ -1,9 +1,54 @@
from fastapi import APIRouter
+from fastapi.responses import PlainTextResponse, JSONResponse
from app.services.health_monitor import HealthMonitor
+from prometheus_client import generate_latest, CONTENT_TYPE_LATEST, Gauge
+from pathlib import Path
+import json
router = APIRouter()
monitor = HealthMonitor()
+# Prometheus metrics
+SYNC_QUEUE = Gauge('sync_queue_size', 'Current Garmin sync queue size')
+PENDING_ANALYSES = Gauge('pending_analyses', 'Number of pending workout analyses')
+
@router.get("/health")
async def get_health():
- return monitor.check_system_health()
\ No newline at end of file
+ return monitor.check_system_health()
+
+@router.get("/metrics")
+async def prometheus_metrics():
+ # Update metrics with latest values
+ health_data = monitor.check_system_health()
+ SYNC_QUEUE.set(health_data['services'].get('sync_queue_size', 0))
+ PENDING_ANALYSES.set(health_data['services'].get('pending_analyses', 0))
+
+ return PlainTextResponse(
+ content=generate_latest(),
+ media_type=CONTENT_TYPE_LATEST
+ )
+
+@router.get("/dashboard/health", response_class=JSONResponse)
+async def health_dashboard():
+ """Health dashboard endpoint with aggregated monitoring data"""
+ health_data = monitor.check_system_health()
+
+ # Get recent logs (last 100 lines)
+ log_file = Path("/app/logs/app.log")
+ recent_logs = []
+    try:
+        with log_file.open() as f:
+            lines = f.readlines()[-100:]
+        for line in lines:
+            try:
+                recent_logs.append(json.loads(line.strip()))
+            except json.JSONDecodeError:
+                continue  # tolerate partial writes or non-JSON lines
+    except FileNotFoundError:
+        pass
+
+ return {
+ "system": health_data,
+ "logs": recent_logs,
+ "statistics": {
+ "log_entries": len(recent_logs),
+ "error_count": sum(1 for log in recent_logs if log.get('level') == 'ERROR'),
+ "warning_count": sum(1 for log in recent_logs if log.get('level') == 'WARNING')
+ }
+ }
\ No newline at end of file
diff --git a/backend/app/routes/workouts.py b/backend/app/routes/workouts.py
index d11dec5..c4a1f1b 100644
--- a/backend/app/routes/workouts.py
+++ b/backend/app/routes/workouts.py
@@ -8,8 +8,9 @@ from app.models.workout import Workout
from app.models.analysis import Analysis
from app.models.garmin_sync_log import GarminSyncLog
from app.models.plan import Plan
-from app.schemas.workout import Workout as WorkoutSchema, WorkoutSyncStatus
+from app.schemas.workout import Workout as WorkoutSchema, WorkoutSyncStatus, WorkoutMetric
from app.schemas.analysis import Analysis as AnalysisSchema
+from app.schemas.plan import Plan as PlanSchema
from app.services.workout_sync import WorkoutSyncService
from app.services.ai_service import AIService
from app.services.plan_evolution import PlanEvolutionService
@@ -32,7 +33,7 @@ async def read_workout(workout_id: int, db: AsyncSession = Depends(get_db)):
raise HTTPException(status_code=404, detail="Workout not found")
return workout
-@router.get("/{workout_id}/metrics", response_model=list[schemas.WorkoutMetric])
+@router.get("/{workout_id}/metrics", response_model=list[WorkoutMetric])
async def get_workout_metrics(
workout_id: int,
db: AsyncSession = Depends(get_db)
@@ -153,7 +154,7 @@ async def approve_analysis(
return {"message": "Analysis approved"}
-@router.get("/plans/{plan_id}/evolution", response_model=List[schemas.Plan])
+@router.get("/plans/{plan_id}/evolution", response_model=List[PlanSchema])
async def get_plan_evolution(
plan_id: int,
db: AsyncSession = Depends(get_db)
diff --git a/backend/app/schemas/plan.py b/backend/app/schemas/plan.py
index a30b17c..41d5e1f 100644
--- a/backend/app/schemas/plan.py
+++ b/backend/app/schemas/plan.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
from datetime import datetime
from typing import List, Optional
from uuid import UUID
diff --git a/backend/app/services/health_monitor.py b/backend/app/services/health_monitor.py
index 0678115..294ab9f 100644
--- a/backend/app/services/health_monitor.py
+++ b/backend/app/services/health_monitor.py
@@ -36,38 +36,51 @@ class HealthMonitor:
return {
'database': self._check_database(),
'garmin_sync': self._check_garmin_sync(),
- 'ai_service': self._check_ai_service()
+ 'ai_service': self._check_ai_service(),
+ 'sync_queue_size': self._get_sync_queue_size(),
+ 'pending_analyses': self._count_pending_analyses()
}
+    def _get_sync_queue_size(self) -> int:
+        """Get number of pending sync operations"""
+        from app.models.garmin_sync_log import GarminSyncLog, SyncStatus
+        with get_db() as db:
+            return db.query(GarminSyncLog).filter_by(status=SyncStatus.PENDING).count()
+
+    def _count_pending_analyses(self) -> int:
+        """Count workouts needing analysis"""
+        from app.models.workout import Workout
+        with get_db() as db:
+            return db.query(Workout).filter_by(analysis_status='pending').count()
+
def _check_database(self) -> str:
try:
with get_db() as db:
db.execute(text("SELECT 1"))
return "ok"
except Exception as e:
- logger.error(f"Database check failed: {str(e)}")
+ logger.error("Database check failed", extra={"component": "database", "error": str(e)})
return "down"
def _check_garmin_sync(self) -> str:
try:
last_sync = GarminSyncLog.get_latest()
if last_sync and last_sync.status == SyncStatus.FAILED:
+ logger.warning("Garmin sync has failed status", extra={"component": "garmin_sync", "status": last_sync.status.value})
return "warning"
return "ok"
except Exception as e:
- logger.error(f"Garmin sync check failed: {str(e)}")
+ logger.error("Garmin sync check failed", extra={"component": "garmin_sync", "error": str(e)})
return "down"
def _check_ai_service(self) -> str:
try:
response = requests.get(
- f"{settings.AI_SERVICE_URL}/ping",
+ f"{settings.AI_SERVICE_URL}/ping",
timeout=5,
headers={"Authorization": f"Bearer {settings.OPENROUTER_API_KEY}"}
)
return "ok" if response.ok else "down"
except Exception as e:
- logger.error(f"AI service check failed: {str(e)}")
+ logger.error("AI service check failed", extra={"component": "ai_service", "error": str(e)})
return "down"
def _log_anomalies(self, metrics: Dict[str, Any]):
@@ -75,6 +88,7 @@ class HealthMonitor:
for metric, value in metrics.items():
if metric in self.warning_thresholds and value > self.warning_thresholds[metric]:
alerts.append(f"{metric} {value}%")
+ logger.warning("System threshold exceeded", extra={"metric": metric, "value": value, "threshold": self.warning_thresholds[metric]})
if alerts:
- logger.warning(f"System thresholds exceeded: {', '.join(alerts)}")
\ No newline at end of file
+ logger.warning("System thresholds exceeded", extra={"alerts": alerts})
\ No newline at end of file
diff --git a/backend/app/services/workout_sync.py b/backend/app/services/workout_sync.py
index 96831b9..9d47288 100644
--- a/backend/app/services/workout_sync.py
+++ b/backend/app/services/workout_sync.py
@@ -6,6 +6,7 @@ from app.models.garmin_sync_log import GarminSyncLog
from app.models.garmin_sync_log import GarminSyncLog
from datetime import datetime, timedelta
import logging
+from typing import Dict, Any
import asyncio
logger = logging.getLogger(__name__)
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 31e79fb..8d9a3ee 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -8,4 +8,5 @@ pydantic-settings==2.2.1
python-multipart==0.0.9
gpxpy # Add GPX parsing library
garth==0.4.46 # Garmin Connect API client
-httpx==0.25.2 # Async HTTP client for OpenRouter API
\ No newline at end of file
+httpx==0.25.2 # Async HTTP client for OpenRouter API
+asyncpg==0.29.0 # Async PostgreSQL driver
\ No newline at end of file
diff --git a/backend/scripts/backup_restore.py b/backend/scripts/backup_restore.py
index 7d470d1..5ba5bf4 100644
--- a/backend/scripts/backup_restore.py
+++ b/backend/scripts/backup_restore.py
@@ -24,6 +24,9 @@ class DatabaseManager:
def __init__(self, backup_dir: str = "/app/data/backups"):
self.backup_dir = Path(backup_dir)
self.backup_dir.mkdir(parents=True, exist_ok=True)
+ self.gpx_dir = Path("/app/data/gpx")
+ self.manifest_file = self.backup_dir / "gpx_manifest.json"
+        key = os.getenv("BACKUP_ENCRYPTION_KEY")
+        self.encryption_key = key.encode() if key else None  # optional; only used when set
def get_db_connection_params(self):
"""Extract database connection parameters from URL."""
@@ -39,15 +42,91 @@ class DatabaseManager:
'database': parsed.path.lstrip('/')
}
+ def _backup_gpx_files(self, backup_dir: Path) -> Optional[Path]:
+ """Backup GPX files directory"""
+ gpx_dir = Path("/app/data/gpx")
+ if not gpx_dir.exists():
+ return None
+
+ backup_path = backup_dir / "gpx.tar.gz"
+ with tarfile.open(backup_path, "w:gz") as tar:
+ tar.add(gpx_dir, arcname="gpx")
+ return backup_path
+
+ def _backup_sessions(self, backup_dir: Path) -> Optional[Path]:
+ """Backup Garmin sessions directory"""
+ sessions_dir = Path("/app/data/sessions")
+ if not sessions_dir.exists():
+ return None
+
+ backup_path = backup_dir / "sessions.tar.gz"
+ with tarfile.open(backup_path, "w:gz") as tar:
+ tar.add(sessions_dir, arcname="sessions")
+ return backup_path
+
+ def _generate_checksum(self, file_path: Path) -> str:
+ """Generate SHA256 checksum for a file"""
+ hash_sha256 = hashlib.sha256()
+ with open(file_path, "rb") as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ hash_sha256.update(chunk)
+ return hash_sha256.hexdigest()
+
+ def _verify_backup_integrity(self, backup_path: Path):
+ """Verify backup file integrity using checksum"""
+        checksum_file = backup_path.with_name(backup_path.name + '.sha256')
+ if not checksum_file.exists():
+ raise FileNotFoundError(f"Checksum file missing for {backup_path.name}")
+
+ with open(checksum_file) as f:
+ expected_checksum = f.read().split()[0]
+
+ actual_checksum = self._generate_checksum(backup_path)
+ if actual_checksum != expected_checksum:
+ raise ValueError(f"Checksum mismatch for {backup_path.name}")
+
def create_backup(self, name: Optional[str] = None) -> str:
- """Create a database backup."""
+ """Create a full system backup including database, GPX files, and sessions"""
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
- backup_name = name or f"backup_{timestamp}"
- backup_file = self.backup_dir / f"{backup_name}.sql"
+ backup_name = name or f"full_backup_{timestamp}"
+ backup_dir = self.backup_dir / backup_name
+ backup_dir.mkdir(parents=True, exist_ok=True)
+ try:
+ # Backup database
+ db_backup_path = self._backup_database(backup_dir)
+
+ # Backup GPX files
+ gpx_backup_path = self._backup_gpx_files(backup_dir)
+
+ # Backup sessions
+ sessions_backup_path = self._backup_sessions(backup_dir)
+
+            # Generate checksums for all backup files (materialize the listing
+            # first so freshly written .sha256 files are not re-hashed)
+            for file in list(backup_dir.glob("*")):
+                if file.is_file() and not file.name.endswith('.sha256'):
+                    checksum = self._generate_checksum(file)
+                    with open(f"{file}.sha256", "w") as f:
+                        f.write(f"{checksum} {file.name}")
+
+ # Verify backups
+ for file in backup_dir.glob("*"):
+ if file.is_file() and not file.name.endswith('.sha256'):
+ self._verify_backup_integrity(file)
+
+ print(f"✅ Full backup created successfully: {backup_dir}")
+ return str(backup_dir)
+
+ except Exception as e:
+ shutil.rmtree(backup_dir, ignore_errors=True)
+ print(f"❌ Backup failed: {str(e)}")
+ raise
+
+ def _backup_database(self, backup_dir: Path) -> Path:
+ """Create database backup"""
params = self.get_db_connection_params()
+ backup_file = backup_dir / "database.dump"
- # Use pg_dump for backup
cmd = [
"pg_dump",
"-h", params['host'],
@@ -56,28 +135,18 @@ class DatabaseManager:
"-d", params['database'],
"-f", str(backup_file),
"--no-password",
- "--format=custom", # Custom format for better compression
+ "--format=custom",
"--compress=9"
]
- # Set password environment variable
env = os.environ.copy()
env['PGPASSWORD'] = params['password']
- try:
- print(f"Creating backup: {backup_file}")
- result = subprocess.run(cmd, env=env, capture_output=True, text=True)
-
- if result.returncode == 0:
- print(f"✅ Backup created successfully: {backup_file}")
- return str(backup_file)
- else:
- print(f"❌ Backup failed: {result.stderr}")
- raise Exception(f"Backup failed: {result.stderr}")
-
- except FileNotFoundError:
- print("❌ pg_dump not found. Ensure PostgreSQL client tools are installed.")
- raise
+ result = subprocess.run(cmd, env=env, capture_output=True, text=True)
+ if result.returncode != 0:
+ raise Exception(f"Database backup failed: {result.stderr}")
+
+ return backup_file
def restore_backup(self, backup_file: str, confirm: bool = False) -> None:
"""Restore database from backup."""
@@ -128,6 +197,80 @@ class DatabaseManager:
print("❌ pg_restore not found. Ensure PostgreSQL client tools are installed.")
raise
+ def backup_gpx_files(self, incremental: bool = True) -> Optional[Path]:
+ """Handle GPX backup creation with incremental/full strategy"""
+ try:
+ if incremental:
+ return self._incremental_gpx_backup()
+ return self._full_gpx_backup()
+ except Exception as e:
+ print(f"GPX backup failed: {str(e)}")
+ return None
+
+ def _full_gpx_backup(self) -> Path:
+ """Create full GPX backup"""
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ backup_path = self.backup_dir / f"gpx_full_{timestamp}"
+ backup_path.mkdir()
+
+        # Copy all GPX files (check=True surfaces rsync failures)
+        subprocess.run(["rsync", "-a", f"{self.gpx_dir}/", f"{backup_path}/"], check=True)
+ self._encrypt_backup(backup_path)
+ return backup_path
+
+ def _incremental_gpx_backup(self) -> Optional[Path]:
+ """Create incremental GPX backup using rsync --link-dest"""
+ last_full = self._find_last_full_backup()
+ if not last_full:
+ return self._full_gpx_backup()
+
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+ backup_path = self.backup_dir / f"gpx_inc_{timestamp}"
+ backup_path.mkdir()
+
+        # Hardlink unchanged files against the previous backup
+        subprocess.run([
+            "rsync", "-a",
+            "--link-dest", str(last_full),
+            f"{self.gpx_dir}/",
+            f"{backup_path}/"
+        ], check=True)
+        # Encrypt only newly copied files: hardlinked entries share inodes
+        # with the previous (already encrypted) backup, and rewriting them
+        # in place would corrupt it.
+        self._encrypt_backup(backup_path, skip_hardlinked=True)
+ return backup_path
+
+ def _find_last_full_backup(self) -> Optional[Path]:
+ """Find most recent full backup"""
+ full_backups = sorted(self.backup_dir.glob("gpx_full_*"), reverse=True)
+ return full_backups[0] if full_backups else None
+
+    def _encrypt_backup(self, backup_path: Path, skip_hardlinked: bool = False):
+        """Encrypt backup directory in place using Fernet (AES-128-CBC with HMAC-SHA256)"""
+        from cryptography.fernet import Fernet
+
+        fernet = Fernet(self.encryption_key)
+
+        for file in backup_path.rglob('*'):
+            if file.is_file():
+                if skip_hardlinked and file.stat().st_nlink > 1:
+                    continue  # shared inode with an older backup; already encrypted
+                with open(file, 'rb') as f:
+                    data = f.read()
+                encrypted = fernet.encrypt(data)
+                with open(file, 'wb') as f:
+                    f.write(encrypted)
+
+    def decrypt_backup(self, backup_path: Path):
+        """Decrypt backup directory in place. Run against a copy if the
+        backup shares hardlinks with other incremental backups."""
+ from cryptography.fernet import Fernet
+
+ fernet = Fernet(self.encryption_key)
+
+ for file in backup_path.rglob('*'):
+ if file.is_file():
+ with open(file, 'rb') as f:
+ data = f.read()
+ decrypted = fernet.decrypt(data)
+ with open(file, 'wb') as f:
+ f.write(decrypted)
+
def _recreate_database(self):
"""Drop and recreate the database."""
params = self.get_db_connection_params()
@@ -184,10 +327,11 @@ class DatabaseManager:
cutoff = datetime.now() - timedelta(days=keep_days)
removed = []
-        for backup in self.backup_dir.glob("*.sql"):
-            if datetime.fromtimestamp(backup.stat().st_mtime) < cutoff:
-                backup.unlink()
-                removed.append(backup.name)
+        # Clean all backup directories (full_backup_*, gpx_full_*, gpx_inc_*)
+        for pattern in ("full_backup_*", "gpx_full_*", "gpx_inc_*"):
+            for backup_dir in self.backup_dir.glob(pattern):
+                if backup_dir.is_dir() and datetime.fromtimestamp(backup_dir.stat().st_mtime) < cutoff:
+                    shutil.rmtree(backup_dir)
+                    removed.append(backup_dir.name)
if removed:
print(f"Removed {len(removed)} old backups: {', '.join(removed)}")
@@ -198,10 +342,12 @@ def main():
if len(sys.argv) < 2:
print("Usage: python backup_restore.py [options]")
print("Commands:")
- print(" backup [name] - Create a new backup")
+ print(" backup [name] - Create a new database backup")
+ print(" gpx-backup [--full] - Create GPX backup (incremental by default)")
print(" restore [--yes] - Restore from backup")
print(" list - List available backups")
print(" cleanup [days] - Remove backups older than N days (default: 30)")
+ print(" decrypt - Decrypt backup directory")
sys.exit(1)
manager = DatabaseManager()
@@ -210,13 +356,21 @@ def main():
try:
if command == "backup":
            name = sys.argv[2] if len(sys.argv) > 2 else None
            manager.create_backup(name)
+ elif command == "gpx-backup":
+ if len(sys.argv) > 2 and sys.argv[2] == "--full":
+ manager.backup_gpx_files(incremental=False)
+ else:
+ manager.backup_gpx_files()
elif command == "restore":
if len(sys.argv) < 3:
print("Error: Please specify backup file to restore from")
sys.exit(1)
            backup_file = sys.argv[2]
            confirm = "--yes" in sys.argv
manager.restore_backup(backup_file, confirm)
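+        elif command == "decrypt":
+            # Hypothetical wiring for the `decrypt` command advertised in the
+            # usage text above; assumes the directory path is passed verbatim.
+            if len(sys.argv) < 3:
+                print("Error: Please specify backup directory to decrypt")
+                sys.exit(1)
+            manager.decrypt_backup(Path(sys.argv[2]))

The `BACKUP_ENCRYPTION_KEY` consumed by `DatabaseManager` must be a urlsafe-base64 Fernet key. A one-off way to generate it, a sketch using the `cryptography` package pinned above:

```python
# Generate a Fernet-compatible key and print it for the .env file.
from cryptography.fernet import Fernet

key = Fernet.generate_key()  # 32 random bytes, urlsafe-base64 encoded
print(key.decode())          # store as BACKUP_ENCRYPTION_KEY
```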
diff --git a/backend/tests/services/test_ai_service.py b/backend/tests/services/test_ai_service.py
new file mode 100644
index 0000000..ab0a69c
--- /dev/null
+++ b/backend/tests/services/test_ai_service.py
@@ -0,0 +1,102 @@
+import pytest
+from unittest.mock import AsyncMock, patch, MagicMock
+from app.services.ai_service import AIService, AIServiceError
+from app.models.workout import Workout
+import json
+
+@pytest.mark.asyncio
+async def test_analyze_workout_success():
+ """Test successful workout analysis with valid API response"""
+ mock_db = MagicMock()
+ mock_prompt = MagicMock()
+ mock_prompt.format.return_value = "test prompt"
+
+ ai_service = AIService(mock_db)
+ ai_service.prompt_manager.get_active_prompt = AsyncMock(return_value=mock_prompt)
+
+ test_response = json.dumps({
+ "performance_summary": "Good workout",
+ "suggestions": ["More recovery"]
+ })
+
+    with patch('httpx.AsyncClient.post', new_callable=AsyncMock) as mock_post:
+ mock_post.return_value = AsyncMock(
+ status_code=200,
+ json=lambda: {"choices": [{"message": {"content": test_response}}]}
+ )
+
+ workout = Workout(activity_type="cycling", duration_seconds=3600)
+ result = await ai_service.analyze_workout(workout)
+
+ assert "performance_summary" in result
+ assert len(result["suggestions"]) == 1
+
+@pytest.mark.asyncio
+async def test_generate_plan_success():
+ """Test plan generation with structured response"""
+ mock_db = MagicMock()
+ ai_service = AIService(mock_db)
+ ai_service.prompt_manager.get_active_prompt = AsyncMock(return_value="Plan prompt: {rules} {goals}")
+
+ test_plan = {
+ "weeks": [{"workouts": ["ride"]}],
+ "focus": "endurance"
+ }
+
+    with patch('httpx.AsyncClient.post', new_callable=AsyncMock) as mock_post:
+ mock_post.return_value = AsyncMock(
+ status_code=200,
+ json=lambda: {"choices": [{"message": {"content": json.dumps(test_plan)}}]}
+ )
+
+ result = await ai_service.generate_plan([], {})
+ assert "weeks" in result
+ assert result["focus"] == "endurance"
+
+@pytest.mark.asyncio
+async def test_api_retry_logic():
+ """Test API request retries on failure"""
+ mock_db = MagicMock()
+ ai_service = AIService(mock_db)
+
+    with patch('httpx.AsyncClient.post', new_callable=AsyncMock) as mock_post:
+ mock_post.side_effect = Exception("API failure")
+
+ with pytest.raises(AIServiceError):
+ await ai_service._make_ai_request("test")
+
+ assert mock_post.call_count == 3
+
+@pytest.mark.asyncio
+async def test_invalid_json_handling():
+ """Test graceful handling of invalid JSON responses"""
+ mock_db = MagicMock()
+ ai_service = AIService(mock_db)
+
+    with patch('httpx.AsyncClient.post', new_callable=AsyncMock) as mock_post:
+ mock_post.return_value = AsyncMock(
+ status_code=200,
+ json=lambda: {"choices": [{"message": {"content": "invalid{json"}}]}
+ )
+
+ result = await ai_service.parse_rules_from_natural_language("test")
+ assert "raw_rules" in result
+ assert not result["structured"]
+
+@pytest.mark.asyncio
+async def test_code_block_parsing():
+ """Test extraction of JSON from code blocks"""
+ mock_db = MagicMock()
+ ai_service = AIService(mock_db)
+
+ test_response = "```json\n" + json.dumps({"max_rides": 4}) + "\n```"
+
+    with patch('httpx.AsyncClient.post', new_callable=AsyncMock) as mock_post:
+ mock_post.return_value = AsyncMock(
+ status_code=200,
+ json=lambda: {"choices": [{"message": {"content": test_response}}]}
+ )
+
+ result = await ai_service.evolve_plan({})
+ assert "max_rides" in result
+ assert result["max_rides"] == 4
\ No newline at end of file
diff --git a/backend/tests/services/test_plan_evolution.py b/backend/tests/services/test_plan_evolution.py
new file mode 100644
index 0000000..1ed113f
--- /dev/null
+++ b/backend/tests/services/test_plan_evolution.py
@@ -0,0 +1,56 @@
+import pytest
+from unittest.mock import AsyncMock, MagicMock
+from app.services.plan_evolution import PlanEvolutionService
+from app.models.plan import Plan
+from app.models.analysis import Analysis
+from datetime import datetime
+
+@pytest.mark.asyncio
+async def test_evolve_plan_with_valid_analysis():
+ """Test plan evolution with approved analysis and suggestions"""
+ mock_db = AsyncMock()
+ mock_plan = Plan(
+ id=1,
+ version=1,
+ jsonb_plan={"weeks": []},
+ parent_plan_id=None
+ )
+ mock_analysis = Analysis(
+ approved=True,
+ jsonb_feedback={"suggestions": ["More recovery"]}
+ )
+
+ service = PlanEvolutionService(mock_db)
+ service.ai_service.evolve_plan = AsyncMock(return_value={"weeks": [{"recovery": True}]})
+
+ result = await service.evolve_plan_from_analysis(mock_analysis, mock_plan)
+
+ assert result.version == 2
+ assert result.parent_plan_id == 1
+ mock_db.add.assert_called_once()
+ mock_db.commit.assert_awaited_once()
+
+@pytest.mark.asyncio
+async def test_evolution_skipped_for_unapproved_analysis():
+ """Test plan evolution is skipped for unapproved analysis"""
+ mock_db = AsyncMock()
+ mock_analysis = Analysis(approved=False)
+
+ service = PlanEvolutionService(mock_db)
+ result = await service.evolve_plan_from_analysis(mock_analysis, MagicMock())
+
+ assert result is None
+
+@pytest.mark.asyncio
+async def test_evolution_history_retrieval():
+ """Test getting plan evolution history"""
+ mock_db = AsyncMock()
+    mock_result = MagicMock()
+    mock_result.scalars.return_value = [Plan(version=1), Plan(version=2)]
+    mock_db.execute.return_value = mock_result
+
+ service = PlanEvolutionService(mock_db)
+ history = await service.get_plan_evolution_history(1)
+
+ assert len(history) == 2
+ assert history[0].version == 1
\ No newline at end of file
diff --git a/backend/tests/services/test_workout_sync.py b/backend/tests/services/test_workout_sync.py
new file mode 100644
index 0000000..8c82e70
--- /dev/null
+++ b/backend/tests/services/test_workout_sync.py
@@ -0,0 +1,81 @@
+import pytest
+from unittest.mock import AsyncMock, MagicMock, patch
+from app.services.workout_sync import WorkoutSyncService
+from app.models.workout import Workout
+from app.models.garmin_sync_log import GarminSyncLog
+from datetime import datetime, timedelta
+import asyncio
+
+@pytest.mark.asyncio
+async def test_successful_sync():
+ """Test successful sync of new activities"""
+ mock_db = AsyncMock()
+ mock_garmin = MagicMock()
+ mock_garmin.get_activities.return_value = [{'activityId': '123'}]
+ mock_garmin.get_activity_details.return_value = {'metrics': 'data'}
+
+ service = WorkoutSyncService(mock_db)
+ service.garmin_service = mock_garmin
+
+ result = await service.sync_recent_activities()
+
+ assert result == 1
+ mock_db.add.assert_called()
+ mock_db.commit.assert_awaited()
+
+@pytest.mark.asyncio
+async def test_duplicate_activity_handling():
+ """Test skipping duplicate activities"""
+ mock_db = AsyncMock()
+    mock_result = MagicMock()
+    mock_result.scalar_one_or_none.return_value = True
+    mock_db.execute.return_value = mock_result
+ mock_garmin = MagicMock()
+ mock_garmin.get_activities.return_value = [{'activityId': '123'}]
+
+ service = WorkoutSyncService(mock_db)
+ service.garmin_service = mock_garmin
+
+ result = await service.sync_recent_activities()
+ assert result == 0
+
+@pytest.mark.asyncio
+async def test_activity_detail_retry_logic():
+ """Test retry logic for activity details"""
+ mock_db = AsyncMock()
+ mock_garmin = MagicMock()
+ mock_garmin.get_activities.return_value = [{'activityId': '123'}]
+ mock_garmin.get_activity_details.side_effect = [Exception(), {'metrics': 'data'}]
+
+ service = WorkoutSyncService(mock_db)
+ service.garmin_service = mock_garmin
+
+ result = await service.sync_recent_activities()
+ assert mock_garmin.get_activity_details.call_count == 2
+ assert result == 1
+
+@pytest.mark.asyncio
+async def test_auth_error_handling():
+ """Test authentication error handling"""
+ mock_db = AsyncMock()
+ mock_garmin = MagicMock()
+ mock_garmin.get_activities.side_effect = Exception("Auth failed")
+
+ service = WorkoutSyncService(mock_db)
+ service.garmin_service = mock_garmin
+
+ with pytest.raises(Exception):
+ await service.sync_recent_activities()
+
+ sync_log = mock_db.add.call_args[0][0]
+ assert sync_log.status == "auth_error"
+
+@pytest.mark.asyncio
+async def test_get_sync_status():
+ """Test retrieval of latest sync status"""
+    mock_db = AsyncMock()
+    mock_log = GarminSyncLog(status="success")
+    mock_result = MagicMock()
+    mock_result.scalar_one_or_none.return_value = mock_log
+    mock_db.execute.return_value = mock_result
+
+ service = WorkoutSyncService(mock_db)
+ result = await service.get_latest_sync_status()
+
+ assert result.status == "success"
\ No newline at end of file
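The retry test above assumes the sync service retries a failed detail fetch exactly once. A minimal sketch of that behavior, assuming nothing about the real service beyond what the test exercises; the function name is illustrative:

```python
# Sketch of the retry behavior test_activity_detail_retry_logic expects:
# one failed fetch, one retry, two calls total. Names are illustrative.
import asyncio

async def fetch_details_with_retry(fetch, activity_id, retries=2, delay=0.5):
    for attempt in range(retries):
        try:
            return fetch(activity_id)
        except Exception:
            if attempt == retries - 1:
                raise  # retries exhausted, surface the error
            await asyncio.sleep(delay)  # brief pause before retrying
```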
diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml
index db33de0..985ed80 100644
--- a/docker-compose.prod.yml
+++ b/docker-compose.prod.yml
@@ -11,6 +11,11 @@ services:
- ./data/gpx:/app/data/gpx
- ./data/sessions:/app/data/sessions
- ./data/logs:/app/logs
+ logging:
+ driver: "json-file"
+ options:
+ max-size: "10m"
+ max-file: "5"
environment:
- DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@db:5432/cycling
- API_KEY=${API_KEY}
@@ -33,12 +38,23 @@ services:
dockerfile: Dockerfile
restart: unless-stopped
ports:
- - "3000:3000"
+ - "80:80"
environment:
- - REACT_APP_API_URL=http://localhost:8000
+ - REACT_APP_API_URL=http://backend:8000
+ - NODE_ENV=production
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:80/healthz"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
depends_on:
backend:
condition: service_healthy
+ deploy:
+ resources:
+ limits:
+ memory: 512M
+ cpus: '0.5'
db:
image: postgres:15-alpine
diff --git a/docker-compose.yml b/docker-compose.yml
index 7e439ed..2dbc303 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -8,7 +8,8 @@ services:
ports:
- "8000:8000"
environment:
- - DATABASE_URL=postgresql://postgres:password@db:5432/cycling
+ - DATABASE_URL=postgresql+asyncpg://postgres:password@db:5432/cycling
+ - GPX_STORAGE_PATH=/app/data/gpx
- GARMIN_USERNAME=${GARMIN_USERNAME}
- GARMIN_PASSWORD=${GARMIN_PASSWORD}
- OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
@@ -27,9 +28,9 @@ services:
frontend:
build: ./frontend
ports:
- - "3000:3000"
+ - "8888:80"
environment:
- - REACT_APP_API_URL=http://localhost:8000
+ - REACT_APP_API_URL=http://backend:8000
- REACT_APP_API_KEY=${API_KEY}
db:
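The new `GPX_STORAGE_PATH` variable would typically surface through the existing `pydantic-settings` configuration; a sketch, with the class and field set assumed rather than taken from the repo:

```python
# Sketch: exposing GPX_STORAGE_PATH via pydantic-settings (pinned in
# requirements.txt). Field names here are assumptions, not repo code.
from pydantic_settings import BaseSettings

class Settings(BaseSettings):
    DATABASE_URL: str = "postgresql+asyncpg://postgres:password@db:5432/cycling"
    GPX_STORAGE_PATH: str = "/app/data/gpx"

settings = Settings()  # values are read from the container environment
```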
diff --git a/frontend/.dockerignore b/frontend/.dockerignore
new file mode 100644
index 0000000..045e02f
--- /dev/null
+++ b/frontend/.dockerignore
@@ -0,0 +1,11 @@
+node_modules
+.next
+Dockerfile
+.dockerignore
+.git
+.gitignore
+coverage
+.env
+.env.local
+.vscode
+*.log
\ No newline at end of file
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
index 81d6214..06f0dd5 100644
--- a/frontend/Dockerfile
+++ b/frontend/Dockerfile
@@ -1,39 +1,60 @@
-# Build stage
-FROM node:20-alpine AS build
+# Stage 1: Build application
+FROM node:20-alpine AS builder
-# Set working directory
WORKDIR /app
-# Copy package.json and package-lock.json
-COPY package*.json ./
+# Copy package manifests first for optimal caching
+COPY package.json package-lock.json* ./
-# Install all dependencies including devDependencies
-RUN npm install --include=dev
+# Clean cache and install dependencies
+RUN npm cache clean --force && \
+ export NODE_OPTIONS="--max-old-space-size=1024" && \
+ npm install --include=dev
-# Copy source code
+# Copy source files
COPY . .
-# Build application
-RUN npm run build
+# Build application with production settings
+RUN export NODE_OPTIONS="--max-old-space-size=1024" && \
+ npm run build
-# Production stage
-FROM node:20-alpine AS production
+# Stage 2: Production runtime
+FROM nginx:1.25-alpine
-# Set working directory
-WORKDIR /app
+# Install curl for healthchecks
+RUN apk add --no-cache curl
-# Copy build artifacts and dependencies
-COPY --from=build /app/package*.json ./
-COPY --from=build /app/.next ./.next
-COPY --from=build /app/node_modules ./node_modules
-COPY --from=build /app/public ./public
+# Create necessary directories and set permissions
+RUN mkdir -p /var/cache/nginx/client_temp && \
+ mkdir -p /var/run/nginx && \
+ chown -R nginx:nginx /usr/share/nginx/html && \
+ chown -R nginx:nginx /var/cache/nginx && \
+ chown -R nginx:nginx /var/run/nginx && \
+ chmod -R 755 /usr/share/nginx/html
-# Create non-root user
-RUN addgroup -S appgroup && adduser -S appuser -G appgroup
-USER appuser
+# Copy build artifacts
+COPY --from=builder /app/.next /usr/share/nginx/html/_next
-# Expose application port
-EXPOSE 3000
+# Copy nginx configuration
+COPY nginx.conf /etc/nginx/nginx.conf
-# Run application
-CMD ["npm", "start"]
\ No newline at end of file
+# Copy Next.js routes manifest for proper routing
+COPY --from=builder /app/.next/routes-manifest.json /usr/share/nginx/html/_next/
+
+# Copy main HTML files to root
+COPY --from=builder /app/.next/server/pages/index.html /usr/share/nginx/html/index.html
+COPY --from=builder /app/.next/server/pages/404.html /usr/share/nginx/html/404.html
+
+# Modify nginx config to use custom PID path
+RUN sed -i 's|pid /var/run/nginx.pid;|pid /var/run/nginx/nginx.pid;|' /etc/nginx/nginx.conf
+
+# Healthcheck
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl --fail http://localhost:80/healthz || exit 1
+
+# Run as root to avoid permission issues
+# USER nginx
+
+EXPOSE 80
+
+CMD ["nginx", "-g", "daemon off;"]
\ No newline at end of file
diff --git a/frontend/jest.config.js b/frontend/jest.config.js
new file mode 100644
index 0000000..b3b2c1a
--- /dev/null
+++ b/frontend/jest.config.js
@@ -0,0 +1,15 @@
+module.exports = {
+ collectCoverage: true,
+ coverageDirectory: "coverage",
+ coverageReporters: ["text", "lcov"],
+ coveragePathIgnorePatterns: [
+ "/node_modules/",
+ "/.next/",
+ "/__tests__/",
+ "jest.config.js"
+ ],
+ testEnvironment: "jest-environment-jsdom",
+  moduleNameMapper: {
+    "^@/(.*)$": "<rootDir>/src/$1"
+  }
+};
\ No newline at end of file
diff --git a/frontend/nginx.conf b/frontend/nginx.conf
new file mode 100644
index 0000000..91de7fe
--- /dev/null
+++ b/frontend/nginx.conf
@@ -0,0 +1,45 @@
+worker_processes auto;
+
+events {
+ worker_connections 1024;
+}
+
+http {
+ include /etc/nginx/mime.types;
+ default_type application/octet-stream;
+
+ sendfile on;
+ keepalive_timeout 65;
+ gzip on;
+ gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
+
+ server {
+ listen 80;
+ server_name localhost;
+
+ location / {
+ root /usr/share/nginx/html;
+ index index.html;
+ try_files $uri $uri/ /index.html;
+
+ # Cache control for static assets
+ location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
+ expires 1y;
+ add_header Cache-Control "public, immutable";
+ }
+ }
+
+ # Next.js specific routes
+ location /_next/ {
+ alias /usr/share/nginx/html/_next/;
+ expires 365d;
+ add_header Cache-Control "public, max-age=31536000, immutable";
+ }
+
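+        # Assumed addition: proxy relative /api calls (used by the axios
+        # requests in Plans.jsx and Workouts.jsx) to the backend service on
+        # the compose network, so the browser never resolves "backend".
+        location /api/ {
+            proxy_pass http://backend:8000;
+            proxy_set_header Host $host;
+            proxy_set_header X-Real-IP $remote_addr;
+        }
+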
+ # Health check endpoint
+ location /healthz {
+ access_log off;
+ return 200 'ok';
+ }
+ }
+}
\ No newline at end of file
diff --git a/frontend/package.json b/frontend/package.json
index aeaefb2..6930ec7 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -8,14 +8,19 @@
"start": "next start",
"lint": "next lint",
"test": "jest",
- "test:watch": "jest --watch"
+ "test:watch": "jest --watch",
+ "test:coverage": "jest --coverage"
},
"dependencies": {
"@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.1",
+ "axios": "^1.7.2",
+ "date-fns": "^3.6.0",
"next": "14.2.3",
"react": "18.2.0",
"react-dom": "18.2.0",
+ "react-router-dom": "^6.22.3",
+ "react-toastify": "^10.0.4",
"recharts": "2.8.0"
},
"devDependencies": {
diff --git a/frontend/src/components/__tests__/LoadingSpinner.test.jsx b/frontend/src/components/__tests__/LoadingSpinner.test.jsx
new file mode 100644
index 0000000..84958d9
--- /dev/null
+++ b/frontend/src/components/__tests__/LoadingSpinner.test.jsx
@@ -0,0 +1,26 @@
+import { render, screen } from '@testing-library/react';
+import LoadingSpinner from '../LoadingSpinner';
+
+describe('LoadingSpinner Component', () => {
+ test('renders spinner with animation', () => {
+    render(<LoadingSpinner />);
+
+ // Check for the spinner container
+ const spinnerContainer = screen.getByRole('status');
+ expect(spinnerContainer).toBeInTheDocument();
+
+ // Verify animation classes
+ const spinnerElement = screen.getByTestId('loading-spinner');
+ expect(spinnerElement).toHaveClass('animate-spin');
+ expect(spinnerElement).toHaveClass('rounded-full');
+
+ // Check accessibility attributes
+ expect(spinnerElement).toHaveAttribute('aria-live', 'polite');
+ expect(spinnerElement).toHaveAttribute('aria-busy', 'true');
+ });
+
+ test('matches snapshot', () => {
+    const { asFragment } = render(<LoadingSpinner />);
+ expect(asFragment()).toMatchSnapshot();
+ });
+});
\ No newline at end of file
diff --git a/frontend/src/context/AuthContext.jsx b/frontend/src/context/AuthContext.jsx
index ac658f4..94cf7de 100644
--- a/frontend/src/context/AuthContext.jsx
+++ b/frontend/src/context/AuthContext.jsx
@@ -44,8 +44,19 @@ export const AuthProvider = ({ children }) => {
export const useAuth = () => {
const context = useContext(AuthContext);
+
+ // Return safe defaults during build time
+ if (typeof window === 'undefined') {
+ return {
+ apiKey: null,
+ authFetch: () => {},
+ loading: false
+ };
+ }
+
if (!context) {
throw new Error('useAuth must be used within an AuthProvider');
}
+
return context;
};
\ No newline at end of file
diff --git a/frontend/src/pages/Dashboard.jsx b/frontend/src/pages/Dashboard.jsx
index dd37015..b723092 100644
--- a/frontend/src/pages/Dashboard.jsx
+++ b/frontend/src/pages/Dashboard.jsx
@@ -7,6 +7,7 @@ import LoadingSpinner from '../components/LoadingSpinner';
const Dashboard = () => {
const { apiKey, loading: apiLoading } = useAuth();
+ const isBuildTime = typeof window === 'undefined';
const [recentWorkouts, setRecentWorkouts] = useState([]);
const [currentPlan, setCurrentPlan] = useState(null);
const [stats, setStats] = useState({ totalWorkouts: 0, totalDistance: 0 });
@@ -18,16 +19,16 @@ const Dashboard = () => {
const fetchDashboardData = async () => {
try {
const [workoutsRes, planRes, statsRes, healthRes] = await Promise.all([
- fetch('/api/workouts?limit=3', {
+ fetch(`${process.env.REACT_APP_API_URL}/api/workouts?limit=3`, {
headers: { 'X-API-Key': apiKey }
}),
- fetch('/api/plans/active', {
+ fetch(`${process.env.REACT_APP_API_URL}/api/plans/active`, {
headers: { 'X-API-Key': apiKey }
}),
- fetch('/api/stats', {
+ fetch(`${process.env.REACT_APP_API_URL}/api/stats`, {
headers: { 'X-API-Key': apiKey }
}),
- fetch('/api/health', {
+ fetch(`${process.env.REACT_APP_API_URL}/api/health`, {
headers: { 'X-API-Key': apiKey }
})
]);
@@ -61,6 +62,17 @@ const Dashboard = () => {
fetchDashboardData();
}, [apiKey]);
+  if (isBuildTime) {
+    return (
+      <div>
+        <h1>Training Dashboard</h1>
+        <p>Loading dashboard data...</p>
+      </div>
+    );
+  }
+
  if (localLoading || apiLoading) return <LoadingSpinner />;
  if (error) return <div>{error}</div>;
diff --git a/frontend/src/pages/Plans.jsx b/frontend/src/pages/Plans.jsx
index 0f047ce..b8b4cbe 100644
--- a/frontend/src/pages/Plans.jsx
+++ b/frontend/src/pages/Plans.jsx
@@ -9,8 +9,11 @@ const Plans = () => {
const [selectedPlan, setSelectedPlan] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState('');
+
+ const isBuildTime = typeof window === 'undefined';
useEffect(() => {
+ if (isBuildTime) return;
const fetchPlans = async () => {
try {
const response = await axios.get('/api/plans', {
@@ -30,6 +33,17 @@ const Plans = () => {
fetchPlans();
}, [apiKey]);
+  if (isBuildTime) {
+    return (
+      <div>
+        <h1>Training Plans</h1>
+        <p>Loading training plans...</p>
+      </div>
+    );
+  }
+
  if (loading) return <div>Loading plans...</div>;
  if (error) return <div>{error}</div>;
diff --git a/frontend/src/pages/RoutesPage.jsx b/frontend/src/pages/RoutesPage.jsx
index ed563af..1451ece 100644
--- a/frontend/src/pages/RoutesPage.jsx
+++ b/frontend/src/pages/RoutesPage.jsx
@@ -2,6 +2,18 @@ import { useAuth } from '../context/AuthContext';
const RoutesPage = () => {
const { apiKey } = useAuth();
+
+ // Handle build-time case where apiKey is undefined
+  if (typeof window === 'undefined') {
+    return (
+      <div>
+        <h1>Routes</h1>
+        <p>Loading route management...</p>
+      </div>
+    );
+  }
return (
diff --git a/frontend/src/pages/Workouts.jsx b/frontend/src/pages/Workouts.jsx
index 33980dc..e46bd23 100644
--- a/frontend/src/pages/Workouts.jsx
+++ b/frontend/src/pages/Workouts.jsx
@@ -9,8 +9,11 @@ const Workouts = () => {
const [selectedWorkout, setSelectedWorkout] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState('');
-
+
+ const isBuildTime = typeof window === 'undefined';
+
useEffect(() => {
+ if (isBuildTime) return;
const fetchWorkouts = async () => {
try {
const response = await axios.get('/api/workouts', {
@@ -27,6 +30,17 @@ const Workouts = () => {
fetchWorkouts();
}, [apiKey]);
+  if (isBuildTime) {
+    return (
+      <div>
+        <h1>Workouts</h1>
+        <p>Loading workout data...</p>
+      </div>
+    );
+  }
+
  if (loading) return <div>Loading workouts...</div>;
  if (error) return <div>{error}</div>;
;