diff --git a/CL_backendfixes.md b/CL_backendfixes.md
new file mode 100644
index 0000000..b098283
--- /dev/null
+++ b/CL_backendfixes.md
@@ -0,0 +1,261 @@
+# 🎯 **Backend Implementation TODO List**
+
+## **Priority 1: Core API Gaps (Essential)**
+
+### **1.1 Plan Generation Endpoint**
+- [ ] **Add plan generation endpoint** in `app/routes/plan.py`
+  ```python
+  @router.post("/generate", response_model=PlanSchema)
+  async def generate_plan(
+      plan_request: PlanGenerationRequest,
+      db: AsyncSession = Depends(get_db)
+  ):
+      ...
+  ```
+- [ ] **Create PlanGenerationRequest schema** in `app/schemas/plan.py`
+  ```python
+  class PlanGenerationRequest(BaseModel):
+      rule_ids: List[UUID]
+      goals: Dict[str, Any]
+      user_preferences: Optional[Dict[str, Any]] = None
+      duration_weeks: int = 12
+  ```
+- [ ] **Update AIService.generate_plan()** to fetch the referenced rules from the DB
+- [ ] **Add validation** for rule compatibility and goal requirements
+- [ ] **Add tests** for the plan generation workflow
+
+### **1.2 Rule Parsing API**
+- [ ] **Add natural-language rule parsing endpoint** in `app/routes/rule.py`
+  ```python
+  @router.post("/parse-natural-language")
+  async def parse_natural_language_rules(
+      request: NaturalLanguageRuleRequest,
+      db: AsyncSession = Depends(get_db)
+  ):
+      ...
+  ```
+- [ ] **Create request/response schemas** in `app/schemas/rule.py`
+  ```python
+  class NaturalLanguageRuleRequest(BaseModel):
+      natural_language_text: str
+      rule_name: str
+
+  class ParsedRuleResponse(BaseModel):
+      parsed_rules: Dict[str, Any]
+      confidence_score: Optional[float]
+      suggestions: Optional[List[str]]
+  ```
+- [ ] **Enhance AIService.parse_rules_from_natural_language()** with better error handling
+- [ ] **Add rule validation** after parsing
+- [ ] **Add a preview mode** before saving parsed rules
+
+### **1.3 Section Integration with GPX Parsing**
+- [ ] **Update `app/services/gpx.py`** to create sections automatically (see the sketch after this list)
+  ```python
+  async def parse_gpx_with_sections(file_path: str, route_id: UUID, db: AsyncSession) -> dict:
+      # Parse GPX into segments
+      # Create Section records for each segment
+      # Return enhanced GPX data with section metadata
+      ...
+  ```
+- [ ] **Modify `app/routes/gpx.py`** to create sections after route creation
+- [ ] **Add section creation logic** to the GPX upload workflow
+- [ ] **Update the Section model** to include more GPX-derived metadata
+- [ ] **Add section querying endpoints** for route visualization
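+
+A minimal sketch of the segment-splitting step, assuming `gpxpy` does the parsing; the helper name, the fixed 5 km section length, and the returned field names are illustrative, not the final design:
+
+  ```python
+  import gpxpy
+
+  def split_into_sections(gpx_path: str, section_km: float = 5.0) -> list[dict]:
+      """Split a GPX track into fixed-length sections (hypothetical helper)."""
+      with open(gpx_path) as f:
+          gpx = gpxpy.parse(f)
+
+      sections: list[dict] = []
+      distance_km, start_km, gain_m = 0.0, 0.0, 0.0
+      prev = None
+      for track in gpx.tracks:
+          for segment in track.segments:
+              for point in segment.points:
+                  if prev is not None:
+                      # distance_3d returns meters between consecutive points
+                      distance_km += point.distance_3d(prev) / 1000.0
+                      if point.elevation and prev.elevation and point.elevation > prev.elevation:
+                          gain_m += point.elevation - prev.elevation
+                  prev = point
+                  if distance_km - start_km >= section_km:
+                      sections.append({"start_km": start_km, "end_km": distance_km, "elevation_gain": gain_m})
+                      start_km, gain_m = distance_km, 0.0
+      if distance_km > start_km:
+          sections.append({"start_km": start_km, "end_km": distance_km, "elevation_gain": gain_m})
+      return sections
+  ```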
+
+## **Priority 2: Data Model Enhancements**
+
+### **2.1 Missing Schema Fields**
+- [ ] **Add missing fields to User model** in `app/models/user.py`
+  ```python
+  class User(BaseModel):
+      name = Column(String)
+      email = Column(String)
+      fitness_level = Column(String)
+      preferences = Column(JSON)
+  ```
+- [ ] **Enhance Plan model** with additional metadata
+  ```python
+  class Plan(BaseModel):
+      user_id = Column(ForeignKey("users.id"), nullable=True)
+      name = Column(String, nullable=False)
+      description = Column(String)
+      start_date = Column(Date)
+      end_date = Column(Date)
+      goal_type = Column(String)
+      active = Column(Boolean, default=True)
+  ```
+- [ ] **Add plan-rule relationship table** (already exists, but ensure proper usage)
+- [ ] **Update all schemas** to match the enhanced models
+
+### **2.2 Database Relationships**
+- [ ] **Fix User-Plan relationship** in models
+- [ ] **Add cascade delete rules** where appropriate
+- [ ] **Add database constraints** for data integrity
+- [ ] **Create missing indexes** for performance
+  ```sql
+  CREATE INDEX idx_workouts_garmin_activity_id ON workouts(garmin_activity_id);
+  CREATE INDEX idx_plans_user_active ON plans(user_id, active);
+  CREATE INDEX idx_analyses_workout_approved ON analyses(workout_id, approved);
+  ```
+
+## **Priority 3: API Completeness**
+
+### **3.1 Export/Import Functionality**
+- [ ] **Create export service** `app/services/export_import.py`
+  ```python
+  class ExportImportService:
+      async def export_user_data(self, user_id: UUID) -> bytes: ...
+      async def export_routes(self) -> bytes: ...
+      async def import_user_data(self, data: bytes, user_id: UUID) -> None: ...
+  ```
+- [ ] **Add export endpoints** in new `app/routes/export.py`
+  ```python
+  @router.get("/export/routes")
+  @router.get("/export/plans/{plan_id}")
+  @router.get("/export/user-data")
+  @router.post("/import/routes")
+  @router.post("/import/plans")
+  ```
+- [ ] **Support multiple formats** (JSON, GPX, ZIP)
+- [ ] **Add data validation** for imports
+- [ ] **Handle version compatibility** for imports
+
+### **3.2 Enhanced Dashboard API**
+- [ ] **Expand dashboard data** in `app/routes/dashboard.py`
+  ```python
+  @router.get("/metrics/weekly")
+  @router.get("/metrics/monthly")
+  @router.get("/progress/{plan_id}")
+  @router.get("/upcoming-workouts")
+  ```
+- [ ] **Add aggregation queries** for metrics
+- [ ] **Cache dashboard data** for performance
+- [ ] **Add real-time updates** capability
+
+### **3.3 Advanced Workout Features**
+- [ ] **Add workout comparison endpoint**
+  ```python
+  @router.get("/workouts/{workout_id}/compare/{compare_workout_id}")
+  ```
+- [ ] **Add workout search/filtering**
+  ```python
+  @router.get("/workouts/search")
+  async def search_workouts(
+      activity_type: Optional[str] = None,
+      date_range: Optional[DateRange] = None,
+      power_range: Optional[PowerRange] = None
+  ):
+      ...
+  ```
+- [ ] **Add bulk workout operations**
+- [ ] **Add workout tagging system**
+
+## **Priority 4: Service Layer Improvements**
+
+### **4.1 AI Service Enhancements**
+- [ ] **Add prompt caching** to reduce API calls
+- [ ] **Implement prompt A/B testing** framework
+- [ ] **Add AI response validation** and confidence scoring
+- [ ] **Create AI service health checks**
+- [ ] **Add fallback mechanisms** for AI failures
+- [ ] **Implement rate limiting** for AI calls
+- [ ] **Add cost tracking** for AI API usage
+
+### **4.2 Garmin Service Improvements**
+- [ ] **Add incremental sync** instead of full sync
+- [ ] **Implement activity deduplication** logic
+- [ ] **Add webhook support** for real-time sync
+- [ ] **Enhance error recovery** for failed syncs
+- [ ] **Add activity type filtering**
+- [ ] **Support multiple Garmin accounts** per user
+
+### **4.3 Plan Evolution Enhancements**
+- [ ] **Add plan comparison** functionality
+- [ ] **Implement plan rollback** mechanism
+- [ ] **Add plan branching** for different scenarios
+- [ ] **Create plan templates** system
+- [ ] **Add automated plan adjustments** based on performance
+
+## **Priority 5: Validation & Error Handling**
+
+### **5.1 Input Validation**
+- [ ] **Add comprehensive Pydantic validators** for all schemas
+- [ ] **Validate GPX file integrity** before processing
+- [ ] **Add business rule validation** (e.g., plan dates, workout conflicts)
+- [ ] **Validate AI responses** before storing
+- [ ] **Add file size/type restrictions**
+
+### **5.2 Error Handling**
+- [ ] **Create custom exception hierarchy**
+  ```python
+  class CyclingCoachException(Exception):
+      """Base class for domain-specific errors."""
+
+  class GarminSyncError(CyclingCoachException):
+      """Raised when a Garmin sync fails."""
+
+  class AIServiceError(CyclingCoachException):
+      """Raised when an AI call fails or returns invalid output."""
+
+  class PlanGenerationError(CyclingCoachException):
+      """Raised when plan generation cannot complete."""
+  ```
+- [ ] **Add global exception handler** (see the sketch after this list)
+- [ ] **Improve error messages** for user feedback
+- [ ] **Add error recovery mechanisms**
+- [ ] **Log errors with context** for debugging
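+
+A minimal sketch of the global handler, assuming the FastAPI `app` object lives in `app/main.py`; the status-code mapping is illustrative:
+
+  ```python
+  from fastapi import Request
+  from fastapi.responses import JSONResponse
+
+  @app.exception_handler(CyclingCoachException)
+  async def cycling_coach_exception_handler(request: Request, exc: CyclingCoachException):
+      # Upstream failures (Garmin, AI) surface as 502; everything else as 400
+      status = 502 if isinstance(exc, (GarminSyncError, AIServiceError)) else 400
+      return JSONResponse(
+          status_code=status,
+          content={"error": exc.__class__.__name__, "detail": str(exc)},
+      )
+  ```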
+
+## **Priority 6: Performance & Monitoring**
+
+### **6.1 Performance Optimizations**
+- [ ] **Add database query optimization**
+- [ ] **Implement caching** for frequently accessed data
+- [ ] **Add connection pooling** configuration
+- [ ] **Optimize GPX file parsing** for large files
+- [ ] **Add pagination** to list endpoints
+- [ ] **Implement background job queue** for long-running tasks
+
+### **6.2 Enhanced Monitoring**
+- [ ] **Add application metrics** (response times, error rates)
+- [ ] **Create health check dependencies**
+- [ ] **Add performance profiling** endpoints
+- [ ] **Implement alerting** for critical errors
+- [ ] **Add audit logging** for data changes
+
+## **Priority 7: Security & Configuration**
+
+### **7.1 Security Improvements**
+- [ ] **Implement user authentication/authorization**
+- [ ] **Add rate limiting** to prevent abuse
+- [ ] **Validate file uploads** for security
+- [ ] **Configure CORS** properly
+- [ ] **Implement request/response logging** (without sensitive data)
+- [ ] **Add API versioning** support
+
+### **7.2 Configuration Management**
+- [ ] **Add environment-specific configs**
+- [ ] **Validate configuration** on startup
+- [ ] **Add feature flags** system
+- [ ] **Implement secrets management**
+- [ ] **Add configuration reload** without restart
+
+## **Priority 8: Testing & Documentation**
+
+### **8.1 Testing**
+- [ ] **Create comprehensive test suite**
+  - Unit tests for services
+  - Integration tests for API endpoints
+  - Database migration tests
+  - AI service mock tests
+- [ ] **Add test fixtures** for common data
+- [ ] **Implement test database** setup/teardown
+- [ ] **Add performance tests** for critical paths
+- [ ] **Create end-to-end tests** for workflows
+
+### **8.2 Documentation**
+- [ ] **Generate OpenAPI documentation**
+- [ ] **Add endpoint documentation** with examples
+- [ ] **Create service documentation**
+- [ ] **Document deployment procedures**
+- [ ] **Add troubleshooting guides**
+
+---
+
+## **🎯 Recommended Implementation Order:**
+
+1. **Week 1:** Priority 1 (Core API gaps) - Essential for feature completeness
+2. **Week 2:** Priority 2 (Data model) + Priority 5.1 (Validation) - Foundation improvements
+3. **Week 3:** Priority 3.1 (Export/Import) + Priority 4.1 (AI improvements) - User-facing features
+4. **Week 4:** Priority 6 (Performance) + Priority 8.1 (Testing) - Production readiness
+
+This TODO list brings the backend to full design-doc compliance and adds the hardening needed for production. 🚀
\ No newline at end of file
diff --git a/CL_frontendfixes.md b/CL_frontendfixes.md
new file mode 100644
index 0000000..84e7897
--- /dev/null
+++ b/CL_frontendfixes.md
@@ -0,0 +1,255 @@
+# Frontend Development TODO List
+
+## 🚨 Critical Missing Features (High Priority)
+
+### 1. Rules Management System
+- [ ] **Create Rules page component** (`/src/pages/Rules.jsx`)
+  - [ ] Natural language textarea editor
+  - [ ] AI parsing button with loading state
+  - [ ] JSON preview pane with syntax highlighting
+  - [ ] Rule validation feedback
+  - [ ] Save/cancel actions
+- [ ] **Create RuleEditor component** (`/src/components/rules/RuleEditor.jsx`)
+  - [ ] Rich text input with auto-resize
+  - [ ] Character count and validation
+  - [ ] Template suggestions dropdown
+- [ ] **Create RulePreview component** (`/src/components/rules/RulePreview.jsx`)
+  - [ ] JSON syntax highlighting (use `react-json-view`)
+  - [ ] Editable JSON with validation
+  - [ ] Diff view for rule changes
+- [ ] **Create RulesList component** (`/src/components/rules/RulesList.jsx`)
+  - [ ] Rule set selection dropdown
+  - [ ] Version history per rule set
+  - [ ] Delete/duplicate rule sets
+- [ ] **API Integration**
+  - [ ] `POST /api/rules` - Create new rule set
+  - [ ] `PUT /api/rules/{id}` - Update rule set
+  - [ ] `GET /api/rules` - List all rule sets
+  - [ ] `POST /api/rules/{id}/parse` - AI parsing endpoint
+
+### 2. Plan Generation Workflow
+- [ ] **Create PlanGeneration page** (`/src/pages/PlanGeneration.jsx`)
+  - [ ] Goal selection interface
+  - [ ] Rule set selection
+  - [ ] Plan parameters (duration, weekly hours)
+  - [ ] Progress tracking for AI generation
+- [ ] **Create GoalSelector component** (`/src/components/plans/GoalSelector.jsx`)
+  - [ ] Predefined goal templates
+  - [ ] Custom goal input
+  - [ ] Goal validation
+- [ ] **Create PlanParameters component** (`/src/components/plans/PlanParameters.jsx`)
+  - [ ] Duration slider (4-20 weeks)
+  - [ ] Weekly hours slider (5-15 hours)
+  - [ ] Difficulty level selection
+  - [ ] Available days checkboxes
+- [ ] **Enhance PlanTimeline component**
+  - [ ] Week-by-week breakdown
+  - [ ] Expandable workout detail cards
+  - [ ] Progress tracking indicators
+  - [ ] Edit individual workouts
+- [ ] **API Integration**
+  - [ ] `POST /api/plans/generate` - Generate new plan
+  - [ ] `GET /api/plans/{id}/preview` - Preview before saving
+  - [ ] Plan generation status polling
+
+### 3. Route Management & Visualization
+- [ ] **Enhance RoutesPage** (`/src/pages/RoutesPage.jsx`)
+  - [ ] Route list with metadata
+  - [ ] GPX file upload integration
+  - [ ] Route preview cards
+  - [ ] Search and filter functionality
+- [ ] **Create RouteVisualization component** (`/src/components/routes/RouteVisualization.jsx`) - see the sketch after this section
+  - [ ] Interactive map (use Leaflet.js)
+  - [ ] GPX track overlay
+  - [ ] Elevation profile chart
+  - [ ] Distance markers
+- [ ] **Create RouteMetadata component** (`/src/components/routes/RouteMetadata.jsx`)
+  - [ ] Distance, elevation gain, grade analysis
+  - [ ] Estimated time calculations
+  - [ ] Difficulty rating
+  - [ ] Notes/description editing
+- [ ] **Create SectionManager component** (`/src/components/routes/SectionManager.jsx`)
+  - [ ] Split routes into sections
+  - [ ] Section-specific metadata
+  - [ ] Gear recommendations per section
+- [ ] **Dependencies to add**
+  - [ ] `npm install leaflet react-leaflet`
+  - [ ] GPX parsing library integration
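+
+A minimal sketch of the map piece of RouteVisualization, assuming `react-leaflet` v4 and that the parsed track arrives as an array of `[lat, lng]` pairs (the `points` prop name is an assumption):
+
+```jsx
+import { MapContainer, TileLayer, Polyline } from 'react-leaflet';
+import 'leaflet/dist/leaflet.css';
+
+export default function RouteMap({ points }) {
+  if (!points?.length) return <p>No track loaded</p>;
+  return (
+    // Center on the first track point; OSM tiles as a placeholder layer
+    <MapContainer center={points[0]} zoom={12} style={{ height: 400 }}>
+      <TileLayer url="https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png" />
+      <Polyline positions={points} />
+    </MapContainer>
+  );
+}
+```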
+### 4. Export/Import System
+- [ ] **Create ExportImport page** (`/src/pages/ExportImport.jsx`)
+  - [ ] Export options (JSON, ZIP)
+  - [ ] Import validation
+  - [ ] Bulk operations
+- [ ] **Create DataExporter component** (`/src/components/export/DataExporter.jsx`)
+  - [ ] Selective export (routes, rules, plans)
+  - [ ] Format selection (JSON, GPX, ZIP)
+  - [ ] Export progress tracking
+- [ ] **Create DataImporter component** (`/src/components/export/DataImporter.jsx`)
+  - [ ] File validation and preview
+  - [ ] Conflict resolution interface
+  - [ ] Import progress tracking
+- [ ] **API Integration**
+  - [ ] `GET /api/export` - Generate export package
+  - [ ] `POST /api/import` - Import data package
+  - [ ] `POST /api/import/validate` - Validate before import
+
+## 🔧 Code Quality & Architecture Improvements
+
+### 5. Enhanced Error Handling
+- [ ] **Create GlobalErrorHandler** (`/src/components/GlobalErrorHandler.jsx`)
+  - [ ] Centralized error logging
+  - [ ] User-friendly error messages
+  - [ ] Retry mechanisms
+- [ ] **Improve API error handling**
+  - [ ] Consistent error response format
+  - [ ] Network error recovery
+  - [ ] Timeout handling
+- [ ] **Add error boundaries**
+  - [ ] Page-level error boundaries
+  - [ ] Component-level error recovery
+
+### 6. State Management Improvements
+- [ ] **Enhance AuthContext**
+  - [ ] Add user preferences
+  - [ ] API caching layer
+  - [ ] Offline capability detection
+- [ ] **Create AppStateContext** (`/src/context/AppStateContext.jsx`)
+  - [ ] Global loading states
+  - [ ] Toast notifications
+  - [ ] Modal management
+- [ ] **Add React Query** (optional but recommended)
+  - [ ] `npm install @tanstack/react-query`
+  - [ ] API data caching
+  - [ ] Background refetching
+  - [ ] Optimistic updates
+
+### 7. UI/UX Enhancements
+- [ ] **Improve responsive design**
+  - [ ] Better mobile navigation
+  - [ ] Touch-friendly interactions
+  - [ ] Responsive charts and maps
+- [ ] **Add loading skeletons**
+  - [ ] Replace generic spinners
+  - [ ] Component-specific skeletons
+  - [ ] Progressive loading
+- [ ] **Create ConfirmDialog component** (`/src/components/ui/ConfirmDialog.jsx`)
+  - [ ] Delete confirmations
+  - [ ] Destructive action warnings
+  - [ ] Custom confirmation messages
+- [ ] **Add keyboard shortcuts**
+  - [ ] Navigation shortcuts
+  - [ ] Action shortcuts
+  - [ ] Help overlay
+
+## 🧪 Testing & Quality Assurance
+
+### 8. Testing Infrastructure
+- [ ] **Expand component tests**
+  - [ ] Rules management tests
+  - [ ] Plan generation tests
+  - [ ] Route visualization tests
+- [ ] **Add integration tests**
+  - [ ] API integration tests
+  - [ ] User workflow tests
+  - [ ] Error scenario tests
+- [ ] **Performance testing**
+  - [ ] Large dataset handling
+  - [ ] Chart rendering performance
+  - [ ] Memory leak detection
+
+### 9. Development Experience
+- [ ] **Add Storybook** (optional)
+  - [ ] Component documentation
+  - [ ] Design system documentation
+  - [ ] Interactive component testing
+- [ ] **Improve build process**
+  - [ ] Bundle size optimization
+  - [ ] Dead code elimination
+  - [ ] Tree shaking verification
+- [ ] **Add development tools**
+  - [ ] React DevTools integration
+  - [ ] Performance monitoring
+  - [ ] Bundle analyzer
+
+## 📚 Documentation & Dependencies
+
+### 10. Missing Dependencies
+```json
+{
+  "leaflet": "^1.9.4",
+  "react-leaflet": "^4.2.1",
+  "react-json-view": "^1.21.3",
+  "@tanstack/react-query": "^4.32.0",
+  "react-hook-form": "^7.45.0",
+  "react-select": "^5.7.4",
+  "file-saver": "^2.0.5"
+}
+```
+
+### 11. Configuration Files
+- [ ] **Create environment config** (`/src/config/index.js`)
+  - [ ] API endpoints configuration
+  - [ ] Feature flags
+  - [ ] Environment-specific settings
+- [ ] **Add TypeScript support** (optional)
+  - [ ] Convert critical components
+  - [ ] Add type definitions
+  - [ ] Improve IDE support
+
+## 🚀 Deployment & Performance
+
+### 12. Production Readiness
+- [ ] **Optimize bundle size**
+  - [ ] Code splitting implementation
+  - [ ] Lazy loading for routes
+  - [ ] Image optimization
+- [ ] **Add PWA features** (optional)
+  - [ ] Service worker
+  - [ ] Offline functionality
+  - [ ] App manifest
+- [ ] **Performance monitoring**
+  - [ ] Core Web Vitals tracking
+  - [ ] Error tracking integration
+  - [ ] User analytics
+
+## 📋 Implementation Priority
+
+### Phase 1 (Weeks 1-2): Core Missing Features
+1. Rules Management System
+2. Plan Generation Workflow
+3. Enhanced Route Management
+
+### Phase 2 (Week 3): Data Management
+1. Export/Import System
+2. Enhanced Error Handling
+3. State Management Improvements
+
+### Phase 3 (Week 4): Polish & Quality
+1. UI/UX Enhancements
+2. Testing Infrastructure
+3. Performance Optimization
+
+### Phase 4 (Ongoing): Maintenance
+1. Documentation
+2. Monitoring
+3. User Feedback Integration
+
+---
+
+## 🎯 Success Criteria
+
+- [ ] All design document workflows implemented
+- [ ] 90%+ component test coverage
+- [ ] Mobile-responsive design
+- [ ] Sub-3s initial page load
+- [ ] Accessibility compliance (WCAG 2.1 AA)
+- [ ] Cross-browser compatibility (Chrome, Firefox, Safari, Edge)
+
+## 📝 Notes
+
+- **Prioritize user-facing features** over internal architecture improvements
+- **Test each feature** as you implement it
+- **Consider Progressive Web App features** for offline functionality
+- **Plan for internationalization** if expanding globally
+- **Monitor bundle size** as you add dependencies
\ No newline at end of file
diff --git a/backend/app/routes/export.py b/backend/app/routes/export.py
new file mode 100644
index 0000000..c5c2ade
--- /dev/null
+++ b/backend/app/routes/export.py
@@ -0,0 +1,40 @@
+from fastapi import APIRouter, Query, HTTPException
+from fastapi.responses import FileResponse
+from app.services.export_service import ExportService
+from pathlib import Path
+import logging
+
+router = APIRouter()
+logger = logging.getLogger(__name__)
+
+@router.get("/export")
+async def export_data(
+    types: str = Query(..., description="Comma-separated list of data types to export"),
+    format: str = Query('json', description="Export format (json, zip, gpx)")
+):
+    valid_types = {'routes', 'rules', 'plans'}
+    requested_types = set(types.split(','))
+
+    # "all" expands to every supported type, matching the API spec
+    if requested_types == {'all'}:
+        requested_types = set(valid_types)
+
+    # Validate requested types
+    if not requested_types.issubset(valid_types):
+        raise HTTPException(
+            status_code=400,
+            detail=f"Invalid export types. 
Valid types are: {', '.join(valid_types)}" + ) + + try: + exporter = ExportService() + export_path = await exporter.create_export( + export_types=list(requested_types), + export_format=format + ) + + return FileResponse( + export_path, + media_type="application/zip" if format == 'zip' else "application/json", + filename=f"export_{'_'.join(requested_types)}.{format}" + ) + + except Exception as e: + logger.error(f"Export failed: {str(e)}") + raise HTTPException(status_code=500, detail="Export failed") from e \ No newline at end of file diff --git a/backend/app/routes/import.py b/backend/app/routes/import.py new file mode 100644 index 0000000..7e99147 --- /dev/null +++ b/backend/app/routes/import.py @@ -0,0 +1,38 @@ +from fastapi import APIRouter, UploadFile, File, Form, HTTPException +from fastapi.responses import JSONResponse +from app.services.import_service import ImportService +import logging +from typing import Optional + +router = APIRouter() +logger = logging.getLogger(__name__) + +@router.post("/import/validate") +async def validate_import( + file: UploadFile = File(...), +): + try: + importer = ImportService() + validation_result = await importer.validate_import(file) + return JSONResponse(content=validation_result) + except Exception as e: + logger.error(f"Import validation failed: {str(e)}") + raise HTTPException(status_code=400, detail=str(e)) from e + +@router.post("/import") +async def execute_import( + file: UploadFile = File(...), + conflict_resolution: str = Form("skip"), + resolutions: Optional[str] = Form(None), +): + try: + importer = ImportService() + import_result = await importer.execute_import( + file, + conflict_resolution, + resolutions + ) + return JSONResponse(content=import_result) + except Exception as e: + logger.error(f"Import failed: {str(e)}") + raise HTTPException(status_code=500, detail=str(e)) from e \ No newline at end of file diff --git a/backend/app/services/export_service.py b/backend/app/services/export_service.py new file mode 100644 index 0000000..3c975fc --- /dev/null +++ b/backend/app/services/export_service.py @@ -0,0 +1,138 @@ +import json +from pathlib import Path +from datetime import datetime +import zipfile +from app.database import SessionLocal +from app.models import Route, Rule, Plan +import tempfile +import logging +import shutil + +logger = logging.getLogger(__name__) + +class ExportService: + def __init__(self): + self.temp_dir = Path(tempfile.gettempdir()) / "cycling_exports" + self.temp_dir.mkdir(exist_ok=True) + + async def create_export(self, export_types, export_format): + """Main export creation entry point""" + export_data = await self._fetch_export_data(export_types) + export_path = self._generate_export_file(export_data, export_format, export_types) + return export_path + + async def _fetch_export_data(self, export_types): + """Fetch data from database based on requested types""" + db = SessionLocal() + try: + data = {} + + if 'routes' in export_types: + routes = db.query(Route).all() + data['routes'] = [self._serialize_route(r) for r in routes] + + if 'rules' in export_types: + rules = db.query(Rule).all() + data['rules'] = [self._serialize_rule(r) for r in rules] + + if 'plans' in export_types: + plans = db.query(Plan).all() + data['plans'] = [self._serialize_plan(p) for p in plans] + + return data + finally: + db.close() + + def _generate_export_file(self, data, format, types): + """Generate the export file in specified format""" + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + base_name = 
f"export_{'_'.join(types)}_{timestamp}"
+
+        if format == 'json':
+            return self._create_json_export(data, base_name)
+        elif format == 'zip':
+            return self._create_zip_export(data, base_name)
+        elif format == 'gpx':
+            return self._create_gpx_export(data, base_name)
+        else:
+            raise ValueError(f"Unsupported format: {format}")
+
+    def _create_json_export(self, data, base_name):
+        """Create single JSON file export"""
+        export_path = self.temp_dir / f"{base_name}.json"
+        with open(export_path, 'w') as f:
+            json.dump(data, f, indent=2)
+        return export_path
+
+    def _create_zip_export(self, data, base_name):
+        """Create ZIP archive with JSON and GPX files"""
+        zip_path = self.temp_dir / f"{base_name}.zip"
+        with zipfile.ZipFile(zip_path, 'w') as zipf:
+            # Add JSON data
+            json_path = self._create_json_export(data, base_name)
+            zipf.write(json_path, arcname=json_path.name)
+
+            # Add GPX files if exporting routes
+            if 'routes' in data:
+                gpx_dir = Path("/app/data/gpx")
+                for route in data['routes']:
+                    gpx_path = gpx_dir / route['gpx_file_path']
+                    if gpx_path.exists():
+                        zipf.write(gpx_path, arcname=f"gpx/{gpx_path.name}")
+
+        return zip_path
+
+    def _create_gpx_export(self, data, base_name):
+        """Export only GPX files from routes"""
+        if 'routes' not in data:
+            raise ValueError("GPX export requires routes to be selected")
+
+        zip_path = self.temp_dir / f"{base_name}.zip"
+        with zipfile.ZipFile(zip_path, 'w') as zipf:
+            gpx_dir = Path("/app/data/gpx")
+            for route in data['routes']:
+                gpx_path = gpx_dir / route['gpx_file_path']
+                if gpx_path.exists():
+                    zipf.write(gpx_path, arcname=gpx_path.name)
+
+        return zip_path
+
+    def _serialize_route(self, route):
+        return {
+            "id": route.id,
+            "name": route.name,
+            "description": route.description,
+            "category": route.category,
+            "gpx_file_path": route.gpx_file_path,
+            "created_at": route.created_at.isoformat(),
+            "updated_at": route.updated_at.isoformat()
+        }
+
+    def _serialize_rule(self, rule):
+        return {
+            "id": rule.id,
+            "name": rule.name,
+            "natural_language": rule.natural_language,
+            "jsonb_rules": rule.jsonb_rules,
+            "version": rule.version,
+            "created_at": rule.created_at.isoformat()
+        }
+
+    def _serialize_plan(self, plan):
+        return {
+            "id": plan.id,
+            "name": plan.name,
+            "jsonb_plan": plan.jsonb_plan,
+            "version": plan.version,
+            "created_at": plan.created_at.isoformat()
+        }
+
+    def cleanup_temp_files(self):
+        """Clean up temporary export files older than 1 hour"""
+        cutoff = datetime.now().timestamp() - 3600
+        for file in self.temp_dir.glob("*"):
+            if file.stat().st_mtime < cutoff:
+                try:
+                    file.unlink()
+                except Exception as e:
+                    logger.warning(f"Failed to clean up temp file {file}: {str(e)}")
\ No newline at end of file
diff --git a/backend/app/services/import_service.py b/backend/app/services/import_service.py
new file mode 100644
index 0000000..88c14c1
--- /dev/null
+++ b/backend/app/services/import_service.py
@@ -0,0 +1,259 @@
+import json
+import zipfile
+from pathlib import Path
+import tempfile
+from datetime import datetime
+from fastapi import UploadFile
+from app.database import SessionLocal
+from app.models import Route, Rule, Plan
+import shutil
+import logging
+from sqlalchemy import and_
+from typing import List, Optional
+
+logger = logging.getLogger(__name__)
+
+class ImportService:
+    def __init__(self):
+        self.temp_dir = Path(tempfile.gettempdir()) / "cycling_imports"
+        self.temp_dir.mkdir(exist_ok=True)
+
+    async def validate_import(self, file: UploadFile) -> dict:
+        """Validate import file and detect conflicts"""
+        try:
+            # Save uploaded file to temp location
+            file_path = self.temp_dir / file.filename
+            with open(file_path, "wb") as f:
+                shutil.copyfileobj(file.file, f)
+
+            # Extract data based on file type
+            if file.filename.endswith('.zip'):
+                data = self._process_zip_import(file_path)
+            elif file.filename.endswith('.json'):
+                data = self._process_json_import(file_path)
+            else:
+                raise ValueError("Unsupported file format")
+
+            # Detect conflicts
+            conflicts = []
+            if 'routes' in data:
+                conflicts += self._detect_route_conflicts(data['routes'])
+            if 'rules' in data:
+                conflicts += self._detect_rule_conflicts(data['rules'])
+            if 'plans' in data:
+                conflicts += self._detect_plan_conflicts(data['plans'])
+
+            return {
+                "valid": True,
+                "conflicts": conflicts,
+                "summary": {
+                    "routes": len(data.get('routes', [])),
+                    "rules": len(data.get('rules', [])),
+                    "plans": len(data.get('plans', []))
+                }
+            }
+
+        except Exception as e:
+            logger.error(f"Validation error: {str(e)}")
+            return {"valid": False, "error": str(e)}
+
+    async def execute_import(self, file: UploadFile,
+                             conflict_resolution: str,
+                             resolutions: Optional[str] = None) -> dict:
+        """Execute the import with specified conflict resolution"""
+        db = SessionLocal()
+        try:
+            # Process file
+            file_path = self.temp_dir / file.filename
+            with open(file_path, "wb") as f:
+                shutil.copyfileobj(file.file, f)
+
+            if file.filename.endswith('.zip'):
+                data = self._process_zip_import(file_path)
+                gpx_files = self._extract_gpx_files(file_path)
+            elif file.filename.endswith('.json'):
+                data = self._process_json_import(file_path)
+                gpx_files = []
+            else:
+                raise ValueError("Unsupported file format")
+
+            # Apply resolutions; the route passes them as an optional JSON string
+            resolution_list = json.loads(resolutions) if resolutions else []
+            resolution_map = {r['id']: r['action'] for r in resolution_list}
+
+            # Import data
+            results = {
+                "imported": {"routes": 0, "rules": 0, "plans": 0},
+                "skipped": {"routes": 0, "rules": 0, "plans": 0},
+                "errors": []
+            }
+
+            # Import routes
+            if 'routes' in data:
+                for route_data in data['routes']:
+                    action = resolution_map.get(route_data['id'], conflict_resolution)
+                    try:
+                        if self._should_import_route(route_data, action, db):
+                            self._import_route(route_data, db)
+                            results["imported"]["routes"] += 1
+                        else:
+                            results["skipped"]["routes"] += 1
+                    except Exception as e:
+                        results["errors"].append(f"Route {route_data['id']}: {str(e)}")
+
+            # Import rules
+            if 'rules' in data:
+                for rule_data in data['rules']:
+                    action = resolution_map.get(rule_data['id'], conflict_resolution)
+                    try:
+                        if self._should_import_rule(rule_data, action, db):
+                            self._import_rule(rule_data, db)
+                            results["imported"]["rules"] += 1
+                        else:
+                            results["skipped"]["rules"] += 1
+                    except Exception as e:
+                        results["errors"].append(f"Rule {rule_data['id']}: {str(e)}")
+
+            # Import plans
+            if 'plans' in data:
+                for plan_data in data['plans']:
+                    action = resolution_map.get(plan_data['id'], conflict_resolution)
+                    try:
+                        if self._should_import_plan(plan_data, action, db):
+                            self._import_plan(plan_data, db)
+                            results["imported"]["plans"] += 1
+                        else:
+                            results["skipped"]["plans"] += 1
+                    except Exception as e:
+                        results["errors"].append(f"Plan {plan_data['id']}: {str(e)}")
+
+            # Save GPX files
+            if gpx_files:
+                gpx_dir = Path("/app/data/gpx")
+                for gpx in gpx_files:
+                    shutil.move(gpx, gpx_dir / gpx.name)
+
+            db.commit()
+            return results
+
+        except Exception as e:
+            db.rollback()
+            logger.error(f"Import failed: {str(e)}")
+            return {"error": str(e)}
+        finally:
+            db.close()
+            self._cleanup_temp_files()
+
+    def _process_zip_import(self, file_path: Path) -> dict:
+        """Extract and process ZIP file import"""
+        data = {}
+        with 
zipfile.ZipFile(file_path, 'r') as zipf: + # Find data.json + json_files = [f for f in zipf.namelist() if f.endswith('.json')] + if not json_files: + raise ValueError("No JSON data found in ZIP file") + + with zipf.open(json_files[0]) as f: + data = json.load(f) + + return data + + def _process_json_import(self, file_path: Path) -> dict: + """Process JSON file import""" + with open(file_path) as f: + return json.load(f) + + def _extract_gpx_files(self, file_path: Path) -> List[Path]: + """Extract GPX files from ZIP archive""" + gpx_files = [] + extract_dir = self.temp_dir / "gpx" + extract_dir.mkdir(exist_ok=True) + + with zipfile.ZipFile(file_path, 'r') as zipf: + for file in zipf.namelist(): + if file.startswith('gpx/') and file.endswith('.gpx'): + zipf.extract(file, extract_dir) + gpx_files.append(extract_dir / file) + + return gpx_files + + def _detect_route_conflicts(self, routes: List[dict]) -> List[dict]: + conflicts = [] + db = SessionLocal() + try: + for route in routes: + existing = db.query(Route).filter( + (Route.id == route['id']) | + (Route.name == route['name']) + ).first() + + if existing: + conflict = { + "type": "route", + "id": route['id'], + "name": route['name'], + "existing_version": existing.updated_at, + "import_version": datetime.fromisoformat(route['updated_at']), + "resolution_options": ["overwrite", "rename", "skip"] + } + conflicts.append(conflict) + finally: + db.close() + return conflicts + + def _should_import_route(self, route_data: dict, action: str, db) -> bool: + existing = db.query(Route).filter( + (Route.id == route_data['id']) | + (Route.name == route_data['name']) + ).first() + + if not existing: + return True + + if action == 'overwrite': + return True + elif action == 'rename': + route_data['name'] = f"{route_data['name']} (Imported)" + return True + elif action == 'skip': + return False + + return False + + def _import_route(self, route_data: dict, db): + """Import a single route""" + existing = db.query(Route).get(route_data['id']) + if existing: + # Update existing route + existing.name = route_data['name'] + existing.description = route_data['description'] + existing.category = route_data['category'] + existing.gpx_file_path = route_data['gpx_file_path'] + existing.updated_at = datetime.fromisoformat(route_data['updated_at']) + else: + # Create new route + route = Route( + id=route_data['id'], + name=route_data['name'], + description=route_data['description'], + category=route_data['category'], + gpx_file_path=route_data['gpx_file_path'], + created_at=datetime.fromisoformat(route_data['created_at']), + updated_at=datetime.fromisoformat(route_data['updated_at']) + ) + db.add(route) + + # Similar methods for rules and plans would follow... 
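+
+    # A minimal sketch of those counterparts, mirroring the route methods
+    # above; field names follow the ExportService serializers, and the
+    # "(Imported)" rename suffix mirrors _should_import_route.
+    def _detect_rule_conflicts(self, rules: List[dict]) -> List[dict]:
+        return self._detect_conflicts(rules, Rule, "rule")
+
+    def _detect_plan_conflicts(self, plans: List[dict]) -> List[dict]:
+        return self._detect_conflicts(plans, Plan, "plan")
+
+    def _detect_conflicts(self, items: List[dict], model, type_name: str) -> List[dict]:
+        conflicts = []
+        db = SessionLocal()
+        try:
+            for item in items:
+                existing = db.query(model).filter(
+                    (model.id == item['id']) | (model.name == item['name'])
+                ).first()
+                if existing:
+                    conflicts.append({
+                        "type": type_name,
+                        "id": item['id'],
+                        "name": item['name'],
+                        "existing_version": getattr(existing, 'version', None),
+                        "import_version": item.get('version'),
+                        "resolution_options": ["overwrite", "rename", "skip"]
+                    })
+        finally:
+            db.close()
+        return conflicts
+
+    def _should_import_rule(self, rule_data: dict, action: str, db) -> bool:
+        return self._should_import(rule_data, Rule, action, db)
+
+    def _should_import_plan(self, plan_data: dict, action: str, db) -> bool:
+        return self._should_import(plan_data, Plan, action, db)
+
+    def _should_import(self, data: dict, model, action: str, db) -> bool:
+        existing = db.query(model).filter(
+            (model.id == data['id']) | (model.name == data['name'])
+        ).first()
+        if not existing:
+            return True
+        if action == 'overwrite':
+            return True
+        if action == 'rename':
+            data['name'] = f"{data['name']} (Imported)"
+            return True
+        return False
+
+    def _import_rule(self, rule_data: dict, db):
+        """Create or update a single rule set."""
+        existing = db.query(Rule).get(rule_data['id'])
+        if existing:
+            existing.name = rule_data['name']
+            existing.natural_language = rule_data['natural_language']
+            existing.jsonb_rules = rule_data['jsonb_rules']
+            existing.version = rule_data['version']
+        else:
+            db.add(Rule(
+                id=rule_data['id'],
+                name=rule_data['name'],
+                natural_language=rule_data['natural_language'],
+                jsonb_rules=rule_data['jsonb_rules'],
+                version=rule_data['version'],
+                created_at=datetime.fromisoformat(rule_data['created_at'])
+            ))
+
+    def _import_plan(self, plan_data: dict, db):
+        """Create or update a single plan."""
+        existing = db.query(Plan).get(plan_data['id'])
+        if existing:
+            existing.name = plan_data['name']
+            existing.jsonb_plan = plan_data['jsonb_plan']
+            existing.version = plan_data['version']
+        else:
+            db.add(Plan(
+                id=plan_data['id'],
+                name=plan_data['name'],
+                jsonb_plan=plan_data['jsonb_plan'],
+                version=plan_data['version'],
+                created_at=datetime.fromisoformat(plan_data['created_at'])
+            ))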
+ + def _cleanup_temp_files(self): + """Clean up temporary files older than 1 hour""" + cutoff = datetime.now().timestamp() - 3600 + for file in self.temp_dir.glob("*"): + if file.stat().st_mtime < cutoff: + try: + if file.is_dir(): + shutil.rmtree(file) + else: + file.unlink() + except Exception as e: + logger.warning(f"Failed to clean temp file {file}: {str(e)}") \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 2dbc303..948c893 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -26,11 +26,14 @@ services: start_period: 40s frontend: - build: ./frontend + build: + context: ./frontend + args: + - REACT_APP_API_URL=http://backend:8000 ports: - "8888:80" environment: - - REACT_APP_API_URL=http://backend:8000 + - REACT_APP_CONTAINER_API_URL=http://backend:8000 - REACT_APP_API_KEY=${API_KEY} db: diff --git a/export_import_api_spec.md b/export_import_api_spec.md new file mode 100644 index 0000000..eb9fb77 --- /dev/null +++ b/export_import_api_spec.md @@ -0,0 +1,91 @@ +# Export/Import API Specification + +## Export Endpoint +`GET /api/export` + +### Parameters (query string) +- `types` (required): Comma-separated list of data types to export + - Valid values: `routes`, `rules`, `plans`, `all` +- `format` (required): Export format + - `json`: Single JSON file + - `zip`: ZIP archive with separate files + - `gpx`: Only GPX files (routes only) + +### Response +- `200 OK` with file download +- `400 Bad Request` for invalid parameters +- `500 Internal Server Error` for export failures + +### Example +```http +GET /api/export?types=routes,plans&format=zip +``` + +--- + +## Import Validation +`POST /api/import/validate` + +### Request +- Multipart form with `file` field containing import data + +### Response +```json +{ + "valid": true, + "conflicts": [ + { + "type": "route", + "id": 123, + "name": "Mountain Loop", + "existing_version": 2, + "import_version": 3, + "resolution_options": ["overwrite", "rename", "skip"] + } + ], + "summary": { + "routes": 15, + "rules": 3, + "plans": 2 + } +} +``` + +--- + +## Import Execution +`POST /api/import` + +### Request +- Multipart form with: + - `file`: Import data file + - `conflict_resolution`: Global strategy (overwrite, skip, rename) + - `resolutions`: JSON array of specific resolutions (optional) + ```json + [{"type": "route", "id": 123, "action": "overwrite"}] + ``` + +### Response +```json +{ + "imported": { + "routes": 12, + "rules": 3, + "plans": 2 + }, + "skipped": { + "routes": 3, + "rules": 0, + "plans": 0 + }, + "errors": [] +} +``` + +### Status Codes +- `200 OK`: Import completed +- `202 Accepted`: Import in progress (async) +- `400 Bad Request`: Invalid input +- `409 Conflict`: Unresolved conflicts + +--- \ No newline at end of file diff --git a/export_import_frontend_spec.md b/export_import_frontend_spec.md new file mode 100644 index 0000000..db29970 --- /dev/null +++ b/export_import_frontend_spec.md @@ -0,0 +1,221 @@ +# Export/Import Frontend Implementation + +## File Structure +``` +src/pages/ + ExportImport.jsx # Main page + +src/components/export/ + DataExporter.jsx # Export functionality + DataImporter.jsx # Import functionality + ConflictDialog.jsx # Conflict resolution UI + ImportSummary.jsx # Post-import report +``` + +## Component Specifications + +### ExportImport.jsx +```jsx +import { useState } from 'react'; +import DataExporter from '../components/export/DataExporter'; +import DataImporter from '../components/export/DataImporter'; + +export default function ExportImportPage() { + const 
[activeTab, setActiveTab] = useState('export');
+
+  return (
+    <div className="export-import-page">
+      <nav className="tabs">
+        <button onClick={() => setActiveTab('export')}>Export</button>
+        <button onClick={() => setActiveTab('import')}>Import</button>
+      </nav>
+      {activeTab === 'export' ? <DataExporter /> : <DataImporter />}
+    </div>
+  );
+}
+```
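+
+### DataExporter.jsx
+A minimal sketch against `GET /api/export`; the query parameter names follow the API spec, while component state and handler names are assumptions:
+```jsx
+import { useState } from 'react';
+
+export default function DataExporter() {
+  const [types, setTypes] = useState(['routes']);
+  const [format, setFormat] = useState('json');
+
+  // Trigger a file download for the selected types/format
+  const handleExport = () => {
+    const query = new URLSearchParams({ types: types.join(','), format });
+    window.location.href = `/api/export?${query}`;
+  };
+
+  return (
+    <div className="data-exporter">
+      {['routes', 'rules', 'plans'].map((t) => (
+        <label key={t}>
+          <input
+            type="checkbox"
+            checked={types.includes(t)}
+            onChange={() =>
+              setTypes(types.includes(t) ? types.filter((x) => x !== t) : [...types, t])
+            }
+          />
+          {t}
+        </label>
+      ))}
+      <select value={format} onChange={(e) => setFormat(e.target.value)}>
+        <option value="json">JSON</option>
+        <option value="zip">ZIP</option>
+        <option value="gpx">GPX (routes only)</option>
+      </select>
+      <button onClick={handleExport} disabled={!types.length}>Export</button>
+    </div>
+  );
+}
+```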
+
+### DataImporter.jsx (render excerpt)
+```jsx
+<p>
+  Found: {validation.summary.routes} routes,
+  {validation.summary.rules} rules,
+  {validation.summary.plans} plans
+</p>
+{validation.conflicts.length > 0 && (
+  <p>⚠️ {validation.conflicts.length} conflicts detected</p>
+)}
+```
+
+### ConflictDialog.jsx (render excerpt)
+```jsx
+<p>Existing version: {conflict.existing_version}</p>
+<p>Import version: {conflict.import_version}</p>
+```
+
+### Plan version history (render excerpt)
+```jsx
+<p>{version.trigger || 'Initial version'}</p>
+<p>
+  Created:{' '}
+  {new Date(selectedVersion.created_at).toLocaleString()}
+</p>
+{selectedVersion.changes_summary && (
+  <p>
+    Changes:{' '}
+    {selectedVersion.changes_summary}
+  </p>
+)}
+{selectedVersion.parent_plan_id && (
+  <p>
+    Parent Version:{' '}
+    v{selectedVersion.parent_plan_id}
+  </p>
+)}
+```
+
+### GPX upload preview (render excerpt)
+```jsx
+<p>{previewData.distance}</p>
+<p>{previewData.elevationGain}</p>
+<p>{previewData.avgGrade}</p>
+<p>{previewData.maxElevation}</p>
+<p>{previewData.elevationLoss}</p>
+```
+
+### Route metadata card (render excerpt)
+```jsx
+<p>Elevation Gain</p>
+<p>{route.elevation_gain}m</p>
+<p>Avg Grade</p>
+<p>{route.grade_avg}%</p>
+<p>Difficulty</p>
+<p>Distance</p>
+<p>{(route.distance / 1000).toFixed(1)}km</p>
+<p>Category</p>
+<p>{route.category}</p>
+```
+
+### SectionManager (render excerpt)
+```jsx
+<p>{section.name}</p>
+<p>{section.start}km - {section.end}km</p>
+<p>Distance: {section.distance} km</p>
+<p>Elevation: {section.elevationGain} m</p>
+<p>Max Grade: {section.maxGrade}%</p>
+<p>Difficulty: {section.difficulty}/5</p>
+<p>{section.recommended_gear.replace('_', ' ')}</p>
+<p>
+  Surface:
+  {editing === section.id ? (
+    <select>{/* … */}</select>
+  ) : (
+    <span>{section.surfaceType}</span>
+  )}
+</p>
+```
+
+### RulesList table (render excerpt)
+```jsx
+<thead>
+  <tr>
+    <th>Name</th>
+    <th>Version</th>
+    <th>Status</th>
+    <th>Created</th>
+    <th>Actions</th>
+  </tr>
+</thead>
+<tbody>
+  <tr>
+    <td>{set.name || 'Untitled Rules'}</td>
+    <td>v{set.version}</td>
+    <td>
+      <span>{set.active ? 'Active' : 'Inactive'}</span>
+    </td>
+    <td>
+      {format(new Date(set.created_at), 'MMM dd, yyyy')}
+    </td>
+    <td>{/* … */}</td>
+  </tr>
+</tbody>
+```
+
+### RoutesPage (render excerpt)
+Replaces the old `Route management will be displayed here` placeholder:
+```jsx
+{error && <p>{error}</p>}
+{loading && <p>Loading route management...</p>}
+```