diff --git a/FitnessSync/backend/main.py b/FitnessSync/backend/main.py
index 9fd25cd..31fed25 100644
--- a/FitnessSync/backend/main.py
+++ b/FitnessSync/backend/main.py
@@ -60,7 +60,19 @@ async def log_requests(request: Request, call_next):
         logger.error(f"Request Failed: {e}")
         raise
 
-app.mount("/static", StaticFiles(directory="../static"), name="static")
+from pathlib import Path
+
+# Resolve an absolute path to the static directory so the mount
+# works regardless of the process's working directory
+BASE_DIR = Path(__file__).resolve().parent
+STATIC_DIR = BASE_DIR.parent / "static"
+
+if not STATIC_DIR.exists():
+    # Warn, then create the directory so the mount does not crash in dev
+    logging.warning(f"Static directory not found at {STATIC_DIR}")
+    STATIC_DIR.mkdir(parents=True, exist_ok=True)
+
+app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")
 
 templates = Jinja2Templates(directory="templates")
 
 from src.api import status, sync, auth, logs, metrics, activities, scheduling, config_routes
@@ -82,6 +95,10 @@ app.include_router(segments.router, prefix="/api")
 from src.api import bike_setups
 app.include_router(bike_setups.router)
 
+from src.api import discovery
+app.include_router(discovery.router, prefix="/api/discovery")
+
+
 from src.routers import web
diff --git a/FitnessSync/backend/src/api/activities.py b/FitnessSync/backend/src/api/activities.py
index eb279ed..672fcb3 100644
--- a/FitnessSync/backend/src/api/activities.py
+++ b/FitnessSync/backend/src/api/activities.py
@@ -141,6 +141,7 @@ async def query_activities(
     start_date: Optional[str] = Query(None),
     end_date: Optional[str] = Query(None),
     download_status: Optional[str] = Query(None),
+    bike_setup_id: Optional[int] = Query(None),
     db: Session = Depends(get_db)
 ):
     """
@@ -154,7 +155,20 @@ async def query_activities(
 
     # Apply filters based on parameters
     if activity_type:
-        query = query.filter(Activity.activity_type == activity_type)
+        if activity_type == 'cycling':
+            # 'cycling' acts as an umbrella filter for all outdoor cycling sub-types
+            from sqlalchemy import or_
+            query = query.filter(or_(
+                Activity.activity_type == 'cycling',
+                Activity.activity_type == 'road_biking',
+                Activity.activity_type == 'mountain_biking',
+                Activity.activity_type == 'gravel_cycling',
+                Activity.activity_type == 'cyclocross',
+                Activity.activity_type == 'track_cycling',
+                Activity.activity_type == 'commuting'
+            ))
+        else:
+            query = query.filter(Activity.activity_type == activity_type)
 
     if start_date:
         from datetime import datetime
@@ -168,6 +183,9 @@ async def query_activities(
 
     if download_status:
         query = query.filter(Activity.download_status == download_status)
+
+    if bike_setup_id:
+        query = query.filter(Activity.bike_setup_id == bike_setup_id)
 
     # Execute the query
     activities = query.all()
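A quick sketch of how the new filters compose from a client's perspective (assumptions: the activities router is mounted under /api and this handler answers GET /api/activities, neither of which is shown in the hunk; host and IDs are hypothetical):

    import requests

    # 'cycling' fans out to all outdoor cycling sub-types server-side
    resp = requests.get(
        "http://localhost:8000/api/activities",
        params={"activity_type": "cycling", "bike_setup_id": 2},
    )
    resp.raise_for_status()
    print(len(resp.json()), "rides on bike setup 2")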
f"Successfully redownloaded and matched activity {activity_id}", "status": "success"} else: raise HTTPException(status_code=500, detail="Failed to redownload activity. Check logs for details.") @@ -389,6 +420,48 @@ async def redownload_activity_endpoint(activity_id: str, db: Session = Depends(g # New Sync Endpoints +class BikeMatchUpdate(BaseModel): + bike_setup_id: Optional[int] = None + manual_override: bool = True + +@router.put("/activities/{activity_id}/bike") +async def update_activity_bike(activity_id: str, update: BikeMatchUpdate, db: Session = Depends(get_db)): + """ + Manually update the bike setup for an activity. + Sets bike_match_confidence to 2.0 to indicate manual override. + """ + try: + activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first() + if not activity: + raise HTTPException(status_code=404, detail="Activity not found") + + # Verify bike setup exists if provided + if update.bike_setup_id: + from ..models.bike_setup import BikeSetup + setup = db.query(BikeSetup).filter(BikeSetup.id == update.bike_setup_id).first() + if not setup: + raise HTTPException(status_code=404, detail="Bike Setup not found") + + activity.bike_setup_id = setup.id + activity.bike_match_confidence = 2.0 # Manual Override + logger.info(f"Manual bike override for {activity_id} to setup {setup.id}") + else: + # Clear setup + activity.bike_setup_id = None + activity.bike_match_confidence = 2.0 # Manual Clear + logger.info(f"Manual bike override for {activity_id} to cleared") + + db.commit() + return {"message": "Bike setup updated successfully", "status": "success"} + except HTTPException: + raise + except Exception as e: + logger.error(f"Error updating activity bike: {e}") + db.rollback() + raise HTTPException(status_code=500, detail=str(e)) + + + def run_scan_job(job_id: str, days_back: int, db_session_factory): """Background task wrapper for scan""" try: @@ -685,6 +758,23 @@ async def get_activity_streams(activity_id: str, db: Session = Depends(get_db)): logger.error(f"Error getting streams: {e}") raise HTTPException(status_code=500, detail=str(e)) +@router.post("/activities/{activity_id}/estimate_power") +async def estimate_activity_power(activity_id: int, db: Session = Depends(get_db)): + """ + Trigger physics-based power estimation. 
+ """ + from ..services.power_estimator import PowerEstimatorService + + try: + service = PowerEstimatorService(db) + result = service.estimate_power_for_activity(activity_id) + return {"message": "Power estimated successfully", "stats": result} + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error estimating power: {e}") + raise HTTPException(status_code=500, detail=str(e)) + @router.get("/activities/{activity_id}/navigation") async def get_activity_navigation(activity_id: str, db: Session = Depends(get_db)): """ diff --git a/FitnessSync/backend/src/api/bike_setups.py b/FitnessSync/backend/src/api/bike_setups.py index dd880a2..d485911 100644 --- a/FitnessSync/backend/src/api/bike_setups.py +++ b/FitnessSync/backend/src/api/bike_setups.py @@ -2,7 +2,7 @@ from fastapi import APIRouter, Depends, HTTPException, status from sqlalchemy.orm import Session from pydantic import BaseModel from typing import List, Optional -from datetime import datetime +from datetime import datetime, date import logging from ..models.bike_setup import BikeSetup @@ -22,12 +22,18 @@ class BikeSetupCreate(BaseModel): frame: str chainring: int rear_cog: int + weight_kg: Optional[float] = None + purchase_date: Optional[date] = None + retirement_date: Optional[date] = None name: Optional[str] = None class BikeSetupUpdate(BaseModel): frame: Optional[str] = None chainring: Optional[int] = None rear_cog: Optional[int] = None + weight_kg: Optional[float] = None + purchase_date: Optional[date] = None + retirement_date: Optional[date] = None name: Optional[str] = None class BikeSetupRead(BaseModel): @@ -35,9 +41,15 @@ class BikeSetupRead(BaseModel): frame: str chainring: int rear_cog: int + year: Optional[int] = None + weight_kg: Optional[float] = None + purchase_date: Optional[date] = None + retirement_date: Optional[date] = None name: Optional[str] = None created_at: Optional[datetime] updated_at: Optional[datetime] + activity_count: int = 0 + total_distance: float = 0.0 class Config: from_attributes = True @@ -46,8 +58,40 @@ router = APIRouter(prefix="/api/bike-setups", tags=["bike-setups"]) @router.get("/", response_model=List[BikeSetupRead]) def get_bike_setups(db: Session = Depends(get_db)): - """List all bike setups.""" - return db.query(BikeSetup).all() + """List all bike setups with usage stats.""" + from sqlalchemy import func + from ..models.activity import Activity + + # Query setups with aggregated activity stats + results = db.query( + BikeSetup, + func.count(Activity.id).label("count"), + func.sum(Activity.distance).label("dist") + ).outerjoin(Activity, BikeSetup.id == Activity.bike_setup_id)\ + .group_by(BikeSetup.id).all() + + response = [] + for setup, count, dist in results: + # Clone setup attributes to Pydantic model + # Assuming Pydantic v2 or mapped correctly. + # We can construct dict or let Pydantic handle it if we pass enriched object? 
diff --git a/FitnessSync/backend/src/api/discovery.py b/FitnessSync/backend/src/api/discovery.py
new file mode 100644
index 0000000..7eba267
--- /dev/null
+++ b/FitnessSync/backend/src/api/discovery.py
@@ -0,0 +1,83 @@
+from fastapi import APIRouter, Depends, HTTPException
+from sqlalchemy.orm import Session
+from datetime import datetime
+
+from ..models import Base  # Ensure models are loaded if needed
+from ..services.postgresql_manager import PostgreSQLManager
+from ..utils.config import config
+
+from ..services.discovery import SegmentDiscoveryService
+from ..schemas.discovery import DiscoveryFilter, DiscoveryResult, CandidateSegmentSchema, SingleDiscoveryRequest
+
+
+router = APIRouter()
+
+def get_db_session():
+    db_manager = PostgreSQLManager(config.DATABASE_URL)
+    with db_manager.get_db_session() as session:
+        yield session
+
+
+@router.post("/segments", response_model=DiscoveryResult)
+def discover_segments(
+    filter: DiscoveryFilter,
+    db: Session = Depends(get_db_session)
+):
+    service = SegmentDiscoveryService(db)
+
+    # Default the window to the last year if no start date is given
+    start = filter.start_date or datetime.now().replace(year=datetime.now().year - 1)
+
+    candidates, debug_paths = service.discover_segments(
+        activity_type=filter.activity_type,
+        start_date=start,
+        end_date=filter.end_date
+    )
+
+    # Convert to schema
+    results = []
+    for c in candidates:
+        results.append(CandidateSegmentSchema(
+            points=c.points,
+            frequency=c.frequency,
+            distance=c.distance,
+            activity_ids=c.activity_ids
+        ))
+
+    return DiscoveryResult(
+        candidates=results,
+        generated_at=datetime.now(),
+        activity_count=len(debug_paths),
+        debug_paths=debug_paths
+    )
+
+
+@router.post("/single", response_model=DiscoveryResult)
+def discover_single_activity(
+    request: SingleDiscoveryRequest,
+    db: Session = Depends(get_db_session)
+):
+    service = SegmentDiscoveryService(db)
+
+    candidates = service.analyze_single_activity(request.activity_id)
+
+    # Convert to schema
+    results = []
+    for c in candidates:
+        results.append(CandidateSegmentSchema(
+            points=c.points,
+            frequency=c.frequency,
+            distance=c.distance,
+            activity_ids=c.activity_ids
+        ))
+
+    return DiscoveryResult(
+        candidates=results,
+        generated_at=datetime.now(),
+        activity_count=1,
+        debug_paths=None
+    )
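A sketch of driving the discovery API (the /api/discovery mount comes from main.py above; host and dates are hypothetical):

    import requests

    resp = requests.post(
        "http://localhost:8000/api/discovery/segments",
        json={"activity_type": "cycling", "start_date": "2024-01-01T00:00:00"},
    )
    resp.raise_for_status()
    result = resp.json()
    print(result["activity_count"], "activities analyzed")
    for cand in result["candidates"]:
        print(f"{cand['distance']:.0f} m, seen in {cand['frequency']} activities")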
diff --git a/FitnessSync/backend/src/api/segments.py b/FitnessSync/backend/src/api/segments.py
index cd0eac7..5615b6b 100644
--- a/FitnessSync/backend/src/api/segments.py
+++ b/FitnessSync/backend/src/api/segments.py
@@ -44,7 +44,8 @@ class SegmentResponse(BaseModel):
     distance: float
     elevation_gain: Optional[float]
     activity_type: str
     points: List[List[float]]
+    effort_count: int = 0
 
 @router.post("/segments/create")
 def create_segment(payload: SegmentCreate, db: Session = Depends(get_db)):
@@ -120,9 +122,17 @@ def create_segment(payload: SegmentCreate, db: Session = Depends(get_db)):
 
 @router.get("/segments", response_model=List[SegmentResponse])
 def list_segments(db: Session = Depends(get_db)):
-    segments = db.query(Segment).all()
+    # Query segments together with their effort counts
+    from sqlalchemy import func
+
+    # Outer join to count efforts, grouping by Segment
+    # (SQLAlchemy < 2.0 style)
+    results = db.query(Segment, func.count(SegmentEffort.id)) \
+        .outerjoin(SegmentEffort, Segment.id == SegmentEffort.segment_id) \
+        .group_by(Segment.id).all()
+
     res = []
-    for s in segments:
+    for s, count in results:
         pts = json.loads(s.points) if isinstance(s.points, str) else s.points
         res.append(SegmentResponse(
             id=s.id,
@@ -130,7 +140,8 @@ def list_segments(db: Session = Depends(get_db)):
             distance=s.distance,
             elevation_gain=s.elevation_gain,
             activity_type=s.activity_type,
-            points=pts
+            points=pts,
+            effort_count=count
         ))
     return res
@@ -222,5 +233,95 @@ def scan_segments(db: Session = Depends(get_db)):
     # Run in background
     thread = threading.Thread(target=job_manager.run_serialized, args=(job_id, run_segment_matching_job))
     thread.start()
     return {"message": "Segment scan started", "job_id": job_id}
+
+@router.post("/segments/scan/{activity_id}")
+def scan_activity_segments(activity_id: int, db: Session = Depends(get_db)):
+    """Scan a specific activity for segment matches."""
+    from ..models.activity import Activity
+    from ..services.segment_matcher import SegmentMatcher
+    from ..services.parsers import extract_points_from_file
+
+    # Resolve ID: try the primary key first, then the Garmin activity id
+    activity = db.query(Activity).filter(Activity.id == activity_id).first()
+    if not activity:
+        activity = db.query(Activity).filter(Activity.garmin_activity_id == str(activity_id)).first()
+
+    if not activity:
+        raise HTTPException(status_code=404, detail="Activity not found")
+
+    if not activity.file_content:
+        raise HTTPException(status_code=400, detail="Activity has no file content")
+
+    # Clear existing efforts so a rescan does not duplicate them
+    db.query(SegmentEffort).filter(SegmentEffort.activity_id == activity.id).delete()
+    db.commit()  # Commit the delete
+
+    try:
+        points = extract_points_from_file(activity.file_content, activity.file_type)
+        if not points:
+            return {"message": "No points found in activity", "matches": 0}
+
+        matcher = SegmentMatcher(db)
+        efforts = matcher.match_activity(activity, points)
+
+        # matcher commits internally
+
+        return {"message": "Scan complete", "matches": len(efforts), "segment_ids": [e.segment_id for e in efforts]}
+
+    except Exception as e:
+        print(f"Error scanning activity {activity.id}: {e}")
+        raise HTTPException(status_code=500, detail=str(e))
+
+
+class SegmentCreateCustom(BaseModel):
+    name: str
+    description: Optional[str] = None
+    activity_type: str
+    points: List[List[float]]  # [[lon, lat], ...] or [[lon, lat, ele], ...]
+
+@router.post("/segments/save_custom")
+def save_custom_segment(payload: SegmentCreateCustom, db: Session = Depends(get_db)):
+    """Save a segment from custom points (e.g. discovery results)."""
+    from ..utils.geo import calculate_bounds, haversine_distance
+
+    if not payload.points or len(payload.points) < 2:
+        raise HTTPException(status_code=400, detail="Invalid points")
+
+    # Discovery results arrive already simplified, so no extra
+    # simplification pass is applied here
+
+    # Calculate metadata
+    dist = 0.0
+    elev_gain = 0.0
+
+    for i in range(len(payload.points) - 1):
+        p1 = payload.points[i]
+        p2 = payload.points[i+1]
+        dist += haversine_distance(p1[1], p1[0], p2[1], p2[0])
+
+        # Elevation gain if present
+        if len(p1) > 2 and len(p2) > 2 and p1[2] is not None and p2[2] is not None:
+            diff = p2[2] - p1[2]
+            if diff > 0:
+                elev_gain += diff
+
+    bounds = calculate_bounds(payload.points)
+
+    segment = Segment(
+        name=payload.name,
+        description=payload.description,
+        distance=dist,
+        elevation_gain=elev_gain,
+        activity_type=payload.activity_type,
+        points=json.dumps(payload.points),
+        bounds=json.dumps(bounds)
+    )
+
+    db.add(segment)
+    db.commit()
+    db.refresh(segment)
+
+    return {"message": "Segment saved", "id": segment.id}
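Saving a discovery candidate back as a real segment then looks like this (a sketch; the segments router is mounted under /api per main.py, points are abbreviated, host is hypothetical):

    import requests

    requests.post(
        "http://localhost:8000/api/segments/save_custom",
        json={
            "name": "River loop sprint",
            "activity_type": "cycling",
            "points": [[13.4000, 52.5200, 35.0], [13.4019, 52.5203, 36.5]],  # [[lon, lat, ele], ...]
        },
    ).raise_for_status()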
detail="Activity has no file content") + + # Clear existing efforts + db.query(SegmentEffort).filter(SegmentEffort.activity_id == activity.id).delete() + db.commit() # Commit delete + + try: + points = extract_points_from_file(activity.file_content, activity.file_type) + if not points: + return {"message": "No points found in activity", "matches": 0} + + matcher = SegmentMatcher(db) + efforts = matcher.match_activity(activity, points) + + # matcher commits internally + + return {"message": "Scan complete", "matches": len(efforts), "segment_ids": [e.segment_id for e in efforts]} + + except Exception as e: + print(f"Error scanning activity {activity.id}: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +class SegmentCreateCustom(BaseModel): + name: str + description: Optional[str] = None + activity_type: str + points: List[List[float]] # [[lon, lat], ...] or [[lon, lat, ele], ...] + +@router.post("/segments/save_custom") +def save_custom_segment(payload: SegmentCreateCustom, db: Session = Depends(get_db)): + """Save a segment from custom points (e.g. discovery results).""" + from ..utils.geo import calculate_bounds, haversine_distance, ramer_douglas_peucker + + if not payload.points or len(payload.points) < 2: + raise HTTPException(status_code=400, detail="Invalid points") + + # Simplify if needed? Discovery results are already simplified. + # But maybe we ensure consistency. + # payload.points is likely already simplified. + + # Calculate metadata + dist = 0.0 + elev_gain = 0.0 + + for i in range(len(payload.points)-1): + p1 = payload.points[i] + p2 = payload.points[i+1] + dist += haversine_distance(p1[1], p1[0], p2[1], p2[0]) + + # Elevation if present + if len(p1) > 2 and len(p2) > 2 and p1[2] is not None and p2[2] is not None: + diff = p2[2] - p1[2] + if diff > 0: + elev_gain += diff + + bounds = calculate_bounds(payload.points) + + segment = Segment( + name=payload.name, + description=payload.description, + distance=dist, + elevation_gain=elev_gain, + activity_type=payload.activity_type, + points=json.dumps(payload.points), + bounds=json.dumps(bounds) + ) + + db.add(segment) + db.commit() + db.refresh(segment) + + return {"message": "Segment saved", "id": segment.id} diff --git a/FitnessSync/backend/src/jobs/segment_matching_job.py b/FitnessSync/backend/src/jobs/segment_matching_job.py index 03afe52..a5abdfb 100644 --- a/FitnessSync/backend/src/jobs/segment_matching_job.py +++ b/FitnessSync/backend/src/jobs/segment_matching_job.py @@ -25,11 +25,37 @@ def run_segment_matching_job(job_id: str): activities = db.query(Activity).all() total_activities = len(activities) + # Optimization: Pre-fetch segment locations for coarse filtering + segments_list = db.query(Segment).all() + segment_locations = [] + + # Parse segment bounds once + import json + for s in segments_list: + try: + if s.bounds: + # bounds: [min_lat, min_lon, max_lat, max_lon] + # We just need a center point or use bounds directly + b = json.loads(s.bounds) if isinstance(s.bounds, str) else s.bounds + if b and len(b) == 4: + segment_locations.append(b) + except: pass + + has_segments = len(segment_locations) > 0 + job_manager.update_job(job_id, progress=0, message=f"Starting scan of {total_activities} activities...") matcher = SegmentMatcher(db) total_matches = 0 + skipped_far = 0 + + # APPROX 1000 miles in degrees + # 1 deg lat ~ 69 miles. 1000 miles ~ 14.5 degrees. + # Longitude varies but 14.5 is a safe upper bound (it's less distance at poles). + # Let's use 15 degrees buffer. 
diff --git a/FitnessSync/backend/src/models/activity.py b/FitnessSync/backend/src/models/activity.py
index 7fcf8fb..b1dc0c7 100644
--- a/FitnessSync/backend/src/models/activity.py
+++ b/FitnessSync/backend/src/models/activity.py
@@ -14,6 +14,10 @@ class Activity(Base):
     duration = Column(Integer, nullable=True)  # Duration in seconds
     duration = Column(Integer, nullable=True)  # Duration in seconds
 
+    # Start location (added for the coarse distance optimization)
+    start_lat = Column(Float, nullable=True)
+    start_lng = Column(Float, nullable=True)
+
     # Extended Metrics
     distance = Column(Float, nullable=True)  # meters
     calories = Column(Float, nullable=True)  # kcal
@@ -42,4 +46,5 @@ class Activity(Base):
     updated_at = Column(DateTime(timezone=True), onupdate=func.now())
 
     bike_setup_id = Column(Integer, ForeignKey("bike_setups.id"), nullable=True)
+    bike_match_confidence = Column(Float, nullable=True)  # 0.0-1.0 match confidence; 2.0 marks a manual override
     bike_setup = relationship("BikeSetup")
\ No newline at end of file
diff --git a/FitnessSync/backend/src/models/bike_setup.py b/FitnessSync/backend/src/models/bike_setup.py
index f426ca6..d288088 100644
--- a/FitnessSync/backend/src/models/bike_setup.py
+++ b/FitnessSync/backend/src/models/bike_setup.py
@@ -1,4 +1,4 @@
-from sqlalchemy import Column, Integer, String, DateTime
+from sqlalchemy import Column, Integer, String, DateTime, Float
 from sqlalchemy.sql import func
 
 from .base import Base
@@ -9,6 +9,9 @@ class BikeSetup(Base):
     frame = Column(String, nullable=False)
     chainring = Column(Integer, nullable=False)
     rear_cog = Column(Integer, nullable=False)
+    weight_kg = Column(Float, nullable=True)  # Weight of the bike in kg
+    purchase_date = Column(DateTime, nullable=True)
+    retirement_date = Column(DateTime, nullable=True)
     name = Column(String, nullable=True)  # Optional, can be derived or user-set
 
     created_at = Column(DateTime(timezone=True), server_default=func.now())
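These model changes add columns to existing tables, and no migration appears in the diff. If the project uses Alembic (an assumption; it may instead rely on create_all or manual DDL, and the table names "activities" / "bike_setups" are assumed here), the upgrade step would look roughly like:

    # Hypothetical Alembic migration for the new columns
    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        op.add_column('activities', sa.Column('start_lat', sa.Float(), nullable=True))
        op.add_column('activities', sa.Column('start_lng', sa.Float(), nullable=True))
        op.add_column('activities', sa.Column('bike_match_confidence', sa.Float(), nullable=True))
        op.add_column('bike_setups', sa.Column('weight_kg', sa.Float(), nullable=True))
        op.add_column('bike_setups', sa.Column('purchase_date', sa.DateTime(), nullable=True))
        op.add_column('bike_setups', sa.Column('retirement_date', sa.DateTime(), nullable=True))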
diff --git a/FitnessSync/backend/src/routers/web.py b/FitnessSync/backend/src/routers/web.py
index 054eb41..79a0a66 100644
--- a/FitnessSync/backend/src/routers/web.py
+++ b/FitnessSync/backend/src/routers/web.py
@@ -36,3 +36,9 @@ async def bike_setups_page(request: Request):
 async def activity_view_page(request: Request, activity_id: str):
     return templates.TemplateResponse("activity_view.html", {"request": request, "activity_id": activity_id})
 
+@router.get("/discovery")
+async def discovery_page(request: Request):
+    from datetime import datetime, timedelta
+    return templates.TemplateResponse("discovery.html", {"request": request, "now": datetime.now(), "timedelta": timedelta})
+
+
diff --git a/FitnessSync/backend/src/schemas/discovery.py b/FitnessSync/backend/src/schemas/discovery.py
new file mode 100644
index 0000000..0cf08d7
--- /dev/null
+++ b/FitnessSync/backend/src/schemas/discovery.py
@@ -0,0 +1,29 @@
+from pydantic import BaseModel
+from typing import List, Optional
+from datetime import datetime
+
+class DiscoveryFilter(BaseModel):
+    activity_type: str
+    start_date: Optional[datetime] = None
+    end_date: Optional[datetime] = None
+    lat_min: Optional[float] = None
+    lat_max: Optional[float] = None
+    lon_min: Optional[float] = None
+    lon_max: Optional[float] = None
+
+class SingleDiscoveryRequest(BaseModel):
+    activity_id: int
+
+
+class CandidateSegmentSchema(BaseModel):
+    points: List[List[float]]
+    frequency: int
+    distance: float
+    activity_ids: List[int]
+
+class DiscoveryResult(BaseModel):
+    candidates: List[CandidateSegmentSchema]
+    debug_paths: Optional[List[List[List[float]]]] = None
+    generated_at: datetime
+    activity_count: int  # How many activities were analyzed
datetime.now(), "timedelta": timedelta}) + + diff --git a/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-311.pyc b/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-311.pyc new file mode 100644 index 0000000..4af798d Binary files /dev/null and b/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-313.pyc b/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-313.pyc new file mode 100644 index 0000000..0a975ab Binary files /dev/null and b/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/schemas/discovery.py b/FitnessSync/backend/src/schemas/discovery.py new file mode 100644 index 0000000..0cf08d7 --- /dev/null +++ b/FitnessSync/backend/src/schemas/discovery.py @@ -0,0 +1,29 @@ +from pydantic import BaseModel +from typing import List, Optional +from datetime import datetime + +class DiscoveryFilter(BaseModel): + activity_type: str + start_date: Optional[datetime] = None + end_date: Optional[datetime] = None + lat_min: Optional[float] = None + lat_max: Optional[float] = None + lon_min: Optional[float] = None + lon_max: Optional[float] = None + +class SingleDiscoveryRequest(BaseModel): + activity_id: int + + +class CandidateSegmentSchema(BaseModel): + points: List[List[float]] + frequency: int + distance: float + activity_ids: List[int] + +class DiscoveryResult(BaseModel): + candidates: List[CandidateSegmentSchema] + debug_paths: Optional[List[List[List[float]]]] = None + generated_at: datetime + activity_count: int + # How many activities were analyzed diff --git a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc index f511889..8114b9f 100644 Binary files a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc and b/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc index ca91540..588349e 100644 Binary files a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc and b/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/discovery.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/discovery.cpython-311.pyc new file mode 100644 index 0000000..bb83f90 Binary files /dev/null and b/FitnessSync/backend/src/services/__pycache__/discovery.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/discovery.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/discovery.cpython-313.pyc new file mode 100644 index 0000000..a0e99d3 Binary files /dev/null and b/FitnessSync/backend/src/services/__pycache__/discovery.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc index 9c0da88..4d32df5 100644 Binary files a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc and b/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc 
@@ -25,7 +83,7 @@ def match_activity_to_bike(db: Session, activity: Activity) -> Optional[BikeSetu
     Match an activity to a bike setup based on gear ratio.
     """
     if not activity.activity_type:
-        return None
+        return None, 0.0
 
     type_lower = activity.activity_type.lower()
@@ -45,23 +103,41 @@ def match_activity_to_bike(db: Session, activity: Activity) -> Optional[BikeSetu
 
     if not is_cycling:
         # Not cycling
-        return None
+        return None, 0.0
 
     if 'indoor' in type_lower:
         # Indoor cycling - ignore
-        return None
+        return None, 0.0
 
-    if not activity.avg_speed or not activity.avg_cadence:
-        # Not enough data
-        return None
-
-    observed_ratio = calculate_observed_ratio(activity.avg_speed, activity.avg_cadence)
+    observed_ratio = 0.0
+
+    # Prefer full streams ("Smart Matching") when file content is available
+    if activity.file_content:
+        try:
+            data = extract_activity_data(activity.file_content, activity.file_type)
+            speeds = data.get('speed') or []
+            cadences = data.get('cadence') or []
+
+            # If explicit streams exist, use them
+            if speeds and cadences and len(speeds) > 0:
+                observed_ratio = calculate_ratio_from_streams(speeds, cadences)
+                logger.debug(f"Smart Match Ratio for {activity.id}: {observed_ratio:.2f}")
+        except Exception as e:
+            logger.warning(f"Failed to extract streams for Smart Matching activity {activity.id}: {e}")
+
+    # Fall back to averages if Smart Matching failed or returned 0
     if observed_ratio == 0:
-        return None
+        if not activity.avg_speed or not activity.avg_cadence:
+            # Not enough data
+            return None, 0.0
+        observed_ratio = calculate_observed_ratio(activity.avg_speed, activity.avg_cadence)
+
+    if observed_ratio == 0:
+        return None, 0.0
 
     setups = db.query(BikeSetup).all()
     if not setups:
-        return None
+        return None, 0.0
 
     best_match = None
     min_diff = float('inf')
@@ -70,6 +146,33 @@ def match_activity_to_bike(db: Session, activity: Activity) -> Optional[BikeSetu
         if not setup.chainring or not setup.rear_cog:
             continue
 
+        # Date constraints: skip setups whose purchase/retirement window
+        # does not contain the activity date (comparisons are done naive,
+        # stripping tzinfo from both sides)
+        act_date = activity.start_time
+
+        if setup.purchase_date:
+            p_date = setup.purchase_date
+            if p_date.tzinfo:
+                p_date = p_date.replace(tzinfo=None)
+            a_date = act_date
+            if a_date.tzinfo:
+                a_date = a_date.replace(tzinfo=None)
+
+            if a_date < p_date:
+                continue
+
+        if setup.retirement_date:
+            r_date = setup.retirement_date
+            if r_date.tzinfo:
+                r_date = r_date.replace(tzinfo=None)
+            a_date = act_date
+            if a_date.tzinfo:
+                a_date = a_date.replace(tzinfo=None)
+
+            if a_date > r_date:
+                continue
+
         mechanical_ratio = setup.chainring / setup.rear_cog
         diff = abs(observed_ratio - mechanical_ratio)
 
@@ -80,7 +183,13 @@ def match_activity_to_bike(db: Session, activity: Activity) -> Optional[BikeSetu
             min_diff = diff
             best_match = setup
 
-    return best_match
+    if best_match:
+        match_ratio = best_match.chainring / best_match.rear_cog
+        error_pct = min_diff / match_ratio
+        confidence = max(0.0, 1.0 - error_pct)
+        return best_match, confidence
+
+    return None, 0.0
 
 def process_activity_matching(db: Session, activity_id: int):
     """
@@ -90,17 +201,20 @@ def process_activity_matching(db: Session, activity_id: int):
     if not activity:
         return
 
-    match = match_activity_to_bike(db, activity)
+    match, confidence = match_activity_to_bike(db, activity)
     if match:
         activity.bike_setup_id = match.id
-        logger.info(f"Matched Activity {activity.id} to Setup {match.frame} (Found Ratio: {calculate_observed_ratio(activity.avg_speed, activity.avg_cadence):.2f})")
+        activity.bike_match_confidence = confidence
+        logger.info(f"Matched Activity {activity.id} to Setup {match.frame} (Confidence: {confidence:.2f})")
     else:
         # Implicitly "Generic" if None, but user requested explicit default logic.
         generic = db.query(BikeSetup).filter(BikeSetup.name == "GenericBike").first()
         if generic:
             activity.bike_setup_id = generic.id
+            activity.bike_match_confidence = 0.5  # Low confidence fallback
         else:
             activity.bike_setup_id = None  # Truly unknown
+            activity.bike_match_confidence = 0.0
 
     db.commit()
@@ -111,7 +225,8 @@ def run_matching_for_all(db: Session):
     from sqlalchemy import or_
 
     activities = db.query(Activity).filter(
-        Activity.bike_setup_id == None,
+        # Activity.bike_setup_id == None,
+        # Re-match everything to enforce new rules/constraints
         or_(
             Activity.activity_type.ilike('%cycling%'),
             Activity.activity_type.ilike('%road_biking%'),
@@ -119,7 +234,9 @@ def run_matching_for_all(db: Session):
             Activity.activity_type.ilike('%mtb%'),
             Activity.activity_type.ilike('%cyclocross%')
         ),
-        Activity.activity_type.notilike('%indoor%')
+        Activity.activity_type.notilike('%indoor%'),
+        # Skip manual overrides (confidence >= 2.0)
+        or_(Activity.bike_match_confidence == None, Activity.bike_match_confidence < 2.0)
     ).all()
 
     count = 0
diff --git a/FitnessSync/backend/src/services/discovery.py b/FitnessSync/backend/src/services/discovery.py
new file mode 100644
index 0000000..de5cc8f
--- /dev/null
+++ b/FitnessSync/backend/src/services/discovery.py
@@ -0,0 +1,453 @@
+from typing import List, Dict, Optional, Tuple, Set
+from datetime import datetime, timedelta
+import logging
+import math
+from sqlalchemy.orm import Session
+from sqlalchemy import func
+
+from ..models.activity import Activity
+from ..models.segment import Segment
+from ..utils.geo import haversine_distance, calculate_bounds, calculate_bearing, ramer_douglas_peucker_indices
+from ..services.parsers import extract_points_from_file, extract_activity_data
+
+
+logger = logging.getLogger(__name__)
+
+class CandidateSegment:
+    def __init__(self, points: List[List[float]], frequency: int, activity_ids: List[int]):
+        self.points = points  # [[lon, lat], ...]
+        self.frequency = frequency
+        self.activity_ids = activity_ids
+        self.distance = self._calculate_distance()
+        self.uuid = None  # To be assigned for frontend reference
+
+    def _calculate_distance(self) -> float:
+        d = 0.0
+        for i in range(len(self.points) - 1):
+            p1 = self.points[i]
+            p2 = self.points[i+1]
+            d += haversine_distance(p1[1], p1[0], p2[1], p2[0])
+        return d
+
+class SegmentDiscoveryService:
+    def __init__(self, db: Session):
+        self.db = db
+
+    def discover_segments(self,
+                          activity_type: str,
+                          start_date: Optional[datetime],
+                          end_date: Optional[datetime] = None) -> Tuple[List[CandidateSegment], List[List[List[float]]]]:
+
+        logger.info(f"Starting segment discovery for {activity_type} since {start_date}")
+
+        # 1. Fetch activities
+        query = self.db.query(Activity).filter(Activity.activity_type == activity_type)
+        if start_date:
+            query = query.filter(Activity.start_time >= start_date)
+        if end_date:
+            query = query.filter(Activity.start_time <= end_date)
+
+        activities = query.all()
+        logger.info(f"Analyzing {len(activities)} activities.")
+
+        if len(activities) < 2:
+            return [], []
+
+        # 2. Extract and simplify points (the "cloud")
+        # Structure: { activity_id: [[lon, lat], ...] }
+        activity_paths = {}
+        for act in activities:
+            if not act.file_content:
+                continue
+            try:
+                raw_points = extract_points_from_file(act.file_content, act.file_type)
+                # min_dist of 15m (smaller than the 20m grid) preserves connectivity
+                simplified = self._decimate_points(raw_points, min_dist=15.0)
+                if len(simplified) > 5:  # Ignore tiny paths
+                    activity_paths[act.id] = simplified
+            except Exception as e:
+                logger.warning(f"Failed to process activity {act.id}: {e}")
+
+        # 3. Grid-based clustering: map every point to a grid cell
+        # (approx 20m x 20m) and count unique activities per cell
+        grid_size_deg = 0.0002  # Approx 20m at the equator
+
+        # cell_key -> set(activity_ids)
+        grid: Dict[Tuple[int, int], Set[int]] = {}
+
+        for act_id, points in activity_paths.items():
+            for p in points:
+                lon, lat = p[0], p[1]
+                xi = int(lon / grid_size_deg)
+                yi = int(lat / grid_size_deg)
+                if (xi, yi) not in grid:
+                    grid[(xi, yi)] = set()
+                grid[(xi, yi)].add(act_id)
+
+        # 4. Filter hotspots: keep cells visited by at least two unique activities
+        min_freq = 2
+        hotspot_cells = {k: v for k, v in grid.items() if len(v) >= min_freq}
+
+        logger.info(f"Found {len(hotspot_cells)} hotspot cells.")
+
+        if not hotspot_cells:
+            return [], list(activity_paths.values())
+
+        # 5. Connect hotspots (stitching): chains of adjacent hotspot cells
+        # are found via connected components on the grid
+        clusters = self._find_connected_components(hotspot_cells, grid_size_deg)
+        # 6. Reconstruct paths & candidates
+        candidates = []
+        for cluster_cells in clusters:
+            # Reconstruct a representative path for this cluster. Cell centers
+            # cannot simply be sorted (direction is unknown), so pick the
+            # activity that traverses the cluster best and extract its sub-path.
+            best_act_id = self._find_representative_activity(cluster_cells, activity_paths, grid_size_deg)
+            if best_act_id:
+                # Extract the segment sub-path from this activity
+                path_points = self._extract_subpath_from_activity(activity_paths[best_act_id], cluster_cells, grid_size_deg)
+
+                if path_points and self._calculate_path_length(path_points) > 200:  # Min length 200m
+                    # Estimate frequency from the grid cell counts. A more
+                    # robust approach would verify how many activities follow
+                    # the path (e.g. a Hausdorff check); cell overlap is the MVP.
+                    freq = self._estimate_frequency(cluster_cells, hotspot_cells)
+
+                    # Collect all activity IDs involved in this cluster
+                    cluster_activity_ids = set()
+                    for cell in cluster_cells:
+                        if cell in hotspot_cells:
+                            cluster_activity_ids.update(hotspot_cells[cell])
+
+                    cand = CandidateSegment(path_points, freq, list(cluster_activity_ids))
+                    candidates.append(cand)
+
+        # 7. Deduplicate against segments already in the DB
+        final_candidates = self._deduplicate_against_db(candidates, activity_type)
+
+        return final_candidates, list(activity_paths.values())
+
+    def analyze_single_activity(self, activity_id: int) -> List[CandidateSegment]:
+        act = self.db.query(Activity).filter(Activity.id == activity_id).first()
+
+        # Fall back to the Garmin ID if not found by primary key
+        if not act:
+            act = self.db.query(Activity).filter(Activity.garmin_activity_id == str(activity_id)).first()
+
+        if not act or not act.file_content:
+            return []
+
+        # Parse data
+        data = extract_activity_data(act.file_content, act.file_type)
+        points = data.get('points', [])
+        timestamps = data.get('timestamps', [])
+
+        if not points or not timestamps or len(points) != len(timestamps):
+            logger.warning(f"Analysis failed for {activity_id}: Mismatched points/timestamps")
+            return []
+
+        # Drop records missing either a point or a timestamp,
+        # keeping the two lists aligned
+        aligned_points = []
+        aligned_ts = []
+        for p, ts in zip(points, timestamps):
+            if p and ts:
+                aligned_points.append(p)
+                aligned_ts.append(ts)
+
+        if len(aligned_points) < 10:
+            return []
+
+        # Step 1: Split by pauses (> 10s)
+        sub_segments_indices = []  # List of [start_idx, end_idx)
+
+        seg_start = 0
+        for i in range(1, len(aligned_points)):
+            t1 = aligned_ts[i-1]
+            t2 = aligned_ts[i]
+            diff = (t2 - t1).total_seconds()
+
+            if diff > 10.0:
+                # Pause detected, split
+                if i - seg_start > 5:
+                    sub_segments_indices.append([seg_start, i])  # end index is exclusive
+                seg_start = i
+
+        # Add the last one
+        if len(aligned_points) - seg_start > 5:
+            sub_segments_indices.append([seg_start, len(aligned_points)])
+
+        final_segments = []
+
+        # Step 2: RDP turn detection on each sub-segment
+        for start_idx, end_idx in sub_segments_indices:
+            segment_points = aligned_points[start_idx:end_idx]
+
+            # Get RDP-simplified INDICES (relative to segment_points start).
+            # epsilon=10.0m gives robust major-turn detection.
+            rdp_indices = ramer_douglas_peucker_indices(segment_points, 10.0)
+
+            # Check turns at RDP vertices
+            split_points_relative = []
+
+            if len(rdp_indices) > 2:
+                last_bearing = None
+
+                # Iterate simplified vertices
+                for k in range(1, len(rdp_indices)):
+                    idx1 = rdp_indices[k-1]
+                    idx2 = rdp_indices[k]
+
+                    p1 = segment_points[idx1]
+                    p2 = segment_points[idx2]
+
+                    bearing = calculate_bearing(p1[1], p1[0], p2[1], p2[0])
+
+                    if last_bearing is not None:
+                        diff = abs(bearing - last_bearing)
+                        if diff > 180: diff = 360 - diff
+
+                        if diff > 60:
+                            # Turn detected at vertex idx1
+                            split_points_relative.append(idx1)
+
+                    last_bearing = bearing
+
+            # Split the segment at the turns. Consecutive pieces share the
+            # turn vertex: each piece ends at the turn and the next starts there.
+            current_rel_start = 0
+            for split_idx in split_points_relative:
+                # Enforce a minimum length (> 5 points)
+                if split_idx - current_rel_start > 5:
+                    abs_start = start_idx + current_rel_start
+                    abs_end = start_idx + split_idx + 1  # include the turn vertex
+                    final_segments.append(aligned_points[abs_start : abs_end])
+                    current_rel_start = split_idx
+
+            # Last piece
+            if len(segment_points) - current_rel_start > 5:
+                abs_start = start_idx + current_rel_start
+                abs_end = start_idx + len(segment_points)
+                final_segments.append(aligned_points[abs_start : abs_end])
+
+        # Step 3: Filter & convert
+        candidates = []
+        for path in final_segments:
+            d = self._calculate_path_length(path)
+            if d > 100:  # Min 100m
+                # Simple decimation for display
+                simplified = self._decimate_points(path, min_dist=10.0)
+                cand = CandidateSegment(simplified, 1, [activity_id])
+                candidates.append(cand)
+
+        return candidates
+    def _decimate_points(self, points: List[List[float]], min_dist: float) -> List[List[float]]:
+        if not points: return []
+        out = [points[0]]
+        last = points[0]
+        for p in points[1:]:
+            d = haversine_distance(last[1], last[0], p[1], p[0])
+            if d >= min_dist:
+                out.append(p)
+                last = p
+        return out
+
+    def _find_connected_components(self, cells: Dict[Tuple[int, int], Set[int]], grid_step: float) -> List[List[Tuple[int, int]]]:
+        # cells: map of (x,y) -> activity_ids
+        visited = set()
+        components = []
+
+        cell_keys = list(cells.keys())
+
+        for k in cell_keys:
+            if k in visited:
+                continue
+
+            # BFS
+            q = [k]
+            visited.add(k)
+            cluster = []
+
+            while q:
+                curr = q.pop(0)
+                cluster.append(curr)
+                cx, cy = curr
+
+                # Check 8 neighbors
+                for dx in [-1, 0, 1]:
+                    for dy in [-1, 0, 1]:
+                        if dx == 0 and dy == 0: continue
+                        neighbor = (cx + dx, cy + dy)
+                        if neighbor in cells and neighbor not in visited:
+                            visited.add(neighbor)
+                            q.append(neighbor)
+
+            if len(cluster) > 5:  # Min cluster size
+                components.append(cluster)
+
+        return components
+
+    def _find_representative_activity(self, cluster_cells: List[Tuple[int, int]],
+                                      activity_paths: Dict[int, List[List[float]]],
+                                      grid_step: float) -> Optional[int]:
+        # NOTE: the grid built in step 3 already maps cells to activity ids,
+        # but only the cell keys are passed in here, so we fall back to
+        # scanning every path against the cluster. This is naive
+        # (O(paths * points)); passing the grid contents in would avoid it.
+        cell_set = set(cluster_cells)
+
+        best_id = None
+        max_overlap = 0
+
+        for act_id, points in activity_paths.items():
+            overlap = 0
+            for p in points:
+                xi = int(p[0] / grid_step)
+                yi = int(p[1] / grid_step)
+                if (xi, yi) in cell_set:
+                    overlap += 1
+
+            if overlap > max_overlap:
+                max_overlap = overlap
+                best_id = act_id
+
+        return best_id
+
+    def _extract_subpath_from_activity(self, points: List[List[float]],
+                                       cluster_cells: List[Tuple[int, int]],
+                                       grid_step: float) -> List[List[float]]:
+        # Extract the longest contiguous run of points that lies within the cluster
+        cell_set = set(cluster_cells)
+
+        subpath = []
+        longest_subpath = []
+
+        for p in points:
+            xi = int(p[0] / grid_step)
+            yi = int(p[1] / grid_step)
+
+            if (xi, yi) in cell_set:
+                subpath.append(p)
+            else:
+                if len(subpath) > len(longest_subpath):
+                    longest_subpath = subpath
+                subpath = []
+
+        if len(subpath) > len(longest_subpath):
+            longest_subpath = subpath
+
+        return longest_subpath
+
+    def _estimate_frequency(self, cluster_keys: List[Tuple[int, int]], grid: Dict[Tuple[int, int], Set[int]]) -> int:
+        # Average unique visitors per cell in the cluster
+        if not cluster_keys: return 0
+        total = 0
+        for k in cluster_keys:
+            if k in grid:
+                total += len(grid[k])
+        return int(total / len(cluster_keys))
+
+    def _calculate_path_length(self, points: List[List[float]]) -> float:
+        d = 0.0
+        for i in range(len(points) - 1):
+            d += haversine_distance(points[i][1], points[i][0], points[i+1][1], points[i+1][0])
+        return d
+
+    def _deduplicate_against_db(self, candidates: List[CandidateSegment], activity_type: str) -> List[CandidateSegment]:
+        import json
+
+        # Load all segments of this type
+        existing = self.db.query(Segment).filter(Segment.activity_type == activity_type).all()
+
+        unique = []
+
+        for cand in candidates:
+            # Cheap heuristic: compare start/end proximity (within 50m) and
+            # total length. A Frechet or Hausdorff comparison would be more
+            # robust but is O(N*M) in pure Python.
+            is_duplicate = False
+            for ex in existing:
+                ex_points = json.loads(ex.points) if isinstance(ex.points, str) else ex.points
+                if not ex_points: continue
+
+                ex_start = ex_points[0]
+                ex_end = ex_points[-1]
+                cand_start = cand.points[0]
+                cand_end = cand.points[-1]
+
+                d_start = haversine_distance(ex_start[1], ex_start[0], cand_start[1], cand_start[0])
+                d_end = haversine_distance(ex_end[1], ex_end[0], cand_end[1], cand_end[0])
+
+                if d_start < 50 and d_end < 50:
+                    # Likely duplicate; confirm with length similarity
+                    if abs(cand.distance - ex.distance) < 200:
+                        is_duplicate = True
+                        break
+
+            if not is_duplicate:
+                unique.append(cand)
+
+        return unique
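The heart of the discovery service is the hotspot bucketing in step 3: each GPS point is keyed to an integer cell ~0.0002 degrees wide, and cells shared by two or more activities survive. A minimal standalone sketch of that step with toy coordinates (not the service itself):

    # Minimal sketch of the hotspot bucketing used above (toy coordinates)
    from collections import defaultdict

    GRID_SIZE_DEG = 0.0002  # ~20 m at the equator

    paths = {
        1: [(13.40001, 52.52001), (13.40019, 52.52003)],   # activity 1
        2: [(13.40002, 52.52002), (13.41000, 52.53000)],   # activity 2
    }

    grid = defaultdict(set)
    for act_id, points in paths.items():
        for lon, lat in points:
            cell = (int(lon / GRID_SIZE_DEG), int(lat / GRID_SIZE_DEG))
            grid[cell].add(act_id)

    hotspots = {cell: ids for cell, ids in grid.items() if len(ids) >= 2}
    print(hotspots)  # only the cell shared by both activities survives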
diff --git a/FitnessSync/backend/src/services/parsers.py b/FitnessSync/backend/src/services/parsers.py
index d864a45..e3d7e1f 100644
--- a/FitnessSync/backend/src/services/parsers.py
+++ b/FitnessSync/backend/src/services/parsers.py
@@ -14,7 +14,9 @@ def extract_activity_data(file_content: bytes, file_type: str) -> Dict[str, List
         'points': [[lon, lat, ele], ...],
         'timestamps': [datetime, ...],
         'heart_rate': [int, ...],
-        'power': [int, ...]
+        'power': [int, ...],
+        'speed': [float, ...],
+        'cadence': [int, ...]
     }
     """
     if file_type == 'fit':
@@ -34,7 +36,7 @@ def extract_timestamps_from_file(file_content: bytes, file_type: str) -> List[Op
     return data['timestamps']
 
 def _extract_data_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
-    data = {'points': [], 'timestamps': [], 'heart_rate': [], 'power': []}
+    data = {'points': [], 'timestamps': [], 'heart_rate': [], 'power': [], 'speed': [], 'cadence': []}
     try:
         with io.BytesIO(file_content) as f:
             with fitdecode.FitReader(f) as fit:
@@ -64,6 +66,14 @@ def _extract_data_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
                         ts = frame.get_value('timestamp') if frame.has_field('timestamp') else None
                         data['timestamps'].append(ts)
 
+                        # Speed: prefer enhanced_speed, fall back to speed
+                        speed = (frame.get_value('enhanced_speed') if frame.has_field('enhanced_speed')
+                                 else frame.get_value('speed') if frame.has_field('speed') else None)
+                        data['speed'].append(speed)
+
+                        # Cadence
+                        cad = frame.get_value('cadence') if frame.has_field('cadence') else None
+                        data['cadence'].append(cad)
+
                         # HR
                         hr = frame.get_value('heart_rate') if frame.has_field('heart_rate') else None
                         data['heart_rate'].append(hr)
diff --git a/FitnessSync/backend/src/services/power_estimator.py b/FitnessSync/backend/src/services/power_estimator.py
new file mode 100644
index 0000000..c4f98c7
--- /dev/null
+++ b/FitnessSync/backend/src/services/power_estimator.py
@@ -0,0 +1,160 @@
+import math
+import logging
+from typing import List, Optional, Tuple, Dict
+
+from ..models.activity import Activity
+from ..models.bike_setup import BikeSetup
+from ..models.weight_record import WeightRecord
+from ..services.parsers import extract_activity_data
+
+logger = logging.getLogger(__name__)
+
+class PowerEstimatorService:
+    def __init__(self, db_session):
+        self.db = db_session
+
+        # Physics Constants
+        self.GRAVITY = 9.80665
+        self.RHO = 1.225  # Air density at sea level, standard temp (kg/m^3)
+
+        # Default parameters if not provided/estimated
+        self.DEFAULT_CDA = 0.32      # Typical road cyclist
+        self.DEFAULT_CRR = 0.005     # Typical road tire on asphalt
+        self.DRIVETRAIN_LOSS = 0.03  # 3% loss
+
+    def estimate_power_for_activity(self, activity_id: int) -> Dict[str, any]:
+        """
+        Estimate a power stream for an activity from a physics model.
+        Returns summary stats.
+        """
+        activity = self.db.query(Activity).filter(Activity.id == activity_id).first()
+        if not activity:
+            raise ValueError("Activity not found")
+
+        if not activity.file_content:
+            raise ValueError("No file content to analyze")
+
+        # 1. Get setup and weights
+        bike_weight = 9.0  # Default 9kg
+        if activity.bike_setup and activity.bike_setup.weight_kg:
+            bike_weight = activity.bike_setup.weight_kg
+
+        rider_weight = 75.0  # Default 75kg
+        # Use the most recent weight record (matching the record closest
+        # to the activity date would be more accurate)
+        latest_weight = self.db.query(WeightRecord).order_by(WeightRecord.date.desc()).first()
+        if latest_weight and latest_weight.weight_kg:
+            rider_weight = latest_weight.weight_kg
+
+        total_mass = rider_weight + bike_weight
+
+        # 2. Extract data: speed (m/s) for the force terms, elevation (m)
+        # for grade, time (s) for acceleration
+        data = extract_activity_data(activity.file_content, activity.file_type)
+
+        timestamps = data.get('timestamps')
+        speeds = data.get('speed')
+        # The parsers module emits no altitude stream; elevation lives in
+        # the third component of each point
+        points = data.get('points') or []
+        elevations = [p[2] if len(p) > 2 else None for p in points] if points else None
+
+        if not speeds:
+            raise ValueError("No speed data available")
+
+        # Generate the power stream. Raw data is noisy; the central
+        # differences below give slight implicit smoothing of both the
+        # grade (d_ele / d_dist) and the acceleration.
+        power_stream = []
+        total_power = 0.0
+        count = 0
+
+        for i in range(len(speeds)):
+            v = speeds[i]  # m/s
+
+            # Skip if stopped
+            if v is None or v < 0.1:
+                power_stream.append(0)
+                continue
+
+            # Slope and acceleration via central differences
+            # (a +/- 5 second look-ahead/behind window would smooth better)
+            grade = 0.0
+            accel = 0.0
+
+            if 0 < i < len(speeds) - 1 and timestamps[i+1] and timestamps[i-1] \
+                    and speeds[i+1] is not None and speeds[i-1] is not None:
+                d_t = (timestamps[i+1] - timestamps[i-1]).total_seconds()
+                if d_t > 0:
+                    d_v = (speeds[i+1] - speeds[i-1])
+                    d_e = 0
+                    if elevations and elevations[i+1] is not None and elevations[i-1] is not None:
+                        d_e = (elevations[i+1] - elevations[i-1])
+                    d_s = (v * d_t)  # approx distance covered
+
+                    accel = d_v / d_t
+                    if d_s > 1.0:  # avoid division by zero / noise
+                        grade = d_e / d_s
+
+            # Physics model:
+            # F_total = F_grav + F_roll + F_aero + F_acc
+
+            # F_grav = m * g * sin(arctan(grade)) ~= m * g * grade
+            f_grav = total_mass * self.GRAVITY * grade
+
+            # F_roll = m * g * cos(arctan(grade)) * Crr ~= m * g * Crr
+            f_roll = total_mass * self.GRAVITY * self.DEFAULT_CRR
+
+            # F_aero = 0.5 * rho * CdA * v^2 (assumes no wind)
+            f_aero = 0.5 * self.RHO * self.DEFAULT_CDA * (v**2)
+
+            # F_acc = m * a
+            f_acc = total_mass * accel
+
+            f_total = f_grav + f_roll + f_aero + f_acc
+
+            # Power = Force * Velocity
+            p_raw = f_total * v
+
+            # Apply drivetrain loss: the rider must produce more at the
+            # pedals than arrives at the wheel
+            p_mech = p_raw / (1 - self.DRIVETRAIN_LOSS)
+
+            # Power can't be negative for a human (braking/coasting = 0 W output)
+            if p_mech < 0:
+                p_mech = 0
+
+            power_stream.append(int(p_mech))
+
+            total_power += p_mech
+            count += 1
+        avg_power = int(total_power / count) if count > 0 else 0
+
+        # The estimated stream cannot be written back into the FIT file,
+        # and the current schema has no JSON storage for derived streams,
+        # so persist the scalar metrics and return a sample of the stream
+        activity.avg_power = avg_power
+        activity.max_power = max(power_stream) if power_stream else 0
+
+        self.db.commit()
+
+        return {
+            "avg_power": avg_power,
+            "max_power": activity.max_power,
+            "stream_sample": power_stream[:20]
+        }
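As a sanity check on the model, a flat-road worked example with the default parameters (illustrative numbers: 75 kg rider + 9 kg bike, 30 km/h, no wind, no acceleration):

    # Worked example of the steady-state physics model (flat road, no wind)
    GRAVITY, RHO = 9.80665, 1.225
    CDA, CRR, LOSS = 0.32, 0.005, 0.03

    mass = 75.0 + 9.0           # rider + bike (kg)
    v = 30 / 3.6                # 30 km/h -> 8.33 m/s

    f_roll = mass * GRAVITY * CRR        # ~4.1 N
    f_aero = 0.5 * RHO * CDA * v**2      # ~13.6 N
    p_wheel = (f_roll + f_aero) * v      # ~148 W at the wheel
    p_pedals = p_wheel / (1 - LOSS)      # ~152 W at the pedals

    print(round(p_wheel), round(p_pedals))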
diff --git a/FitnessSync/backend/src/services/segment_matcher.py b/FitnessSync/backend/src/services/segment_matcher.py
index 06e5916..2270272 100644
--- a/FitnessSync/backend/src/services/segment_matcher.py
+++ b/FitnessSync/backend/src/services/segment_matcher.py
@@ -34,8 +34,15 @@ class SegmentMatcher:
         # without special extensions. We'll fetch all and filter in Python.
         # Ideally, we'd use PostGIS geometry types.
 
+        # Normalize activity type
+        act_type = activity.activity_type
+        if act_type in ['road_biking', 'mountain_biking', 'gravel_cycling', 'virtual_cycling', 'indoor_cycling']:
+            act_type = 'cycling'
+        elif act_type in ['trail_running', 'treadmill_running']:
+            act_type = 'running'
+
         segments = self.db.query(Segment).filter(
-            Segment.activity_type == activity.activity_type
+            (Segment.activity_type == activity.activity_type) | (Segment.activity_type == act_type)
         ).all()
 
         matched_efforts = []
+ """ + activity = self.db.query(Activity).filter(Activity.id == activity_id).first() + if not activity: + raise ValueError("Activity not found") + + if not activity.file_content: + raise ValueError("No file content to analyze") + + # 1. Get Setup and Weights + bike_weight = 9.0 # Default 9kg + if activity.bike_setup and activity.bike_setup.weight_kg: + bike_weight = activity.bike_setup.weight_kg + + rider_weight = 75.0 # Default 75kg + # Try to find weight record closest to activity date? Or just latest? + # Latest for now. + latest_weight = self.db.query(WeightRecord).order_by(WeightRecord.date.desc()).first() + if latest_weight and latest_weight.weight_kg: + rider_weight = latest_weight.weight_kg + + total_mass = rider_weight + bike_weight + + # 2. Extract Data + data = extract_activity_data(activity.file_content, activity.file_type) + # We need: Speed (m/s), Elevation (m) for Grade, Time (s) for acceleration + + timestamps = data.get('timestamps') + speeds = data.get('enhanced_speed') or data.get('speed') + elevations = data.get('enhanced_altitude') or data.get('altitude') + + if not speeds or not len(speeds) > 0: + raise ValueError("No speed data available") + + # Generate Power Stream + power_stream = [] + total_power = 0.0 + count = 0 + + # Smoothing window? Physics is noisy on raw data. + # We'll calculate point-by-point but maybe assume slight smoothing explicitly or implicitly via grade. + + # Pre-calc gradients? + # We need grade at each point. slope = d_ele / d_dist + # d_dist = speed * d_time + + for i in range(len(speeds)): + t = timestamps[i] + v = speeds[i] # m/s + + # Skip if stopped + if v is None or v < 0.1: + power_stream.append(0) + continue + + # Get slope + # Look ahead/behind for slope smoothing (e.g. +/- 5 seconds) would be better + # Simple difference for now: + grade = 0.0 + accel = 0.0 + + if i > 0 and i < len(speeds) - 1: + # Central difference + d_t = (timestamps[i+1] - timestamps[i-1]).total_seconds() + if d_t > 0: + d_v = (speeds[i+1] - speeds[i-1]) # acc + d_e = (elevations[i+1] - elevations[i-1]) if elevations else 0 + d_s = (v * d_t) # approx distance covers + + accel = d_v / d_t + if d_s > 1.0: # avoid div by zero/noise + grade = d_e / d_s + + # Physics Formula + # F_total = F_grav + F_roll + F_aero + F_acc + + # F_grav = m * g * sin(arctan(grade)) ~= m * g * grade + f_grav = total_mass * self.GRAVITY * grade + + # F_roll = m * g * cos(arctan(grade)) * Crr ~= m * g * Crr + f_roll = total_mass * self.GRAVITY * self.DEFAULT_CRR + + # F_aero = 0.5 * rho * CdA * v^2 + # Assume no wind for now + f_aero = 0.5 * self.RHO * self.DEFAULT_CDA * (v**2) + + # F_acc = m * a + f_acc = total_mass * accel + + f_total = f_grav + f_roll + f_aero + f_acc + + # Power = Force * Velocity + p_raw = f_total * v + + # Apply Drivetrain Loss + p_mech = p_raw / (1 - self.DRIVETRAIN_LOSS) + + # Power can't be negative for a human (braking/coasting = 0w output) + if p_mech < 0: + p_mech = 0 + + power_stream.append(int(p_mech)) + + total_power += p_mech + count += 1 + + avg_power = int(total_power / count) if count > 0 else 0 + + # Return estimated stream and stats + # Ideally we'd update the Activity 'power' stream and 'avg_power' metric + # BUT 'extract_activity_data' reads from FILE. We can't easily write back to FIT file. + # We should store "estimated_power" in DB or separate storage? + # The prompt implies we want to USE this data. + # If we just update `Activity.avg_power`, that's easy. 
diff --git a/FitnessSync/backend/src/services/segment_matcher.py b/FitnessSync/backend/src/services/segment_matcher.py
index 06e5916..2270272 100644
--- a/FitnessSync/backend/src/services/segment_matcher.py
+++ b/FitnessSync/backend/src/services/segment_matcher.py
@@ -34,8 +34,15 @@ class SegmentMatcher:
         # without special extensions. We'll fetch all and filter in Python.
         # Ideally, we'd use PostGIS geometry types.

+        # Normalize the activity type so sub-types match their parent sport
+        act_type = activity.activity_type
+        if act_type in ['road_biking', 'mountain_biking', 'gravel_cycling', 'virtual_cycling', 'indoor_cycling']:
+            act_type = 'cycling'
+        elif act_type in ['trail_running', 'treadmill_running']:
+            act_type = 'running'
+
         segments = self.db.query(Segment).filter(
-            Segment.activity_type == activity.activity_type
+            (Segment.activity_type == activity.activity_type) | (Segment.activity_type == act_type)
         ).all()

         matched_efforts = []
diff --git a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc
index edb828d..496d334 100644
Binary files a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc and b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc differ
diff --git a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc
index 14114f2..faa0ba5 100644
Binary files a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc and b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc differ
diff --git a/FitnessSync/backend/src/services/sync/activity.py b/FitnessSync/backend/src/services/sync/activity.py
index 2574a51..81964e4 100644
--- a/FitnessSync/backend/src/services/sync/activity.py
+++ b/FitnessSync/backend/src/services/sync/activity.py
@@ -317,3 +317,13 @@ class GarminActivitySync:
             data.get('maxBikingCadenceInRevPerMinute') or
             data.get('maxSwimCadenceInStrokesPerMinute')
         )
+
+        # Location
+        if data.get('startingLatitude') and data.get('startingLongitude'):
+            activity.start_lat = data.get('startingLatitude')
+            activity.start_lng = data.get('startingLongitude')
+        elif data.get('startRecallLatitude') and data.get('startRecallLongitude'):
+            # Garmin sometimes reports these fields instead
+            activity.start_lat = data.get('startRecallLatitude')
+            activity.start_lng = data.get('startRecallLongitude')
+
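The sub-type normalization above is inlined in `SegmentMatcher`, and the same sport families are filtered on elsewhere in this PR. A shared helper would keep the lists from drifting apart — a suggestion only, with a hypothetical module placement (`src/utils/sport_types.py`):

```python
# Suggested (not in this PR): one source of truth for Garmin sub-type
# normalization. The sets below reflect the types used in this diff;
# extend as new Garmin sub-types appear.
CYCLING_SUBTYPES = {
    'road_biking', 'mountain_biking', 'gravel_cycling',
    'virtual_cycling', 'indoor_cycling',
}
RUNNING_SUBTYPES = {'trail_running', 'treadmill_running'}

def normalize_activity_type(activity_type: str) -> str:
    """Collapse a Garmin sub-type onto its parent sport."""
    if activity_type in CYCLING_SUBTYPES:
        return 'cycling'
    if activity_type in RUNNING_SUBTYPES:
        return 'running'
    return activity_type

def expand_activity_type(activity_type: str) -> set:
    """Inverse direction: all sub-types to match when filtering."""
    if activity_type == 'cycling':
        return {'cycling'} | CYCLING_SUBTYPES
    if activity_type == 'running':
        return {'running'} | RUNNING_SUBTYPES
    return {activity_type}
```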
diff --git a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc
index 57e9c24..dab611d 100644
Binary files a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc and b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc differ
diff --git a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc
index 5ded0de..46f9304 100644
Binary files a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc and b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc differ
diff --git a/FitnessSync/backend/src/utils/geo.py b/FitnessSync/backend/src/utils/geo.py
index 14b7c97..6caf17c 100644
--- a/FitnessSync/backend/src/utils/geo.py
+++ b/FitnessSync/backend/src/utils/geo.py
@@ -7,7 +7,7 @@ def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
     on the earth (specified in decimal degrees)
     """
     # Convert decimal degrees to radians
-    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat2, lon2, lat2])
+    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])

     # Haversine formula
     dlon = lon2 - lon1
@@ -17,6 +17,21 @@ def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
     r = 6371000  # Radius of earth in meters
     return c * r

+def calculate_bearing(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
+    """
+    Calculate the initial bearing between two points.
+    Returns degrees in [0, 360).
+    """
+    lat1, lon1, lat2, lon2 = map(math.radians, [lat1, lon1, lat2, lon2])
+
+    dLon = lon2 - lon1
+    y = math.sin(dLon) * math.cos(lat2)
+    x = math.cos(lat1) * math.sin(lat2) - math.sin(lat1) * math.cos(lat2) * math.cos(dLon)
+
+    brng = math.atan2(y, x)
+    return (math.degrees(brng) + 360) % 360
+
+
 def perpendicular_distance(point: List[float], line_start: List[float], line_end: List[float]) -> float:
     """
     Calculate perpendicular distance from point to line segment.
@@ -85,6 +100,54 @@ def ramer_douglas_peucker(points: List[List[float]], epsilon: float) -> List[Lis
     else:
         return [points[0], points[end]]

+def ramer_douglas_peucker_indices(points: List[List[float]], epsilon: float) -> List[int]:
+    """
+    Simplify a list of [lon, lat] points using the RDP algorithm.
+    Returns the INDICES of the kept points in the original list.
+    """
+    if len(points) < 3:
+        return list(range(len(points)))
+
+    # Recursion over relative slices makes index bookkeeping awkward, so the
+    # helper below recurses on absolute indices into the original list.
+    def _rdp(start_idx: int, end_idx: int) -> List[int]:
+        if end_idx - start_idx < 2:
+            return [start_idx, end_idx]
+
+        dmax = 0.0
+        index = 0
+
+        # Find the point with the maximum distance from the chord between
+        # start_idx and end_idx.
+        p_start = points[start_idx]
+        p_end = points[end_idx]
+
+        # perpendicular_distance is the hot spot here; pre-computing the line
+        # parameters would speed this up if profiling warrants it.
+        for i in range(start_idx + 1, end_idx):
+            d = perpendicular_distance(points[i], p_start, p_end)
+            if d > dmax:
+                index = i
+                dmax = d
+
+        if dmax > epsilon:
+            res1 = _rdp(start_idx, index)
+            res2 = _rdp(index, end_idx)
+            return res1[:-1] + res2
+        else:
+            return [start_idx, end_idx]
+
+    return _rdp(0, len(points) - 1)
+
+
 def calculate_bounds(points: List[List[float]]) -> List[float]:
     """
     Return [min_lat, min_lon, max_lat, max_lon]
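Since `ramer_douglas_peucker_indices` returns indices rather than points, callers can carry parallel streams (timestamps, heart rate) through simplification. A small usage sketch, assuming the repo's import layout:

```python
# Sketch: simplify a GPS track while keeping a parallel timestamp stream
# aligned with the surviving points. Import path assumes the repo layout.
from src.utils.geo import ramer_douglas_peucker_indices

points = [[11.576, 48.137], [11.577, 48.138],
          [11.579, 48.138], [11.581, 48.139]]  # [lon, lat]
timestamps = [0.0, 5.0, 11.0, 16.0]            # seconds, parallel to points

# epsilon is in whatever units perpendicular_distance uses (assumed meters)
keep = ramer_douglas_peucker_indices(points, epsilon=10.0)
simplified = [points[i] for i in keep]
kept_times = [timestamps[i] for i in keep]
```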
diff --git a/FitnessSync/backend/templates/activities.html b/FitnessSync/backend/templates/activities.html
index 2be5582..4b68c62 100644
--- a/FitnessSync/backend/templates/activities.html
+++ b/FitnessSync/backend/templates/activities.html
@@ -35,9 +35,9 @@
+                <div class="col-md-3">
+                    <label class="form-label" for="filter-bike">Bike</label>
+                    <select class="form-select" id="filter-bike">
+                        <option value="">All Bikes</option>
+                    </select>
+                </div>
@@ -315,7 +324,10 @@
     detailsModal = new bootstrap.Modal(document.getElementById('activityDetailsModal'));
+
+    loadActivities();
+    fetchBikeSetups();

     document.getElementById('prev-page-btn').addEventListener('click', () => changePage(-1));
     document.getElementById('next-page-btn').addEventListener('click', () => changePage(1));
@@ -375,10 +387,19 @@
         tbody.innerHTML = 'Loading...';

         const typeFilter = document.getElementById('filter-type').value;
+        const bikeFilter = document.getElementById('filter-bike').value;
+
         let url = `/api/activities/list?limit=${limit}&offset=${currentPage * limit}`;

-        if (typeFilter) {
-            url = `/api/activities/query?activity_type=${typeFilter}`;
+        // If any filter is active, force query mode
+        if (typeFilter || bikeFilter) {
+            url = `/api/activities/query?`;
+            const params = new URLSearchParams();
+            if (typeFilter) params.append('activity_type', typeFilter);
+            if (bikeFilter) params.append('bike_setup_id', bikeFilter);
+
+            url += params.toString();
+
             document.getElementById('prev-page-btn').disabled = true;
             document.getElementById('next-page-btn').disabled = true;
         } else {
@@ -438,8 +459,8 @@
[per-row action markup changed here; HTML lost in extraction — presumably wiring the new refreshActivity() action into each row]
@@ -577,6 +598,44 @@
         }
     }

+    async function fetchBikeSetups() {
+        try {
+            const res = await fetch('/api/bike-setups');
+            if (!res.ok) throw new Error("Failed to fetch bikes");
+            const bikes = await res.json();
+            const select = document.getElementById('filter-bike');
+            bikes.forEach(bike => {
+                const opt = document.createElement('option');
+                opt.value = bike.id;
+                opt.textContent = bike.name ? `${bike.name} (${bike.frame})` : bike.frame;
+                select.appendChild(opt);
+            });
+        } catch (e) {
+            console.error(e);
+        }
+    }
+
+    async function refreshActivity(garminId) {
+        if (!confirm("Are you sure you want to re-download this activity from Garmin and run bike matching?")) {
+            return;
+        }
+
+        showToast("Processing...", "Refreshing activity data...", "info");
+        try {
+            const res = await fetch(`/api/activities/${garminId}/redownload`, { method: 'POST' });
+            const data = await res.json();
+
+            if (res.ok) {
+                showToast("Success", data.message, "success");
+                loadActivities(); // Reload table
+            } else {
+                throw new Error(data.detail || "Refresh failed");
+            }
+        } catch (e) {
+            showToast("Error", e.message, "error");
+        }
+    }
+
     window.showActivityDetails = async function (id) {
         // Reset fields
         document.querySelectorAll('[id^="det-"]').forEach(el => el.textContent = '-');
diff --git a/FitnessSync/backend/templates/activity_view.html b/FitnessSync/backend/templates/activity_view.html
index 793c501..f726e68 100644
--- a/FitnessSync/backend/templates/activity_view.html
+++ b/FitnessSync/backend/templates/activity_view.html
@@ -56,6 +56,12 @@
+                <button id="refresh-btn" class="btn btn-sm btn-outline-secondary" onclick="refreshActivity()">
+                    Refresh from Garmin
+                </button>
+                <button id="estimate-power-btn" class="btn btn-sm btn-outline-secondary" onclick="estimatePower()">
+                    Estimate Power
+                </button>
@@ -228,7 +234,10 @@
-                        <h6>Bike Setup</h6>
+                        <h6>
+                            Bike Setup
+                            <button class="btn btn-sm btn-link p-0" onclick="editBikeSetup()"><i class="bi bi-pencil"></i></button>
+                        </h6>
                         <div id="m-bike-info"><span class="text-muted">No Setup</span></div>
@@ -647,6 +656,132 @@
         setupDrag(endMarker, false);
     }

+    async function refreshActivity() {
+        if (!confirm("Are you sure you want to re-download this activity from Garmin and run bike matching? This will overwrite any manual bike selection.")) {
+            return;
+        }
+
+        const btn = document.getElementById('refresh-btn');
+        const origHtml = btn.innerHTML;
+        btn.disabled = true;
+        btn.innerHTML = '<span class="spinner-border spinner-border-sm"></span> Refreshing...';
+
+        // showToast is defined in base.html; fall back to alert() if this
+        // view is ever rendered without it.
+        if (typeof showToast === 'function') showToast("Processing...", "Refreshing activity data...", "info");
+
+        try {
+            const res = await fetch(`/api/activities/${activityId}/redownload`, { method: 'POST' });
+            const data = await res.json();
+
+            if (res.ok) {
+                if (typeof showToast === 'function') showToast("Success", data.message, "success");
+                else alert(data.message);
+
+                // Reload details
+                loadDetails();
+                loadCharts();
+            } else {
+                throw new Error(data.detail || "Refresh failed");
+            }
+        } catch (e) {
+            console.error(e);
+            if (typeof showToast === 'function') showToast("Error", e.message, "error");
+            else alert("Error: " + e.message);
+        } finally {
+            btn.disabled = false;
+            btn.innerHTML = origHtml;
+        }
+    }
+
+    let allBikes = [];
+    async function fetchAllBikes() {
+        try {
+            const res = await fetch('/api/bike-setups');
+            if (res.ok) allBikes = await res.json();
+        } catch (e) { console.error(e); }
+    }
+    document.addEventListener('DOMContentLoaded', fetchAllBikes);
+
+    function editBikeSetup() {
+        const container = document.getElementById('m-bike-info');
+        if (container.querySelector('select')) return;
+
+        // Snapshot the current markup (not used yet; a Cancel action could restore it)
+        const currentHtml = container.innerHTML;
+
+        let initialSelect = `
+            <select id="bike-select" class="form-select form-select-sm">
+                <option value="">No Bike</option>
+                ${allBikes.map(b => `<option value="${b.id}">${b.name || b.frame}</option>`).join('')}
+            </select>
+            <button class="btn btn-sm btn-success mt-1" onclick="saveBikeSetup()">Save</button>
+        `;
+
+        container.innerHTML = initialSelect;
+    }
+
+    async function saveBikeSetup() {
+        const sel = document.getElementById('bike-select');
+        const newId = sel.value ? parseInt(sel.value) : null;
+
+        try {
+            const res = await fetch(`/api/activities/${activityId}/bike`, {
+                method: 'PUT',
+                headers: { 'Content-Type': 'application/json' },
+                body: JSON.stringify({ bike_setup_id: newId, manual_override: true })
+            });
+
+            if (res.ok) {
+                if (typeof showToast === 'function') showToast("Success", "Bike setup updated", "success");
+                else alert("Bike setup updated");
+                loadDetails();
+            } else {
+                const err = await res.json();
+                alert("Error: " + err.detail);
+            }
+        } catch (e) {
+            console.error(e);
+            alert("Save failed");
+        }
+    }
+
+    async function estimatePower() {
+        if (!confirm("Estimate power for this activity using a physics-based calculation? This will update average/max power stats.")) return;
+
+        const btn = document.getElementById('estimate-power-btn');
+        const origText = btn.innerHTML;
+        btn.disabled = true;
+        btn.innerHTML = '<span class="spinner-border spinner-border-sm"></span> Estimating...';
+
+        try {
+            const res = await fetch(`/api/activities/${window.currentDbId}/estimate_power`, {
+                method: 'POST'
+            });
+
+            if (res.ok) {
+                const data = await res.json();
+                alert("Power estimation complete! Avg: " + data.stats.avg_power + " W");
+                loadDetails(); // Refresh stats
+                loadCharts(); // Refresh charts (the service returns the stream, but the view reloads instead)
+            } else {
+                const err = await res.json();
+                alert("Error: " + err.detail);
+            }
+        } catch (e) {
+            console.error(e);
+            alert("Estimate failed: " + e.message);
+        } finally {
+            btn.disabled = false;
+            btn.innerHTML = origText;
+        }
+    }
+
     async function saveSegment() {
         if (startIndex >= endIndex) {
             alert("Start point must be before End point.");
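The endpoints these templates call can also be exercised directly, e.g. while testing. A sketch with `requests` — the base URL and IDs are placeholders; routes and payloads mirror the fetch() calls above:

```python
# Sketch: driving the endpoints used by the templates above with `requests`.
# BASE and the IDs are assumptions; routes/payloads mirror the fetch() calls.
import requests

BASE = "http://localhost:8000"  # assumed dev server address

# Re-download an activity from Garmin and re-run bike matching (Garmin id)
r = requests.post(f"{BASE}/api/activities/1234567890/redownload")
print(r.json().get("message"))

# Manually pin a bike setup on an activity
r = requests.put(
    f"{BASE}/api/activities/42/bike",
    json={"bike_setup_id": 3, "manual_override": True},
)
r.raise_for_status()

# Run the physics-based power estimation
r = requests.post(f"{BASE}/api/activities/42/estimate_power")
print(r.json())
```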
diff --git a/FitnessSync/backend/templates/base.html b/FitnessSync/backend/templates/base.html
index 7d0ac8a..51cc6a3 100644
--- a/FitnessSync/backend/templates/base.html
+++ b/FitnessSync/backend/templates/base.html
@@ -75,6 +75,11 @@
[markup-only changes lost in extraction: five lines added to the base.html navigation (likely a link to the new discovery page), followed by header/form markup edits in the bike setups template whose diff header was also lost]
@@ -96,7 +117,7 @@
 function renderTable() {
     const tbody = document.getElementById('setupsTableBody');
     tbody.innerHTML = '';

     currentSetups.forEach(setup => {
         const ratio = (setup.chainring / setup.rear_cog).toFixed(2);
         const tr = document.createElement('tr');
@@ -106,6 +127,10 @@
             <td>${setup.chainring}t</td>
             <td>${setup.rear_cog}t</td>
             <td>${ratio}</td>
+            <td>${setup.activity_count || 0}</td>
+            <td>${setup.total_distance ? (setup.total_distance / 1000).toFixed(0) + ' km' : '-'}</td>
+            <td>${setup.weight_kg ? setup.weight_kg + 'kg' : '-'}</td>
+            <td>${setup.retirement_date ? 'Retired' : 'Active'}</td>
[additional row action markup lost in extraction]
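The new columns assume the bike-setups API now returns `activity_count` and `total_distance` per setup. If that aggregation doesn't exist server-side yet, a SQLAlchemy sketch along these lines would produce it (hypothetical helper; assumes `Activity.bike_setup_id` and `Activity.distance` columns):

```python
# Sketch (not in this PR): per-setup aggregates for the new table columns.
# Field names are assumptions based on the models used elsewhere in this diff.
from sqlalchemy import func

from ..models.activity import Activity


def setup_usage_stats(db):
    rows = (
        db.query(
            Activity.bike_setup_id,
            func.count(Activity.id).label("activity_count"),
            func.coalesce(func.sum(Activity.distance), 0).label("total_distance"),
        )
        .filter(Activity.bike_setup_id.isnot(None))
        .group_by(Activity.bike_setup_id)
        .all()
    )
    # Map: bike_setup_id -> (activity_count, total_distance in meters)
    return {r.bike_setup_id: (r.activity_count, r.total_distance) for r in rows}
```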
[new template for the segment discovery page; markup lost in extraction. Surviving text: a form hint reading "Enter the ID of the activity to slice into segments." and a results area reading "Run a search to see recommendations."]
+{% endblock %}
\ No newline at end of file
diff --git a/FitnessSync/backend/templates/segments.html b/FitnessSync/backend/templates/segments.html
index c447233..cd9d240 100644
--- a/FitnessSync/backend/templates/segments.html
+++ b/FitnessSync/backend/templates/segments.html
@@ -30,7 +30,8 @@
                 <th>Name</th>
                 <th>Type</th>
                 <th>Distance</th>
                 <th>Elevation</th>
+                <th>Efforts</th>
                 <th>Actions</th>
@@ -141,7 +142,8 @@
             <td>${seg.name}</td>
             <td>${seg.activity_type}</td>
             <td>${(seg.distance / 1000).toFixed(2)} km</td>
             <td>${seg.elevation_gain ? seg.elevation_gain.toFixed(1) + ' m' : '-'}</td>
+            <td>${seg.effort_count || 0}</td>