diff --git a/FitnessSync/backend/__pycache__/main.cpython-311.pyc b/FitnessSync/backend/__pycache__/main.cpython-311.pyc index e8b58cb..afc9663 100644 Binary files a/FitnessSync/backend/__pycache__/main.cpython-311.pyc and b/FitnessSync/backend/__pycache__/main.cpython-311.pyc differ diff --git a/FitnessSync/backend/alembic/versions/__pycache__/a9c00e495f5e_add_segments_tables.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/a9c00e495f5e_add_segments_tables.cpython-311.pyc new file mode 100644 index 0000000..96dbfb7 Binary files /dev/null and b/FitnessSync/backend/alembic/versions/__pycache__/a9c00e495f5e_add_segments_tables.cpython-311.pyc differ diff --git a/FitnessSync/backend/alembic/versions/a9c00e495f5e_add_segments_tables.py b/FitnessSync/backend/alembic/versions/a9c00e495f5e_add_segments_tables.py new file mode 100644 index 0000000..4673665 --- /dev/null +++ b/FitnessSync/backend/alembic/versions/a9c00e495f5e_add_segments_tables.py @@ -0,0 +1,62 @@ +"""Add segments tables + +Revision ID: a9c00e495f5e +Revises: 73e349ef1d88 +Create Date: 2026-01-09 18:23:42.393552 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = 'a9c00e495f5e' +down_revision: Union[str, None] = '73e349ef1d88' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('segments', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('description', sa.String(), nullable=True), + sa.Column('distance', sa.Float(), nullable=False), + sa.Column('avg_grade', sa.Float(), nullable=True), + sa.Column('elevation_gain', sa.Float(), nullable=True), + sa.Column('points', sa.JSON(), nullable=False), + sa.Column('bounds', sa.JSON(), nullable=False), + sa.Column('activity_type', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_segments_id'), 'segments', ['id'], unique=False) + op.create_table('segment_efforts', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('segment_id', sa.Integer(), nullable=False), + sa.Column('activity_id', sa.Integer(), nullable=False), + sa.Column('elapsed_time', sa.Integer(), nullable=False), + sa.Column('start_time', sa.DateTime(), nullable=False), + sa.Column('end_time', sa.DateTime(), nullable=False), + sa.Column('avg_power', sa.Integer(), nullable=True), + sa.Column('avg_hr', sa.Integer(), nullable=True), + sa.Column('kom_rank', sa.Integer(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.ForeignKeyConstraint(['activity_id'], ['activities.id'], ), + sa.ForeignKeyConstraint(['segment_id'], ['segments.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_segment_efforts_id'), 'segment_efforts', ['id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_segment_efforts_id'), table_name='segment_efforts') + op.drop_table('segment_efforts') + op.drop_index(op.f('ix_segments_id'), table_name='segments') + op.drop_table('segments') + # ### end Alembic commands ### diff --git a/FitnessSync/backend/main.py b/FitnessSync/backend/main.py index 0151e8a..9fd25cd 100644 --- a/FitnessSync/backend/main.py +++ b/FitnessSync/backend/main.py @@ -76,6 +76,9 @@ app.include_router(activities.router, prefix="/api") app.include_router(activities.router, prefix="/api") app.include_router(scheduling.router, prefix="/api") +from src.api import segments +app.include_router(segments.router, prefix="/api") + from src.api import bike_setups app.include_router(bike_setups.router) diff --git a/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc index 5062520..5fb4f4b 100644 Binary files a/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc and b/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc index 52e01a6..cb4f24e 100644 Binary files a/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc and b/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc new file mode 100644 index 0000000..9eeeadd Binary files /dev/null and b/FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc index 2f56181..dfc59b9 100644 Binary files a/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc and b/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/api/activities.py b/FitnessSync/backend/src/api/activities.py index b233064..eb279ed 100644 --- a/FitnessSync/backend/src/api/activities.py +++ b/FitnessSync/backend/src/api/activities.py @@ -11,10 +11,8 @@ from ..utils.config import config # New Sync Imports from ..services.job_manager import job_manager from ..models.activity_state import GarminActivityState -import fitdecode -import io -import xml.etree.ElementTree as ET from datetime import datetime +from ..services.parsers import extract_points_from_file router = APIRouter() @@ -480,64 +478,7 @@ async def get_sync_status_summary(db: Session = Depends(get_db)): return {} -def _extract_points_from_fit(file_content: bytes) -> List[List[float]]: - """ - Extract [lon, lat] points from a FIT file content. - Returns a list of [lon, lat]. - """ - points = [] - try: - with io.BytesIO(file_content) as f: - with fitdecode.FitReader(f) as fit: - for frame in fit: - if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'record': - # Check for position_lat and position_long - # Garmin stores lat/long as semicircles. 
Convert to degrees: semicircle * (180 / 2^31) - if frame.has_field('position_lat') and frame.has_field('position_long'): - lat_sc = frame.get_value('position_lat') - lon_sc = frame.get_value('position_long') - - if lat_sc is not None and lon_sc is not None: - lat = lat_sc * (180.0 / 2**31) - lon = lon_sc * (180.0 / 2**31) - points.append([lon, lat]) - except Exception as e: - logger.error(f"Error parsing FIT file: {e}") - # Return what we have or empty - return points -def _extract_points_from_tcx(file_content: bytes) -> List[List[float]]: - """ - Extract [lon, lat] points from a TCX file content. - """ - points = [] - try: - # TCX is XML - # Namespace usually exists - root = ET.fromstring(file_content) - # Namespaces are annoying in ElementTree, usually {http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2} - # We can just iterate and ignore namespace or handle it. - # Let's try ignoring namespace by using local-name() in xpath if lxml, but this is stdlib ET. - # Just strip namespace for simplicity - - for trkpt in root.iter(): - if trkpt.tag.endswith('Trackpoint'): - lat = None - lon = None - for child in trkpt.iter(): - if child.tag.endswith('LatitudeDegrees'): - try: lat = float(child.text) - except: pass - elif child.tag.endswith('LongitudeDegrees'): - try: lon = float(child.text) - except: pass - - if lat is not None and lon is not None: - points.append([lon, lat]) - - except Exception as e: - logger.error(f"Error parsing TCX file: {e}") - return points @router.get("/activities/{activity_id}/geojson") async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)): @@ -550,14 +491,9 @@ async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)): raise HTTPException(status_code=404, detail="Activity or file content not found") points = [] - if activity.file_type == 'fit': - points = _extract_points_from_fit(activity.file_content) - elif activity.file_type == 'tcx': - points = _extract_points_from_tcx(activity.file_content) + if activity.file_type in ['fit', 'tcx']: + points = extract_points_from_file(activity.file_content, activity.file_type) else: - # Try FIT or TCX anyway? - # Default to FIT check headers? 
- # For now just log warning logger.warning(f"Unsupported file type for map: {activity.file_type}") if not points: diff --git a/FitnessSync/backend/src/api/scheduling.py b/FitnessSync/backend/src/api/scheduling.py index 0b4c0e8..ae17458 100644 --- a/FitnessSync/backend/src/api/scheduling.py +++ b/FitnessSync/backend/src/api/scheduling.py @@ -129,3 +129,11 @@ def delete_scheduled_job(job_id: int, db: Session = Depends(get_db)): db.delete(job) db.commit() return None + +@router.post("/scheduling/jobs/{job_id}/run", status_code=200) +def run_scheduled_job(job_id: int): + """Manually trigger a scheduled job.""" + from ..services.scheduler import scheduler + if scheduler.trigger_job(job_id): + return {"status": "triggered", "message": f"Job {job_id} triggered successfully"} + raise HTTPException(status_code=404, detail="Job not found") diff --git a/FitnessSync/backend/src/api/segments.py b/FitnessSync/backend/src/api/segments.py new file mode 100644 index 0000000..cd0eac7 --- /dev/null +++ b/FitnessSync/backend/src/api/segments.py @@ -0,0 +1,226 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from typing import List, Optional +from sqlalchemy.orm import Session +from ..models.segment import Segment +from ..models.segment_effort import SegmentEffort +from ..services.postgresql_manager import PostgreSQLManager +from ..utils.config import config +from pydantic import BaseModel +import json + +router = APIRouter() + +def get_db(): + db_manager = PostgreSQLManager(config.DATABASE_URL) + with db_manager.get_db_session() as session: + yield session + +class SegmentCreate(BaseModel): + name: str + description: Optional[str] = None + activity_id: int + start_index: int + end_index: int + +class SegmentEffortResponse(BaseModel): + id: int + segment_id: int + segment_name: str + activity_id: int + elapsed_time: float + start_time: Optional[str] + end_time: Optional[str] + avg_hr: Optional[int] = None + avg_power: Optional[int] = None + kom_rank: Optional[int] + pr_rank: Optional[int] + is_kom: bool + is_pr: bool + + +class SegmentResponse(BaseModel): + id: int + name: str + distance: float + elevation_gain: Optional[float] + activity_type: str + points: List[List[float]] + +@router.post("/segments/create") +def create_segment(payload: SegmentCreate, db: Session = Depends(get_db)): + """Create a new segment from an activity.""" + from ..models.activity import Activity + from ..services.parsers import extract_points_from_file + from ..utils.geo import ramer_douglas_peucker, calculate_bounds + + activity = db.query(Activity).filter(Activity.id == payload.activity_id).first() + if not activity: + raise HTTPException(status_code=404, detail="Activity not found") + + points = extract_points_from_file(activity.file_content, activity.file_type) + + print(f"DEBUG CREATE SEGMENT: ID={activity.id} Name={payload.name} Start={payload.start_index} End={payload.end_index} TotalPoints={len(points)}") + + if not points or len(points) <= payload.end_index: + print(f"DEBUG ERROR: Invalid indices. Points len={len(points)}") + raise HTTPException(status_code=400, detail="Invalid points or indices") + + # Slice points + segment_points = points[payload.start_index : payload.end_index + 1] + + # Simplify (RDP) - epsilon ~10 meters? 
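+    # Reduce vertex count before storing: with epsilon=10.0 the simplified
+    # line stays within roughly 10 m of the raw GPS trace, which typically
+    # shrinks a dense 1 Hz recording by an order of magnitude.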
+    simplified_points = ramer_douglas_peucker(segment_points, epsilon=10.0)
+
+    # Calculate bounds
+    bounds = calculate_bounds(segment_points)
+
+    # Distance / elevation gain: simple haversine sum over consecutive points
+    from ..utils.geo import haversine_distance
+    dist = 0.0
+    elev_gain = 0.0
+
+    for i in range(len(segment_points)-1):
+        p1 = segment_points[i]
+        p2 = segment_points[i+1]
+        dist += haversine_distance(p1[1], p1[0], p2[1], p2[0])
+
+        # Elevation gain, if the points carry a z-coordinate
+        if len(p1) > 2 and len(p2) > 2 and p1[2] is not None and p2[2] is not None:
+            diff = p2[2] - p1[2]
+            if diff > 0:
+                elev_gain += diff
+
+    # Create Segment
+    segment = Segment(
+        name=payload.name,
+        description=payload.description,
+        distance=dist,
+        elevation_gain=elev_gain,
+        activity_type=activity.activity_type or 'cycling',
+        points=json.dumps(simplified_points),
+        bounds=json.dumps(bounds)
+    )
+    db.add(segment)
+    db.commit()
+    db.refresh(segment)
+
+    # Trigger matching for this activity immediately, reusing the parsed points
+    try:
+        from ..services.segment_matcher import SegmentMatcher
+        matcher = SegmentMatcher(db)
+        matcher.match_activity(activity, points)
+    except Exception as e:
+        # Log the error but don't fail the request; the segment is already created
+        print(f"Error executing immediate match: {e}")
+
+    return {"message": "Segment created", "id": segment.id}
+
+@router.get("/segments", response_model=List[SegmentResponse])
+def list_segments(db: Session = Depends(get_db)):
+    segments = db.query(Segment).all()
+    res = []
+    for s in segments:
+        pts = json.loads(s.points) if isinstance(s.points, str) else s.points
+        res.append(SegmentResponse(
+            id=s.id,
+            name=s.name,
+            distance=s.distance,
+            elevation_gain=s.elevation_gain,
+            activity_type=s.activity_type,
+            points=pts
+        ))
+    return res
+
+@router.get("/activities/{activity_id}/efforts", response_model=List[SegmentEffortResponse])
+def get_activity_efforts(activity_id: int, db: Session = Depends(get_db)):
+    """Get all segment efforts for a specific activity."""
+    from ..models.activity import Activity
+    # Lookup is by the internal integer id; a garmin_activity_id string
+    # lookup could be added later if callers need it.
+    activity = db.query(Activity).filter(Activity.id == activity_id).first()
+    if not activity:
+        raise HTTPException(status_code=404, detail="Activity not found")
+
+    efforts = db.query(SegmentEffort).filter(SegmentEffort.activity_id == activity.id).all()
+
+    # Enrich with segment name
+    responses = []
+    for effort in efforts:
+        responses.append(SegmentEffortResponse(
+            id=effort.id,
+            segment_id=effort.segment_id,
+            segment_name=effort.segment.name,
+            activity_id=effort.activity_id,
+            elapsed_time=effort.elapsed_time,
+            start_time=effort.start_time.isoformat() if effort.start_time else None,
+            end_time=effort.end_time.isoformat() if effort.end_time else None,
+            avg_hr=effort.avg_hr,
+            avg_power=effort.avg_power,
+            kom_rank=effort.kom_rank,
+            pr_rank=None,  # Placeholder
+            is_kom=(effort.kom_rank == 1) if effort.kom_rank else False,
+            is_pr=False  # Placeholder
+        ))
+    return responses
+
+@router.delete("/segments/{segment_id}")
+def delete_segment(segment_id: int, db: Session = Depends(get_db)):
+    """Delete a segment and its matched efforts."""
+    segment = db.query(Segment).filter(Segment.id == segment_id).first()
+    if not segment:
+        raise HTTPException(status_code=404, detail="Segment not found")
+
+    # The FK has no ON DELETE CASCADE, so remove efforts explicitly
+    db.query(SegmentEffort).filter(SegmentEffort.segment_id == segment.id).delete()
+    db.delete(segment)
+    db.commit()
+
+    return {"message": "Segment deleted"}
+
+@router.get("/segments/{segment_id}/efforts", response_model=List[SegmentEffortResponse])
+def get_segment_leaderboard(segment_id: int, db: Session = Depends(get_db)):
+    """Get all efforts for a segment, ordered by time (leaderboard)."""
+    segment = db.query(Segment).filter(Segment.id == segment_id).first()
+    if not segment:
+        raise HTTPException(status_code=404, detail="Segment not found")
+
+    efforts = db.query(SegmentEffort).filter(SegmentEffort.segment_id == segment_id).order_by(SegmentEffort.elapsed_time.asc()).all()
+
+    responses = []
+    for effort in efforts:
+        responses.append(SegmentEffortResponse(
+            id=effort.id,
+            segment_id=effort.segment_id,
+            segment_name=segment.name,
+            activity_id=effort.activity_id,
+            elapsed_time=effort.elapsed_time,
+            start_time=effort.start_time.isoformat() if effort.start_time else None,
+            end_time=effort.end_time.isoformat() if effort.end_time else None,
+            avg_hr=effort.avg_hr,
+            avg_power=effort.avg_power,
+            kom_rank=effort.kom_rank,
+            pr_rank=None,
+            is_kom=(effort.kom_rank == 1) if effort.kom_rank else False,
+            is_pr=False
+        ))
+    return responses
+
+@router.post("/segments/scan")
+def scan_segments(db: Session = Depends(get_db)):
+    """Trigger a background job to scan all activities for segment matches."""
+    from ..services.job_manager import job_manager
+    from ..jobs.segment_matching_job import run_segment_matching_job
+    import threading
+
+    job_id = job_manager.create_job("segment_match_all")
+
+    # Run in background
+    thread = threading.Thread(target=job_manager.run_serialized, args=(job_id, run_segment_matching_job))
+    thread.start()
+
+    return {"message": "Segment scan started", "job_id": job_id}
diff --git a/FitnessSync/backend/src/api/status.py b/FitnessSync/backend/src/api/status.py
index cbc5737..d32017c 100644
--- a/FitnessSync/backend/src/api/status.py
+++ b/FitnessSync/backend/src/api/status.py
@@ -124,6 +124,12 @@ def resume_job(job_id: str):
 def cancel_job(job_id: str):
     if job_manager.request_cancel(job_id):
         return {"status": "cancelling", "message": f"Cancellation requested for job {job_id}"}
+    raise HTTPException(status_code=404, detail="Job not found or not active")
+
+@router.post("/jobs/{job_id}/force-kill")
+def force_kill_job(job_id: str):
+    if job_manager.force_fail_job(job_id):
+        return {"status": "failed", "message": f"Job {job_id} forcefully killed"}
     raise HTTPException(status_code=404, detail="Job not found")
 
 import time
diff --git a/FitnessSync/backend/src/jobs/__pycache__/segment_matching_job.cpython-311.pyc b/FitnessSync/backend/src/jobs/__pycache__/segment_matching_job.cpython-311.pyc
new file mode 100644
index 0000000..4356c41
Binary files /dev/null and b/FitnessSync/backend/src/jobs/__pycache__/segment_matching_job.cpython-311.pyc differ
diff --git a/FitnessSync/backend/src/jobs/segment_matching_job.py b/FitnessSync/backend/src/jobs/segment_matching_job.py
new file mode 100644
index 0000000..03afe52
--- /dev/null
+++ b/FitnessSync/backend/src/jobs/segment_matching_job.py
@@ -0,0 +1,70 @@
+
+import logging
+from sqlalchemy.orm import Session
+from ..models.activity import Activity
+from ..models.segment import Segment
+from ..models.segment_effort import SegmentEffort
+from ..services.segment_matcher import SegmentMatcher
+from ..services.job_manager import job_manager
+from ..services.postgresql_manager import PostgreSQLManager
+from ..utils.config import config
+from ..services.parsers import extract_points_from_file
+
+logger = logging.getLogger(__name__)
+
+def run_segment_matching_job(job_id: str):
+    """
+    Job to scan all activities and match them against all segments.
+    """
+    # 1. Setup DB
+    db_manager = PostgreSQLManager(config.DATABASE_URL)
+
+    with db_manager.get_db_session() as db:
+        try:
+            # 2. Get all activities and segments
+            activities = db.query(Activity).all()
+            total_activities = len(activities)
+
+            job_manager.update_job(job_id, progress=0, message=f"Starting scan of {total_activities} activities...")
+
+            matcher = SegmentMatcher(db)
+            total_matches = 0
+
+            for i, activity in enumerate(activities):
+                if job_manager.should_cancel(job_id):
+                    logger.info(f"Job {job_id} cancelled.")
+                    return
+
+                # Calculate progress
+                prog = int((i / total_activities) * 100)
+                job_manager.update_job(job_id, progress=prog, message=f"Scanning activity {i+1}/{total_activities} ({activity.id})")
+
+                # Check for content
+                if not activity.file_content:
+                    continue
+
+                # Points are re-extracted on every run; caching the parsed
+                # streams could speed this up later.
+                try:
+                    points = extract_points_from_file(activity.file_content, activity.file_type)
+                    if points:
+                        # SegmentMatcher only ever adds efforts, so clear this
+                        # activity's existing efforts first to avoid duplicates
+                        # on re-scan.
+                        db.query(SegmentEffort).filter(SegmentEffort.activity_id == activity.id).delete()
+
+                        efforts = matcher.match_activity(activity, points)
+                        total_matches += len(efforts)
+                        logger.info(f"Activity {activity.id}: {len(efforts)} matches")
+
+                except Exception as e:
+                    logger.error(f"Error processing activity {activity.id}: {e}")
+                    # Continue to next activity
+
+            db.commit()  # Final commit
+            job_manager.complete_job(job_id, result={"total_matches": total_matches, "activities_scanned": total_activities})
+
+        except Exception as e:
+            logger.error(f"Job {job_id} failed: {e}")
+            job_manager.fail_job(job_id, str(e))
diff --git a/FitnessSync/backend/src/models/__init__.py b/FitnessSync/backend/src/models/__init__.py
index 722be08..3176d28 100644
--- a/FitnessSync/backend/src/models/__init__.py
+++ b/FitnessSync/backend/src/models/__init__.py
@@ -12,4 +12,6 @@ from .sync_log import SyncLog
 from .activity_state import GarminActivityState
 from .health_state import HealthSyncState
 from .scheduled_job import ScheduledJob
-from .bike_setup import BikeSetup
\ No newline at end of file
+from .bike_setup import BikeSetup
+from .segment import Segment
+from .segment_effort import SegmentEffort
\ No newline at end of file
diff --git a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc
index ee1a4dc..f006722 100644
Binary files a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc and b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc differ
diff --git a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc
index 4fb45ed..036fe0c 100644
Binary files a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc and b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc differ
diff --git a/FitnessSync/backend/src/models/__pycache__/segment.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/segment.cpython-311.pyc
new
file mode 100644 index 0000000..f62aee7 Binary files /dev/null and b/FitnessSync/backend/src/models/__pycache__/segment.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/segment.cpython-313.pyc new file mode 100644 index 0000000..ee58add Binary files /dev/null and b/FitnessSync/backend/src/models/__pycache__/segment.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-311.pyc new file mode 100644 index 0000000..9b832e0 Binary files /dev/null and b/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-313.pyc new file mode 100644 index 0000000..543c93c Binary files /dev/null and b/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/models/segment.py b/FitnessSync/backend/src/models/segment.py new file mode 100644 index 0000000..4e603d1 --- /dev/null +++ b/FitnessSync/backend/src/models/segment.py @@ -0,0 +1,23 @@ +from sqlalchemy import Column, Integer, String, Float, Text, DateTime, JSON +from sqlalchemy.sql import func +from ..models import Base + +class Segment(Base): + __tablename__ = "segments" + + id = Column(Integer, primary_key=True, index=True) + name = Column(String, nullable=False) + description = Column(String, nullable=True) + distance = Column(Float, nullable=False) # in meters + avg_grade = Column(Float, nullable=True) # %. e.g. 5.5 + elevation_gain = Column(Float, nullable=True) # meters + + # Store simplified geometry as List[[lon, lat]] or similar + points = Column(JSON, nullable=False) + + # Bounding box for fast filtering: [min_lat, min_lon, max_lat, max_lon] + bounds = Column(JSON, nullable=False) + + activity_type = Column(String, nullable=False) # 'cycling', 'running' + + created_at = Column(DateTime(timezone=True), server_default=func.now()) diff --git a/FitnessSync/backend/src/models/segment_effort.py b/FitnessSync/backend/src/models/segment_effort.py new file mode 100644 index 0000000..32562d9 --- /dev/null +++ b/FitnessSync/backend/src/models/segment_effort.py @@ -0,0 +1,26 @@ +from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey +from sqlalchemy.orm import relationship +from sqlalchemy.sql import func +from ..models import Base + +class SegmentEffort(Base): + __tablename__ = "segment_efforts" + + id = Column(Integer, primary_key=True, index=True) + segment_id = Column(Integer, ForeignKey("segments.id"), nullable=False) + activity_id = Column(Integer, ForeignKey("activities.id"), nullable=False) + + elapsed_time = Column(Integer, nullable=False) # seconds + start_time = Column(DateTime, nullable=False) # Absolute start time of the effort + end_time = Column(DateTime, nullable=False) + + avg_power = Column(Integer, nullable=True) + avg_hr = Column(Integer, nullable=True) + + # Potential for ranking (1 = KOM/PR, etc.) 
- calculated dynamically or stored + kom_rank = Column(Integer, nullable=True) + + created_at = Column(DateTime(timezone=True), server_default=func.now()) + + segment = relationship("Segment") + activity = relationship("Activity") diff --git a/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc b/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc index 12d223e..bb8d378 100644 Binary files a/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc and b/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/routers/web.py b/FitnessSync/backend/src/routers/web.py index adbc245..054eb41 100644 --- a/FitnessSync/backend/src/routers/web.py +++ b/FitnessSync/backend/src/routers/web.py @@ -12,6 +12,10 @@ async def read_root(request: Request): async def activities_page(request: Request): return templates.TemplateResponse("activities.html", {"request": request}) +@router.get("/segments") +async def segments_page(request: Request): + return templates.TemplateResponse("segments.html", {"request": request}) + @router.get("/setup") async def setup_page(request: Request): return templates.TemplateResponse("setup.html", {"request": request}) diff --git a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc index 492e064..f725221 100644 Binary files a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc and b/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc new file mode 100644 index 0000000..c53514a Binary files /dev/null and b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-313.pyc new file mode 100644 index 0000000..a265331 Binary files /dev/null and b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc index 3f5aa05..12e0e81 100644 Binary files a/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc and b/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-311.pyc new file mode 100644 index 0000000..2f2ee49 Binary files /dev/null and b/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-313.pyc new file mode 100644 index 0000000..9a6ccec Binary files /dev/null and b/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc index 86a5981..ec4a4a4 100644 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc and 
b/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc index 6742d58..a745568 100644 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc and b/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc differ diff --git a/FitnessSync/backend/src/services/garmin/client.py b/FitnessSync/backend/src/services/garmin/client.py index b10711e..0dbf011 100644 --- a/FitnessSync/backend/src/services/garmin/client.py +++ b/FitnessSync/backend/src/services/garmin/client.py @@ -22,6 +22,25 @@ class GarminClient(AuthMixin, DataMixin): if is_china: garth.configure(domain="garmin.cn") + + # [TIMEOUT FIX] Inject default timeout for all requests + # GarminConnect uses self.client.garth.sess for requests + # We wrap the request method to ensure a timeout is always present + original_request = self.client.garth.sess.request + + def request_with_timeout(method, url, *args, **kwargs): + if 'timeout' not in kwargs: + kwargs['timeout'] = 30 # Default 30s timeout + try: + return original_request(method, url, *args, **kwargs) + except Exception as e: + # Log actual timeout for debugging + if "timeout" in str(e).lower(): + logger.warning(f"Garmin API Timeout for {method} {url}") + raise e + + self.client.garth.sess.request = request_with_timeout + if username and password: logger.info(f"GarminClient initialized for user: {username}") diff --git a/FitnessSync/backend/src/services/job_manager.py b/FitnessSync/backend/src/services/job_manager.py index e7914ef..94fd42a 100644 --- a/FitnessSync/backend/src/services/job_manager.py +++ b/FitnessSync/backend/src/services/job_manager.py @@ -220,4 +220,24 @@ class JobManager: job.end_time = datetime.now() db.commit() + def force_fail_job(self, job_id: str): + """ + Forcefully mark a job as failed in the database. + This does not guarantee the underlying thread stops immediately, + but it releases the UI state. + """ + with self._get_db() as db: + job = db.query(Job).filter(Job.id == job_id).first() + if job: + # We update status regardless of current state if user wants to force it + prev_status = job.status + job.status = "failed" + job.message = f"Forcefully killed by user (was {prev_status})" + job.end_time = datetime.now() + job.cancel_requested = True # Hint to thread if it's still alive + db.commit() + logger.warning(f"Job {job_id} was forcefully killed by user.") + return True + return False + job_manager = JobManager() diff --git a/FitnessSync/backend/src/services/parsers.py b/FitnessSync/backend/src/services/parsers.py new file mode 100644 index 0000000..d864a45 --- /dev/null +++ b/FitnessSync/backend/src/services/parsers.py @@ -0,0 +1,148 @@ +import io +import fitdecode +import xml.etree.ElementTree as ET +import logging +from typing import List, Dict, Any, Optional +from datetime import datetime + +logger = logging.getLogger(__name__) + +def extract_activity_data(file_content: bytes, file_type: str) -> Dict[str, List[Any]]: + """ + Extracts all relevant streams: points (lat, lon, ele), timestamps, hr, power. + Returns: { + 'points': [[lon, lat, ele], ...], + 'timestamps': [datetime, ...], + 'heart_rate': [int, ...], + 'power': [int, ...] 
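+    }
+
+    All four lists are index-aligned: entry i of every stream comes from the
+    same record, so points[i], timestamps[i], heart_rate[i] and power[i]
+    describe the same moment.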
+    """
+    if file_type == 'fit':
+        return _extract_data_from_fit(file_content)
+    elif file_type == 'tcx':
+        return _extract_data_from_tcx(file_content)
+    return {'points': [], 'timestamps': [], 'heart_rate': [], 'power': []}
+
+def extract_points_from_file(file_content: bytes, file_type: str) -> List[List[float]]:
+    # Wrapper for backward compatibility
+    data = extract_activity_data(file_content, file_type)
+    return data['points']
+
+def extract_timestamps_from_file(file_content: bytes, file_type: str) -> List[Optional[datetime]]:
+    # Wrapper for backward compatibility
+    data = extract_activity_data(file_content, file_type)
+    return data['timestamps']
+
+def _extract_data_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
+    data = {'points': [], 'timestamps': [], 'heart_rate': [], 'power': []}
+    try:
+        with io.BytesIO(file_content) as f:
+            with fitdecode.FitReader(f) as fit:
+                for frame in fit:
+                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'record':
+                        # Only records with a valid position are kept, and the
+                        # same filter is applied to every stream so the arrays
+                        # stay index-aligned with 'points'.
+                        if frame.has_field('position_lat') and frame.has_field('position_long'):
+                            lat_sc = frame.get_value('position_lat')
+                            lon_sc = frame.get_value('position_long')
+
+                            if lat_sc is not None and lon_sc is not None:
+                                # FIT stores coordinates as 32-bit semicircles:
+                                # degrees = semicircles * (180 / 2^31)
+                                lat = lat_sc * (180.0 / 2**31)
+                                lon = lon_sc * (180.0 / 2**31)
+
+                                ele = None
+                                if frame.has_field('enhanced_altitude'):
+                                    ele = frame.get_value('enhanced_altitude')
+                                elif frame.has_field('altitude'):
+                                    ele = frame.get_value('altitude')
+
+                                data['points'].append([lon, lat, ele] if ele is not None else [lon, lat])
+
+                                # Timestamps
+                                ts = frame.get_value('timestamp') if frame.has_field('timestamp') else None
+                                data['timestamps'].append(ts)
+
+                                # HR
+                                hr = frame.get_value('heart_rate') if frame.has_field('heart_rate') else None
+                                data['heart_rate'].append(hr)
+
+                                # Power
+                                pwr = frame.get_value('power') if frame.has_field('power') else None
+                                data['power'].append(pwr)
+
+    except Exception as e:
+        logger.error(f"Error parsing FIT file: {e}")
+    return data
+
+def _extract_points_from_fit(file_content: bytes) -> List[List[float]]:
+    # Deprecated: kept for backward compatibility, delegates to the full parser
+    return _extract_data_from_fit(file_content)['points']
+
+def _extract_data_from_tcx(file_content: bytes) -> Dict[str, List[Any]]:
+    data = {'points': [], 'timestamps': [], 'heart_rate': [], 'power': []}
+    try:
+        root = ET.fromstring(file_content)
+        # TCX namespaces are awkward with stdlib ElementTree, so match on tag
+        # suffixes instead of fully qualified names.
+
+        for trkpt in root.iter():
+            if trkpt.tag.endswith('Trackpoint'):
+                lat = None
+                lon = None
+                ele = None
+                ts = None
+                hr = None
+                pwr = None
+
+                for child in trkpt.iter():
+                    if child.tag.endswith('LatitudeDegrees'):
+                        try: lat = float(child.text)
+                        except Exception: pass
+                    elif child.tag.endswith('LongitudeDegrees'):
+                        try: lon = float(child.text)
+                        except Exception: pass
+                    elif child.tag.endswith('AltitudeMeters'):
+                        try: ele = float(child.text)
+                        except Exception: pass
+                    elif child.tag.endswith('Time'):
+                        try:
+                            # ISO format
+                            ts = datetime.fromisoformat(child.text.replace('Z', '+00:00'))
+                        except Exception: pass
+                    elif child.tag.endswith('HeartRateBpm'):
+                        for val in child:
+                            if val.tag.endswith('Value'):
+                                try: hr = int(val.text)
+                                except Exception: pass
+                    elif child.tag.endswith('Watts'):
+                        try: pwr = int(child.text)
+                        except Exception: pass
+
+                # TCX power is usually nested in Extensions/TPX rather than a
+                # direct child, so fall back to a deep search for a Watts tag.
+                if pwr is None:
+                    for ext in trkpt.iter():
+                        if ext.tag.endswith('Watts'):
+                            try: pwr = int(ext.text)
+                            except Exception: pass
+
+                if lat is not None and lon is not None:
+                    data['points'].append([lon, lat, ele] if ele is not None else [lon, lat])
+                    data['timestamps'].append(ts)
+                    data['heart_rate'].append(hr)
+                    data['power'].append(pwr)
+
+    except Exception as e:
+        logger.error(f"Error parsing TCX file: {e}")
+    return data
+
+def _extract_points_from_tcx(file_content: bytes) -> List[List[float]]:
+    return _extract_data_from_tcx(file_content)['points']
diff --git a/FitnessSync/backend/src/services/scheduler.py b/FitnessSync/backend/src/services/scheduler.py
index fd7eec3..4705d1f 100644
--- a/FitnessSync/backend/src/services/scheduler.py
+++ b/FitnessSync/backend/src/services/scheduler.py
@@ -157,5 +157,17 @@ class SchedulerService:
             job_record.next_run = datetime.now() + timedelta(minutes=job_record.interval_minutes)
             # session commit happens in caller loop
 
+    def trigger_job(self, job_id: int) -> bool:
+        """Manually trigger a scheduled job immediately."""
+        with self.db_manager.get_db_session() as session:
+            job = session.query(ScheduledJob).filter(ScheduledJob.id == job_id).first()
+            if not job:
+                return False
+
+            logger.info(f"Manually triggering job {job_id}")
+            self._execute_job(session, job)
+            session.commit()
+            return True
+
 # Global instance
 scheduler = SchedulerService()
diff --git a/FitnessSync/backend/src/services/segment_matcher.py b/FitnessSync/backend/src/services/segment_matcher.py
new file mode 100644
index 0000000..06e5916
--- /dev/null
+++ b/FitnessSync/backend/src/services/segment_matcher.py
@@ -0,0 +1,282 @@
+from typing import List, Optional, Tuple
+from datetime import timedelta
+import logging
+from sqlalchemy.orm import Session
+import json
+
+from ..models.activity import Activity
+from ..models.segment import Segment
+from ..models.segment_effort import SegmentEffort
+from ..utils.geo import haversine_distance, calculate_bounds, perpendicular_distance
+from ..services.parsers import extract_timestamps_from_file
+
+logger = logging.getLogger(__name__)
+
+class SegmentMatcher:
+    def __init__(self, db: Session):
+        self.db = db
+
+    def match_activity(self, activity: Activity, points: List[List[float]]) -> List[SegmentEffort]:
+        """
+        Check if the activity matches any known segments.
+        points: List of [lon, lat]
+        """
+        if not points or len(points) < 2:
+            return []
+
+        # 1. Calculate bounds of activity for fast filtering
+        act_bounds = calculate_bounds(points)  # [min_lat, min_lon, max_lat, max_lon]
+
+        # 2. Candidate filtering: bounds are stored as JSON, so a bbox overlap
+        # cannot be expressed in plain SQL without PostGIS. Fetch all segments
+        # of this activity type and filter by bounding box in Python instead.
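+        # Example: a segment whose bounds are [47.60, -122.35, 47.66, -122.27]
+        # ([min_lat, min_lon, max_lat, max_lon]) is only point-matched against
+        # activities whose own bounding box overlaps that rectangle; everything
+        # else is skipped before any per-point distance work.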
+        segments = self.db.query(Segment).filter(
+            Segment.activity_type == activity.activity_type
+        ).all()
+
+        matched_efforts = []
+
+        print(f"DEBUG SEGMENT MATCH: Checking {len(segments)} segments against Activity {activity.id} Bounds={act_bounds}")
+
+        for segment in segments:
+            seg_bounds = json.loads(segment.bounds) if isinstance(segment.bounds, str) else segment.bounds
+
+            if self._check_bounds_overlap(act_bounds, seg_bounds):
+                try:
+                    seg_points = json.loads(segment.points) if isinstance(segment.points, str) else segment.points
+                    print(f"DEBUG: Overlap OK. Matching {segment.name}...")
+                    indices = self._match_segment(segment, seg_points, activity, points)
+                    if indices:
+                        start_idx, end_idx = indices
+                        print(f"DEBUG: MATCH FOUND for {segment.name}! Indices {start_idx}-{end_idx}")
+                        effort = self._create_effort(segment, activity, start_idx, end_idx)
+                        if effort:
+                            matched_efforts.append(effort)
+                except Exception as e:
+                    logger.error(f"Error matching segment {segment.id}: {e}")
+
+        if matched_efforts:
+            logger.info(f"Activity {activity.id} matched {len(matched_efforts)} segments.")
+            print(f"DEBUG SEGMENT MATCH: Matched {len(matched_efforts)} segments for Activity {activity.id}. Saving...")
+            self.db.add_all(matched_efforts)
+            self.db.commit()
+        else:
+            print(f"DEBUG SEGMENT MATCH: No segments matched for Activity {activity.id}")
+
+        return matched_efforts
+
+    def _create_effort(self, segment, activity, start_idx, end_idx) -> Optional[SegmentEffort]:
+        # Timestamps and sensor streams require re-parsing the activity file;
+        # a cached reader could avoid this later.
+        if not activity.file_content:
+            return None
+
+        from ..services.parsers import extract_activity_data
+
+        data = extract_activity_data(activity.file_content, activity.file_type)
+        timestamps = data['timestamps']
+
+        if not timestamps or len(timestamps) <= end_idx:
+            logger.warning("Could not extract enough timestamps for segment match.")
+            return None
+
+        start_ts = timestamps[start_idx]
+        end_ts = timestamps[end_idx]
+
+        if not start_ts or not end_ts:
+            return None
+
+        elapsed = (end_ts - start_ts).total_seconds()
+
+        # Averages over the effort window, ignoring records without data
+        avg_hr = None
+        avg_pwr = None
+
+        eff_hr = data['heart_rate'][start_idx : end_idx+1]
+        eff_pwr = data['power'][start_idx : end_idx+1]
+
+        valid_hr = [x for x in eff_hr if x is not None]
+        valid_pwr = [x for x in eff_pwr if x is not None]
+
+        if valid_hr:
+            avg_hr = int(sum(valid_hr) / len(valid_hr))
+        if valid_pwr:
+            avg_pwr = int(sum(valid_pwr) / len(valid_pwr))
+
+        return SegmentEffort(
+            segment_id=segment.id,
+            activity_id=activity.id,
+            elapsed_time=elapsed,
+            start_time=start_ts,
+            end_time=end_ts,
+            avg_hr=avg_hr,
+            avg_power=avg_pwr,
+            kom_rank=None  # Placeholder; ranks are computed later
+        )
+
+    def _check_bounds_overlap(self, b1: List[float], b2: List[float]) -> bool:
+        # b: [min_lat, min_lon, max_lat, max_lon]
+        # Boxes overlap iff they are not separated on either axis.
+        if not b1 or not b2: return False
+
+        lat_separate = b1[2] < b2[0] or b1[0] > b2[2]
+        lon_separate = b1[3] < b2[1] or b1[1] > b2[3]
+
+        return not (lat_separate or lon_separate)
+
+    def _match_segment(self, segment: Segment, seg_points: List[List[float]], activity: Activity, act_points: List[List[float]]) -> Optional[Tuple[int, int]]:
+        """
+        Core matching logic.
+
+        Entry detection: find activity points within ENTRY_RADIUS of the
+        segment start. Continuous tracking: from each candidate start, every
+        subsequent activity point must stay within CORRIDOR_RADIUS of the
+        segment path. Completion: the effort ends at the first point within
+        ENTRY_RADIUS of the segment end, provided enough of the segment's
+        distance has been covered.
+        """
+        if not seg_points or len(seg_points) < 2: return None
+
+        # Parameters
+        ENTRY_RADIUS = 25.0     # meters
+        CORRIDOR_RADIUS = 35.0  # meters
+
+        start_node = seg_points[0]  # [lon, lat]
+        end_node = seg_points[-1]
+
+        # 1. Find potential start indices in activity
+        start_candidates = []
+        for i, p in enumerate(act_points):
+            dist = haversine_distance(p[1], p[0], start_node[1], start_node[0])
+            if dist <= ENTRY_RADIUS:
+                start_candidates.append(i)
+
+        if not start_candidates:
+            return None
+
+        # 2. For each candidate start, walk forward looking for the end node
+        # while enforcing the corridor constraint on every point in between.
+        for start_idx in start_candidates:
+            max_search = len(act_points)
+            end_candidate_idx = -1
+
+            # Track accumulated distance for this effort
+            effort_accum_dist = 0.0
+
+            for j in range(start_idx + 1, max_search):
+                p = act_points[j]
+                prev_p = act_points[j-1]
+
+                # Accumulate distance
+                step_dist = haversine_distance(p[1], p[0], prev_p[1], prev_p[0])
+                effort_accum_dist += step_dist
+
+                d_end = haversine_distance(p[1], p[0], end_node[1], end_node[0])
+
+                # Corridor check: deviating past the corridor fails this candidate
+                if self._min_dist_to_segment_path(p, seg_points) > CORRIDOR_RADIUS:
+                    break  # Stop searching this start candidate
+
+                # Completion check: within the entry radius of the end node AND
+                # at least 80% of the segment distance covered, so the start of
+                # a loop is not immediately mistaken for its end.
+                if d_end <= ENTRY_RADIUS:
+                    if effort_accum_dist >= 0.8 * segment.distance:
+                        end_candidate_idx = j
+                        break
+
+            if end_candidate_idx != -1:
+                return (start_idx, end_candidate_idx)
+
+        return None
+
+    def _min_dist_to_segment_path(self, point: List[float], seg_points: List[List[float]]) -> float:
+        """
+        Distance from point to polyline.
+        """
+        min_d = float('inf')
+        for i in range(len(seg_points) - 1):
+            d = perpendicular_distance(point, seg_points[i], seg_points[i+1])
+            if d < min_d:
+                min_d = d
+        return min_d
diff --git a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc
index f21d257..edb828d 100644
Binary files a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc and b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc differ
diff --git a/FitnessSync/backend/src/services/sync/activity.py b/FitnessSync/backend/src/services/sync/activity.py
index bb737b7..2574a51 100644
--- a/FitnessSync/backend/src/services/sync/activity.py
+++ b/FitnessSync/backend/src/services/sync/activity.py
@@ -261,6 +261,20 @@ class GarminActivitySync:
                 self.logger.warning(f"Failed to redownload {activity_id}")
                 return False
 
+            self.db_session.flush()  # Flush so segment matching sees the fresh file content
+
+            # TRIGGER SEGMENT MATCHING
+            try:
+                from ..segment_matcher import SegmentMatcher
+                from ...services.parsers import extract_points_from_file
+
+                points = extract_points_from_file(activity.file_content, activity.file_type)
+                if points and len(points) > 10:
+                    matcher = SegmentMatcher(self.db_session)
+                    matcher.match_activity(activity, points)
+            except Exception as sm_e:
+                self.logger.error(f"Segment matching failed for {activity_id}: {sm_e}")
+
             self.db_session.commit()
             return True
 
diff --git a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc
new file mode 100644
index 0000000..57e9c24
Binary files /dev/null and b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc differ
diff --git a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc
new file mode 100644
index 0000000..5ded0de
Binary files /dev/null and b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc differ
diff --git a/FitnessSync/backend/src/utils/geo.py b/FitnessSync/backend/src/utils/geo.py
new file mode 100644
index 0000000..14b7c97
--- /dev/null
+++ b/FitnessSync/backend/src/utils/geo.py
@@ -0,0 +1,98 @@
+import math
+from typing import List, Tuple
+
+def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
+    """
+    Calculate the great circle distance between two points
+    on the earth (specified in decimal degrees), in meters.
+    """
+    # Convert decimal degrees to radians
+    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])
+
+    # Haversine formula
+    dlon = lon2 - lon1
+    dlat = lat2 - lat1
+    a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
+    c = 2 * math.asin(math.sqrt(a))
+    r = 6371000  # Radius of earth in meters
+    return c * r
+
+def perpendicular_distance(point: List[float], line_start: List[float], line_end: List[float]) -> float:
+    """
+    Calculate the perpendicular distance (in meters) from a point to a line segment.
+    Points are [lon, lat].
+    A true geodesic cross-track distance is unnecessary here: for short GPS
+    segments away from the poles, projecting to a local flat plane in meters
+    is a standard fast approximation for RDP. Longitude degrees shrink with
+    latitude, so both axes are scaled to meters relative to line_start.
+    """
+    # 1 deg latitude ~ 111139 m; 1 deg longitude ~ 111139 * cos(lat) m
+    ref_lat = line_start[1]
+    lat_scale = 111139
+    lon_scale = 111139 * math.cos(math.radians(ref_lat))
+
+    def to_xy(p):
+        return [(p[0] - line_start[0]) * lon_scale, (p[1] - line_start[1]) * lat_scale]
+
+    p = to_xy(point)
+    a = to_xy(line_start)  # [0, 0]
+    b = to_xy(line_end)
+
+    # Distance from point p to line segment ab; if a == b, fall back to dist(p, a)
+    l2 = (b[0]-a[0])**2 + (b[1]-a[1])**2
+    if l2 == 0: return math.sqrt(p[0]**2 + p[1]**2)
+
+    # Projection parameter t of p onto line ab, clamped to the segment
+    t = ((p[0]-a[0])*(b[0]-a[0]) + (p[1]-a[1])*(b[1]-a[1])) / l2
+    t = max(0, min(1, t))
+
+    proj = [a[0] + t * (b[0]-a[0]), a[1] + t * (b[1]-a[1])]
+    return math.sqrt((p[0]-proj[0])**2 + (p[1]-proj[1])**2)
+
+def ramer_douglas_peucker(points: List[List[float]], epsilon: float) -> List[List[float]]:
+    """
+    Simplify a list of [lon, lat] points using the RDP algorithm.
+    epsilon is in meters.
+    """
+    if len(points) < 3:
+        return points
+
+    dmax = 0.0
+    index = 0
+    end = len(points) - 1
+
+    # Find the point with the maximum distance
+    for i in range(1, end):
+        d = perpendicular_distance(points[i], points[0], points[end])
+        if d > dmax:
+            index = i
+            dmax = d
+
+    # If max distance is greater than epsilon, recursively simplify
+    if dmax > epsilon:
+        rec_results1 = ramer_douglas_peucker(points[:index+1], epsilon)
+        rec_results2 = ramer_douglas_peucker(points[index:], epsilon)
+
+        # Join the halves, dropping the duplicated split point
+        return rec_results1[:-1] + rec_results2
+    else:
+        return [points[0], points[end]]
+
+def calculate_bounds(points: List[List[float]]) -> List[float]:
+    """
+    Return [min_lat, min_lon, max_lat, max_lon].
+    points are [lon, lat]
+    """
+    if not points:
+        return [0, 0, 0, 0]
+
+    lons = [p[0] for p in points]
+    lats = [p[1] for p in points]
+    return [min(lats), min(lons), max(lats), max(lons)]
diff --git a/FitnessSync/backend/templates/activity_view.html b/FitnessSync/backend/templates/activity_view.html
index 94c58d0..793c501 100644
--- a/FitnessSync/backend/templates/activity_view.html
+++ b/FitnessSync/backend/templates/activity_view.html
@@ -56,6 +56,9 @@
+                <button type="button" class="btn btn-sm btn-outline-primary" id="create-segment-btn" onclick="toggleSegmentMode()">
+                    Create Segment
+                </button>
 
@@ -113,6 +116,35 @@
 
                 Detailed Metrics
 
+                <!-- Matched Segments card; populated by loadEfforts() -->
+                <div class="row mt-4">
+                    <div class="col-12">
+                        <div class="card">
+                            <div class="card-header">
+                                Matched Segments
+                            </div>
+                            <div class="card-body p-0">
+                                <table class="table mb-0" id="efforts-table">
+                                    <thead>
+                                        <tr>
+                                            <th>Segment</th>
+                                            <th>Time</th>
+                                            <th>Awards</th>
+                                            <th>Rank</th>
+                                        </tr>
+                                    </thead>
+                                    <tbody>
+                                        <tr>
+                                            <td colspan="4">Loading segments...</td>
+                                        </tr>
+                                    </tbody>
+                                </table>
+                            </div>
+                        </div>
+                    </div>
+                </div>
@@ -417,6 +449,7 @@ const res = await fetch(`/api/activities/${activityId}/details`); if (!res.ok) throw new Error("Failed to load details"); const data = await res.json(); + window.currentDbId = data.id; // Store for segment creation // Header document.getElementById('act-name').textContent = data.activity_name || 'Untitled Activity'; @@ -460,6 +493,11 @@ document.getElementById('m-bike-info').innerHTML = txt; } + // Load Efforts + if (window.currentDbId) { + loadEfforts(window.currentDbId); + } + } catch (e) { console.error(e); showToast("Error", "Failed to load activity details", "error"); @@ -472,6 +510,10 @@ if (res.ok) { const geojson = await res.json(); if (geojson.features && geojson.features.length > 0 && geojson.features[0].geometry.coordinates.length > 0) { + // GeoJSON coords are [lon, lat]. Leaflet wants [lat, lon] + const coords = geojson.features[0].geometry.coordinates; + trackPoints = coords.map(p => [p[1], p[0]]); + const layer = L.geoJSON(geojson, { style: { color: 'red', weight: 4, opacity: 0.7 } }).addTo(map); @@ -492,5 +534,190 @@ } function formatDuration(s) { if (!s) return '-'; const h = Math.floor(s / 3600), m = Math.floor((s % 3600) / 60), sec = s % 60; return `${h}h ${m}m ${sec}s`; } + + // Segment Creation Logic + let segmentMode = false; + let startMarker = null; + let endMarker = null; + let trackPoints = []; // List of [lat, lon] from GeoJSON + let startIndex = 0; + let endIndex = 0; + + function toggleSegmentMode() { + segmentMode = !segmentMode; + const btn = document.getElementById('create-segment-btn'); + if (segmentMode) { + btn.classList.add('active'); + btn.innerHTML = ' Save Segment'; + btn.onclick = saveSegment; + initSegmentMarkers(); + } else { + // Cancelled + btn.classList.remove('active'); + btn.innerHTML = ' Create Segment'; + btn.onclick = toggleSegmentMode; + removeSegmentMarkers(); + } + } + + function removeSegmentMarkers() { + if (startMarker) map.removeLayer(startMarker); + if (endMarker) map.removeLayer(endMarker); + startMarker = null; + endMarker = null; + } + + function initSegmentMarkers() { + if (trackPoints.length < 2) { + alert("Not enough points to create a segment."); + toggleSegmentMode(); + return; + } + + // Default positions: 20% and 80% + startIndex = Math.floor(trackPoints.length * 0.2); + endIndex = Math.floor(trackPoints.length * 0.8); + + const startIcon = L.divIcon({ className: 'bg-success rounded-circle border border-white', iconSize: [12, 12] }); + const endIcon = L.divIcon({ className: 'bg-danger rounded-circle border border-white', iconSize: [12, 12] }); + + startMarker = L.marker(trackPoints[startIndex], { draggable: true, icon: startIcon }).addTo(map); + endMarker = L.marker(trackPoints[endIndex], { draggable: true, icon: endIcon }).addTo(map); + + // Snap logic + function setupDrag(marker, isStart) { + marker.on('drag', function (e) { + const ll = e.latlng; + let closestDist = Infinity; + let closestIdx = -1; + // Simple snap for visual feedback during drag + for (let i = 0; i < trackPoints.length; i++) { + const d = map.distance(ll, trackPoints[i]); + if (d < closestDist) { + closestDist = d; + closestIdx = i; + } + } + // Optional: visual snap? Leaflet handles drag msg. 
+            marker.on('dragend', function (e) {
+                const ll = e.target.getLatLng();
+                let closestDist = Infinity;
+                let closestIdx = -1;
+
+                // Constrain the search so Start stays before End
+                let searchStart = 0;
+                let searchEnd = trackPoints.length;
+
+                if (isStart) {
+                    if (endIndex > 0) searchEnd = endIndex;
+                } else {
+                    if (startIndex >= 0) searchStart = startIndex;
+                }
+
+                // "Stickiness": tie-break toward the marker's current index
+                const currentIndex = isStart ? startIndex : endIndex;
+                const indexPenalty = 0.0001;
+
+                for (let i = searchStart; i < searchEnd; i++) {
+                    const d_spatial = map.distance(ll, trackPoints[i]);
+                    const d_index = Math.abs(i - currentIndex);
+
+                    const score = d_spatial + (d_index * indexPenalty);
+
+                    if (score < closestDist) {
+                        closestDist = score;
+                        closestIdx = i;
+                    }
+                }
+
+                if (closestIdx !== -1) {
+                    marker.setLatLng(trackPoints[closestIdx]);
+                    if (isStart) startIndex = closestIdx;
+                    else endIndex = closestIdx;
+                }
+            });
+        }
+
+        setupDrag(startMarker, true);
+        setupDrag(endMarker, false);
+    }
+
+    async function saveSegment() {
+        if (startIndex >= endIndex) {
+            alert("Start point must be before End point.");
+            return;
+        }
+
+        const name = prompt("Enter Segment Name:");
+        if (!name) return;
+
+        try {
+            const res = await fetch('/api/segments/create', {
+                method: 'POST',
+                headers: { 'Content-Type': 'application/json' },
+                body: JSON.stringify({
+                    name: name,
+                    activity_id: window.currentDbId,
+                    start_index: startIndex,
+                    end_index: endIndex
+                })
+            });
+
+            if (res.ok) {
+                alert("Segment created!");
+                toggleSegmentMode(); // Reset UI
+                // Refresh the matched-segments table
+                loadEfforts(window.currentDbId);
+            } else {
+                const err = await res.json();
+                alert("Error: " + err.detail);
+            }
+
+        } catch (e) {
+            console.error(e);
+            alert("Error creating segment: " + e.message);
+        }
+    }
+
+    async function loadEfforts(dbId) {
+        const tbody = document.querySelector('#efforts-table tbody');
+        try {
+            const res = await fetch(`/api/activities/${dbId}/efforts`);
+            if (res.ok) {
+                const efforts = await res.json();
+                tbody.innerHTML = '';
+                if (efforts.length === 0) {
+                    tbody.innerHTML = '<tr><td colspan="4">No segments matched.</td></tr>';
+                    return;
+                }
+
+                efforts.forEach(eff => {
+                    const tr = document.createElement('tr');
+                    let awards = '';
+                    if (eff.is_kom) awards += ' CR';
+                    if (eff.is_pr) awards += ' PR';
+
+                    tr.innerHTML = `
+                        <td>${eff.segment_name}</td>
+                        <td>${formatDuration(eff.elapsed_time)}</td>
+                        <td>${awards}</td>
+                        <td>${eff.kom_rank ? '#' + eff.kom_rank : '-'}</td>
+                    `;
+                    tbody.appendChild(tr);
+                });
+            } else {
+                tbody.innerHTML = '<tr><td colspan="4">Failed to load segments.</td></tr>';
+            }
+        } catch (e) {
+            tbody.innerHTML = '<tr><td colspan="4">Error loading segments.</td></tr>';
+        }
+    }
+
+    // Leaflet Map Init
{% endblock %}
\ No newline at end of file
diff --git a/FitnessSync/backend/templates/base.html b/FitnessSync/backend/templates/base.html
index 089215d..7d0ac8a 100644
--- a/FitnessSync/backend/templates/base.html
+++ b/FitnessSync/backend/templates/base.html
@@ -72,6 +72,9 @@
                 Activities
+                <li class="nav-item">
+                    <a class="nav-link" href="/segments">Segments</a>
+                </li>
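
A quick sanity check for the new geo helpers — a minimal sketch, not part of the diff, assuming it is run from FitnessSync/backend/ so that `src` is importable (the same layout main.py's `from src.api import segments` relies on):

    from src.utils.geo import haversine_distance, ramer_douglas_peucker, calculate_bounds

    # Two points 0.001 deg of latitude apart are ~111 m apart on the ground.
    print(round(haversine_distance(47.600, -122.330, 47.601, -122.330), 1))  # ~111.2

    # A straight five-point track collapses to its two endpoints under RDP,
    # since every interior point lies within epsilon of the chord.
    track = [[-122.330, 47.600 + i * 0.001] for i in range(5)]  # [lon, lat]
    print(ramer_douglas_peucker(track, epsilon=10.0))  # only the endpoints remain

    # Bounds come back as [min_lat, min_lon, max_lat, max_lon].
    print(calculate_bounds(track))  # approximately [47.6, -122.33, 47.604, -122.33]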