added segments
This commit is contained in:
Binary file not shown.
Binary file not shown.
BIN
FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc
Normal file
BIN
FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc
Normal file
Binary file not shown.
Binary file not shown.
@@ -11,10 +11,8 @@ from ..utils.config import config
|
||||
# New Sync Imports
|
||||
from ..services.job_manager import job_manager
|
||||
from ..models.activity_state import GarminActivityState
|
||||
import fitdecode
|
||||
import io
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime
|
||||
from ..services.parsers import extract_points_from_file
|
||||
|
||||
|
||||
router = APIRouter()
|
||||
@@ -480,64 +478,7 @@ async def get_sync_status_summary(db: Session = Depends(get_db)):
|
||||
return {}
|
||||
|
||||
|
||||
def _extract_points_from_fit(file_content: bytes) -> List[List[float]]:
    """Decode a FIT file and return its GPS track as [lon, lat] pairs.

    Walks every 'record' data frame and converts Garmin semicircle
    coordinates to degrees. On any parse error the points gathered so
    far are returned (possibly an empty list).
    """
    # Garmin stores lat/long as semicircles: degrees = semicircle * 180 / 2^31
    semicircle_to_deg = 180.0 / 2 ** 31
    coords: List[List[float]] = []
    try:
        with io.BytesIO(file_content) as buffer:
            with fitdecode.FitReader(buffer) as reader:
                for frame in reader:
                    if frame.frame_type != fitdecode.FIT_FRAME_DATA or frame.name != 'record':
                        continue
                    if not (frame.has_field('position_lat') and frame.has_field('position_long')):
                        continue
                    raw_lat = frame.get_value('position_lat')
                    raw_lon = frame.get_value('position_long')
                    if raw_lat is None or raw_lon is None:
                        continue
                    coords.append([raw_lon * semicircle_to_deg, raw_lat * semicircle_to_deg])
    except Exception as e:
        # Best-effort: log and fall through with whatever was decoded.
        logger.error(f"Error parsing FIT file: {e}")
    return coords
|
||||
|
||||
def _extract_points_from_tcx(file_content: bytes) -> List[List[float]]:
|
||||
"""
|
||||
Extract [lon, lat] points from a TCX file content.
|
||||
"""
|
||||
points = []
|
||||
try:
|
||||
# TCX is XML
|
||||
# Namespace usually exists
|
||||
root = ET.fromstring(file_content)
|
||||
# Namespaces are annoying in ElementTree, usually {http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}
|
||||
# We can just iterate and ignore namespace or handle it.
|
||||
# Let's try ignoring namespace by using local-name() in xpath if lxml, but this is stdlib ET.
|
||||
# Just strip namespace for simplicity
|
||||
|
||||
for trkpt in root.iter():
|
||||
if trkpt.tag.endswith('Trackpoint'):
|
||||
lat = None
|
||||
lon = None
|
||||
for child in trkpt.iter():
|
||||
if child.tag.endswith('LatitudeDegrees'):
|
||||
try: lat = float(child.text)
|
||||
except: pass
|
||||
elif child.tag.endswith('LongitudeDegrees'):
|
||||
try: lon = float(child.text)
|
||||
except: pass
|
||||
|
||||
if lat is not None and lon is not None:
|
||||
points.append([lon, lat])
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing TCX file: {e}")
|
||||
return points
|
||||
|
||||
@router.get("/activities/{activity_id}/geojson")
|
||||
async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)):
|
||||
@@ -550,14 +491,9 @@ async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)):
|
||||
raise HTTPException(status_code=404, detail="Activity or file content not found")
|
||||
|
||||
points = []
|
||||
if activity.file_type == 'fit':
|
||||
points = _extract_points_from_fit(activity.file_content)
|
||||
elif activity.file_type == 'tcx':
|
||||
points = _extract_points_from_tcx(activity.file_content)
|
||||
if activity.file_type in ['fit', 'tcx']:
|
||||
points = extract_points_from_file(activity.file_content, activity.file_type)
|
||||
else:
|
||||
# Try FIT or TCX anyway?
|
||||
# Default to FIT check headers?
|
||||
# For now just log warning
|
||||
logger.warning(f"Unsupported file type for map: {activity.file_type}")
|
||||
|
||||
if not points:
|
||||
|
||||
@@ -129,3 +129,11 @@ def delete_scheduled_job(job_id: int, db: Session = Depends(get_db)):
|
||||
db.delete(job)
|
||||
db.commit()
|
||||
return None
|
||||
|
||||
@router.post("/scheduling/jobs/{job_id}/run", status_code=200)
def run_scheduled_job(job_id: int):
    """Manually trigger a scheduled job."""
    from ..services.scheduler import scheduler

    triggered = scheduler.trigger_job(job_id)
    if not triggered:
        raise HTTPException(status_code=404, detail="Job not found")
    return {"status": "triggered", "message": f"Job {job_id} triggered successfully"}
|
||||
|
||||
226
FitnessSync/backend/src/api/segments.py
Normal file
226
FitnessSync/backend/src/api/segments.py
Normal file
@@ -0,0 +1,226 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from typing import List, Optional
|
||||
from sqlalchemy.orm import Session
|
||||
from ..models.segment import Segment
|
||||
from ..models.segment_effort import SegmentEffort
|
||||
from ..services.postgresql_manager import PostgreSQLManager
|
||||
from ..utils.config import config
|
||||
from pydantic import BaseModel
|
||||
import json
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
def get_db():
    """FastAPI dependency: yield a database session scoped to one request."""
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as session:
        yield session
|
||||
|
||||
class SegmentCreate(BaseModel):
    """Request payload for carving a new segment out of an existing activity."""
    name: str
    description: Optional[str] = None
    activity_id: int  # ID of the source activity whose track defines the segment
    start_index: int  # index of the first track point included in the segment
    end_index: int  # index of the last track point included (inclusive)
|
||||
|
||||
class SegmentEffortResponse(BaseModel):
    """API shape for a single effort (one traversal of a segment)."""
    id: int
    segment_id: int
    segment_name: str  # denormalized from the related Segment for display
    activity_id: int
    elapsed_time: float
    start_time: Optional[str]  # ISO-8601 string; None when not recorded
    end_time: Optional[str]  # ISO-8601 string; None when not recorded
    avg_hr: Optional[int] = None
    avg_power: Optional[int] = None
    kom_rank: Optional[int]  # leaderboard position; None when unranked
    pr_rank: Optional[int]  # currently always None (placeholder in endpoints)
    is_kom: bool  # True when kom_rank == 1
    is_pr: bool  # currently always False (placeholder in endpoints)
|
||||
|
||||
|
||||
class SegmentResponse(BaseModel):
    """Public representation of a stored segment."""
    id: int
    name: str
    distance: float  # sum of haversine distances between consecutive points
    elevation_gain: Optional[float]
    activity_type: str
    points: List[List[float]]  # simplified [lon, lat] polyline
|
||||
|
||||
@router.post("/segments/create")
def create_segment(payload: SegmentCreate, db: Session = Depends(get_db)):
    """Create a new segment from a slice of an activity's GPS track.

    Extracts the activity's points, slices [start_index, end_index]
    (inclusive), simplifies the geometry for storage, computes distance
    and elevation gain, persists the Segment, and immediately tries to
    match the source activity against it.

    Raises:
        HTTPException 404: unknown activity.
        HTTPException 400: no points available or indices out of range.
    """
    import logging
    from ..models.activity import Activity
    from ..services.parsers import extract_points_from_file
    from ..utils.geo import ramer_douglas_peucker, calculate_bounds, haversine_distance

    logger = logging.getLogger(__name__)

    activity = db.query(Activity).filter(Activity.id == payload.activity_id).first()
    if not activity:
        raise HTTPException(status_code=404, detail="Activity not found")

    points = extract_points_from_file(activity.file_content, activity.file_type)

    # Validate the requested slice before touching the list (also rejects
    # negative or inverted index ranges, which a bare slice would silently accept).
    if (
        not points
        or payload.start_index < 0
        or payload.start_index > payload.end_index
        or len(points) <= payload.end_index
    ):
        logger.warning(
            "Invalid segment indices for activity %s: start=%s end=%s points=%s",
            payload.activity_id, payload.start_index, payload.end_index, len(points),
        )
        raise HTTPException(status_code=400, detail="Invalid points or indices")

    # Inclusive slice of the requested track section.
    segment_points = points[payload.start_index : payload.end_index + 1]

    # Simplify geometry for storage; epsilon=10.0 matches the original tuning.
    simplified_points = ramer_douglas_peucker(segment_points, epsilon=10.0)

    # Bounding box, used as a cheap pre-filter when matching activities.
    bounds = calculate_bounds(segment_points)

    # Distance: haversine sum over consecutive points. Elevation gain: sum
    # of positive altitude deltas, only when both points carry a z-coord.
    dist = 0.0
    elev_gain = 0.0
    for i in range(len(segment_points) - 1):
        p1 = segment_points[i]
        p2 = segment_points[i + 1]
        dist += haversine_distance(p1[1], p1[0], p2[1], p2[0])
        if len(p1) > 2 and len(p2) > 2 and p1[2] is not None and p2[2] is not None:
            diff = p2[2] - p1[2]
            if diff > 0:
                elev_gain += diff

    segment = Segment(
        name=payload.name,
        description=payload.description,
        distance=dist,
        elevation_gain=elev_gain,
        activity_type=activity.activity_type or 'cycling',
        points=json.dumps(simplified_points),
        bounds=json.dumps(bounds),
    )
    db.add(segment)
    db.commit()
    db.refresh(segment)

    # Match the source activity against the new segment right away so the
    # creator sees their own effort without waiting for a full scan.
    try:
        from ..services.segment_matcher import SegmentMatcher
        matcher = SegmentMatcher(db)
        # Reuse the already-extracted point list.
        matcher.match_activity(activity, points)
    except Exception as e:
        # Segment creation succeeded; matching is best-effort.
        logger.error(f"Error executing immediate match: {e}")

    return {"message": "Segment created", "id": segment.id}
|
||||
|
||||
@router.get("/segments", response_model=List[SegmentResponse])
def list_segments(db: Session = Depends(get_db)):
    """Return every stored segment with its decoded point list."""
    def decode_points(raw):
        # Points may be stored as a JSON string or already deserialized.
        return json.loads(raw) if isinstance(raw, str) else raw

    return [
        SegmentResponse(
            id=seg.id,
            name=seg.name,
            distance=seg.distance,
            elevation_gain=seg.elevation_gain,
            activity_type=seg.activity_type,
            points=decode_points(seg.points),
        )
        for seg in db.query(Segment).all()
    ]
|
||||
|
||||
@router.get("/activities/{activity_id}/efforts", response_model=List[SegmentEffortResponse])
def get_activity_efforts(activity_id: int, db: Session = Depends(get_db)):
    """Get all segment efforts for a specific activity."""
    from ..models.activity import Activity

    # 404 early when the numeric activity id is unknown.
    activity = db.query(Activity).filter(Activity.id == activity_id).first()
    if activity is None:
        raise HTTPException(status_code=404, detail="Activity not found")

    efforts = db.query(SegmentEffort).filter(SegmentEffort.activity_id == activity.id).all()

    # Enrich each effort with its segment's name for display.
    return [
        SegmentEffortResponse(
            id=e.id,
            segment_id=e.segment_id,
            segment_name=e.segment.name,
            activity_id=e.activity_id,
            elapsed_time=e.elapsed_time,
            start_time=e.start_time.isoformat() if e.start_time else None,
            end_time=e.end_time.isoformat() if e.end_time else None,
            avg_hr=e.avg_hr,
            avg_power=e.avg_power,
            kom_rank=e.kom_rank,
            pr_rank=None,  # placeholder until PR tracking exists
            is_kom=(e.kom_rank == 1) if e.kom_rank else False,
            is_pr=False,  # placeholder
        )
        for e in efforts
    ]
|
||||
|
||||
@router.delete("/segments/{segment_id}")
def delete_segment(segment_id: int, db: Session = Depends(get_db)):
    """Delete a segment together with all of its recorded efforts."""
    segment = db.query(Segment).filter(Segment.id == segment_id).first()
    if segment is None:
        raise HTTPException(status_code=404, detail="Segment not found")

    # Efforts are removed explicitly in case the FK has no cascade configured.
    db.query(SegmentEffort).filter(SegmentEffort.segment_id == segment.id).delete()
    db.delete(segment)
    db.commit()

    return {"message": "Segment deleted"}
|
||||
|
||||
@router.get("/segments/{segment_id}/efforts", response_model=List[SegmentEffortResponse])
def get_segment_leaderboard(segment_id: int, db: Session = Depends(get_db)):
    """Get all efforts for a segment, ordered by time (Leaderboard)."""
    segment = db.query(Segment).filter(Segment.id == segment_id).first()
    if segment is None:
        raise HTTPException(status_code=404, detail="Segment not found")

    # Fastest first: ascending elapsed time is the leaderboard order.
    ranked = (
        db.query(SegmentEffort)
        .filter(SegmentEffort.segment_id == segment_id)
        .order_by(SegmentEffort.elapsed_time.asc())
        .all()
    )

    return [
        SegmentEffortResponse(
            id=e.id,
            segment_id=e.segment_id,
            segment_name=segment.name,
            activity_id=e.activity_id,
            elapsed_time=e.elapsed_time,
            start_time=e.start_time.isoformat() if e.start_time else None,
            end_time=e.end_time.isoformat() if e.end_time else None,
            avg_hr=e.avg_hr,
            avg_power=e.avg_power,
            kom_rank=e.kom_rank,
            pr_rank=None,
            is_kom=(e.kom_rank == 1) if e.kom_rank else False,
            is_pr=False,
        )
        for e in ranked
    ]
|
||||
|
||||
@router.post("/segments/scan")
def scan_segments(db: Session = Depends(get_db)):
    """Trigger a background job to scan all activities for segment matches.

    Returns immediately with the job id; progress is tracked through the
    job manager. The worker thread is a daemon so a hung scan cannot
    prevent interpreter shutdown.
    """
    import threading
    from ..services.job_manager import job_manager
    from ..jobs.segment_matching_job import run_segment_matching_job

    job_id = job_manager.create_job("segment_match_all")

    # Run in background; run_serialized prevents overlapping scans.
    worker = threading.Thread(
        target=job_manager.run_serialized,
        args=(job_id, run_segment_matching_job),
        daemon=True,
    )
    worker.start()

    return {"message": "Segment scan started", "job_id": job_id}
|
||||
@@ -124,6 +124,12 @@ def resume_job(job_id: str):
|
||||
def cancel_job(job_id: str):
    """Request cooperative cancellation of an active job."""
    accepted = job_manager.request_cancel(job_id)
    if not accepted:
        raise HTTPException(status_code=404, detail="Job not found or not active")
    return {"status": "cancelling", "message": f"Cancellation requested for job {job_id}"}
|
||||
|
||||
@router.post("/jobs/{job_id}/force-kill")
def force_kill_job(job_id: str):
    """Forcefully mark a job as failed, bypassing cooperative cancellation."""
    if not job_manager.force_fail_job(job_id):
        raise HTTPException(status_code=404, detail="Job not found")
    return {"status": "failed", "message": f"Job {job_id} forcefully killed"}
|
||||
|
||||
import time
|
||||
|
||||
Reference in New Issue
Block a user