added activity view
This commit is contained in:
@@ -1,4 +1,4 @@
|
||||
from fastapi import APIRouter, Query, Response, HTTPException, Depends
|
||||
from fastapi import APIRouter, Query, Response, HTTPException, Depends, BackgroundTasks
|
||||
from pydantic import BaseModel
|
||||
from typing import List, Optional, Dict, Any
|
||||
from sqlalchemy import func
|
||||
@@ -8,6 +8,15 @@ from ..services.postgresql_manager import PostgreSQLManager
|
||||
from sqlalchemy.orm import Session
|
||||
from ..utils.config import config
|
||||
|
||||
# New Sync Imports
|
||||
from ..services.job_manager import job_manager
|
||||
from ..models.activity_state import GarminActivityState
|
||||
import fitdecode
|
||||
import io
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -17,6 +26,13 @@ def get_db():
|
||||
with db_manager.get_db_session() as session:
|
||||
yield session
|
||||
|
||||
class BikeSetupInfo(BaseModel):
    """Bike setup nested inside activity responses."""
    id: int
    frame: str
    # chainring / rear_cog look like tooth counts — TODO confirm against the bike_setup model
    chainring: int
    rear_cog: int
    name: Optional[str] = None
|
||||
|
||||
class ActivityResponse(BaseModel):
|
||||
id: Optional[int] = None
|
||||
garmin_activity_id: Optional[str] = None
|
||||
@@ -28,6 +44,28 @@ class ActivityResponse(BaseModel):
|
||||
file_type: Optional[str] = None
|
||||
download_status: Optional[str] = None
|
||||
downloaded_at: Optional[str] = None
|
||||
bike_setup: Optional[BikeSetupInfo] = None
|
||||
|
||||
class ActivityDetailResponse(ActivityResponse):
    """Activity summary metrics on top of the base ActivityResponse fields.

    All fields are optional: they may be missing from the DB row and are
    backfilled from the stored FIT file where possible (see
    get_activity_details). Units are whatever the parser/DB stores —
    presumably FIT-native (meters, m/s, bpm, rpm, watts) — TODO confirm.
    """
    distance: Optional[float] = None
    calories: Optional[float] = None
    avg_hr: Optional[int] = None
    max_hr: Optional[int] = None
    avg_speed: Optional[float] = None
    max_speed: Optional[float] = None
    elevation_gain: Optional[float] = None
    elevation_loss: Optional[float] = None
    avg_cadence: Optional[int] = None
    max_cadence: Optional[int] = None
    steps: Optional[int] = None
    # Garmin training-effect scores
    aerobic_te: Optional[float] = None
    anaerobic_te: Optional[float] = None
    avg_power: Optional[int] = None
    max_power: Optional[int] = None
    norm_power: Optional[int] = None
    tss: Optional[float] = None
    vo2_max: Optional[float] = None
|
||||
|
||||
|
||||
@router.get("/activities/list", response_model=List[ActivityResponse])
|
||||
async def list_activities(
|
||||
@@ -36,28 +74,60 @@ async def list_activities(
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Return metadata for all downloaded/available activities.
|
||||
Return metadata for all scanned activities, indicating download status.
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Listing activities with limit={limit}, offset={offset}")
|
||||
|
||||
# Query the database for activities
|
||||
activities = db.query(Activity).offset(offset).limit(limit).all()
|
||||
# Query GarminActivityState (all known activities)
|
||||
# Left join with Activity to get file status
|
||||
|
||||
results = (
|
||||
db.query(GarminActivityState, Activity)
|
||||
.outerjoin(Activity, GarminActivityState.garmin_activity_id == Activity.garmin_activity_id)
|
||||
.order_by(GarminActivityState.start_time.desc())
|
||||
.offset(offset)
|
||||
.limit(limit)
|
||||
.all()
|
||||
)
|
||||
|
||||
# Convert SQLAlchemy objects to Pydantic models
|
||||
activity_responses = []
|
||||
for activity in activities:
|
||||
for state, activity in results:
|
||||
# Determine logic
|
||||
# If activity exists in 'Activity' table, use its details?
|
||||
# Or prefer GarminActivityState metadata?
|
||||
# State metadata is from scan (Garth). Activity is from file parse (db import).
|
||||
# Usually Activity data is richer IF downloaded.
|
||||
|
||||
is_downloaded = (
|
||||
activity is not None and
|
||||
activity.download_status == 'downloaded' and
|
||||
activity.file_content is not None
|
||||
)
|
||||
|
||||
download_status = 'downloaded' if is_downloaded else 'pending'
|
||||
# Or use state.sync_status? state.sync_status is 'new', 'synced'.
|
||||
# 'synced' usually means downloaded.
|
||||
|
||||
# Construct response
|
||||
activity_responses.append(
|
||||
ActivityResponse(
|
||||
id=activity.id,
|
||||
garmin_activity_id=activity.garmin_activity_id,
|
||||
activity_name=activity.activity_name,
|
||||
activity_type=activity.activity_type,
|
||||
start_time=activity.start_time.isoformat() if activity.start_time else None,
|
||||
duration=activity.duration,
|
||||
file_type=activity.file_type,
|
||||
download_status=activity.download_status,
|
||||
downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None
|
||||
id=activity.id if activity else None,
|
||||
garmin_activity_id=state.garmin_activity_id,
|
||||
activity_name=state.activity_name,
|
||||
activity_type=state.activity_type,
|
||||
start_time=state.start_time.isoformat() if state.start_time else None,
|
||||
duration=activity.duration if activity else None, # Duration might only be in file parse? Or scan could get it? Scan currently doesn't fetch duration.
|
||||
file_type=activity.file_type if activity else None,
|
||||
download_status=download_status,
|
||||
downloaded_at=activity.downloaded_at.isoformat() if (activity and activity.downloaded_at) else None,
|
||||
bike_setup=BikeSetupInfo(
|
||||
id=activity.bike_setup.id,
|
||||
frame=activity.bike_setup.frame,
|
||||
chainring=activity.bike_setup.chainring,
|
||||
rear_cog=activity.bike_setup.rear_cog,
|
||||
name=activity.bike_setup.name
|
||||
) if (activity and activity.bike_setup) else None
|
||||
)
|
||||
)
|
||||
|
||||
@@ -117,7 +187,14 @@ async def query_activities(
|
||||
duration=activity.duration,
|
||||
file_type=activity.file_type,
|
||||
download_status=activity.download_status,
|
||||
downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None
|
||||
downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None,
|
||||
bike_setup=BikeSetupInfo(
|
||||
id=activity.bike_setup.id,
|
||||
frame=activity.bike_setup.frame,
|
||||
chainring=activity.bike_setup.chainring,
|
||||
rear_cog=activity.bike_setup.rear_cog,
|
||||
name=activity.bike_setup.name
|
||||
) if activity.bike_setup else None
|
||||
)
|
||||
)
|
||||
|
||||
@@ -172,6 +249,78 @@ async def download_activity(activity_id: str, db: Session = Depends(get_db)):
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error downloading activity: {str(e)}")
|
||||
|
||||
@router.get("/activities/{activity_id}/details", response_model=ActivityDetailResponse)
async def get_activity_details(activity_id: str, db: Session = Depends(get_db)):
    """
    Get full details for a specific activity.

    Looks the activity up by Garmin activity id. When key metric columns are
    NULL in the DB but raw file content is stored, a best-effort summary is
    parsed from the FIT file and used to backfill missing fields (DB values
    always win; TCX backfill is not implemented yet).

    Raises:
        HTTPException 404: activity not found.
        HTTPException 500: any other failure.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity:
            raise HTTPException(status_code=404, detail="Activity not found")

        # Fallback: Extraction from file if DB fields are missing.
        # Only triggered when at least one of the probe columns is NULL.
        overrides = {}
        if activity.file_content and (activity.distance is None or activity.elevation_gain is None or activity.avg_hr is None):
            try:
                if activity.file_type == 'fit':
                    overrides = _extract_summary_from_fit(activity.file_content)
                elif activity.file_type == 'tcx':
                    # overrides = _extract_summary_from_tcx(activity.file_content) # Optional TODO
                    pass
            except Exception as e:
                # Backfill is best-effort: a parse failure must not break the endpoint.
                logger.warning(f"Failed to extract summary from file: {e}")

        # Helper to merge DB value or Override: DB column wins when non-NULL,
        # otherwise fall back to the key extracted from the FIT session message.
        def val(attr, key):
            v = getattr(activity, attr)
            if v is not None: return v
            return overrides.get(key)

        return ActivityDetailResponse(
            id=activity.id,
            garmin_activity_id=activity.garmin_activity_id,
            activity_name=activity.activity_name,
            activity_type=activity.activity_type,
            start_time=activity.start_time.isoformat() if activity.start_time else None,
            duration=val('duration', 'total_timer_time'),
            file_type=activity.file_type,
            download_status=activity.download_status,
            downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None,
            # Extended metrics
            distance=val('distance', 'total_distance'),
            calories=val('calories', 'total_calories'),
            avg_hr=val('avg_hr', 'avg_heart_rate'),
            max_hr=val('max_hr', 'max_heart_rate'),
            avg_speed=val('avg_speed', 'enhanced_avg_speed'), # fallback to avg_speed handled in extractor
            max_speed=val('max_speed', 'enhanced_max_speed'),
            elevation_gain=val('elevation_gain', 'total_ascent'),
            elevation_loss=val('elevation_loss', 'total_descent'),
            avg_cadence=val('avg_cadence', 'avg_cadence'),
            max_cadence=val('max_cadence', 'max_cadence'),
            steps=activity.steps, # No session step count usually
            aerobic_te=val('aerobic_te', 'total_training_effect'),
            anaerobic_te=val('anaerobic_te', 'total_anaerobic_training_effect'),
            avg_power=val('avg_power', 'avg_power'),
            max_power=val('max_power', 'max_power'),
            norm_power=val('norm_power', 'normalized_power'),
            tss=val('tss', 'training_stress_score'),
            vo2_max=activity.vo2_max, # Usually not in simple session msg directly but maybe
            bike_setup=BikeSetupInfo(
                id=activity.bike_setup.id,
                frame=activity.bike_setup.frame,
                chainring=activity.bike_setup.chainring,
                rear_cog=activity.bike_setup.rear_cog,
                name=activity.bike_setup.name
            ) if activity.bike_setup else None
        )
    except HTTPException:
        # Intentional HTTP errors (404) pass through untouched.
        raise
    except Exception as e:
        logger.error(f"Error getting activity details: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
# Import necessary auth dependencies
|
||||
from ..models.api_token import APIToken
|
||||
import garth
|
||||
@@ -238,4 +387,419 @@ async def redownload_activity_endpoint(activity_id: str, db: Session = Depends(g
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error in redownload_activity_endpoint: {e}")
|
||||
raise HTTPException(status_code=500, detail=f"Error processing redownload: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail=f"Error processing redownload: {str(e)}")
|
||||
|
||||
# New Sync Endpoints
|
||||
|
||||
def run_scan_job(job_id: str, days_back: int, db_session_factory):
    """Background task wrapper for scan"""
    # Imports are deferred so a broken optional dependency fails the job
    # instead of breaking module import of the router.
    try:
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp
    except Exception as e:
        logger.error(f"Import error in background job: {e}")
        job_manager.fail_job(job_id, f"Import error: {str(e)}")
        return

    try:
        with db_session_factory() as session:
            sync = SyncApp(session, GarminClient())
            job_manager.update_job(job_id, status="running", progress=0)
            sync.scan_activities(days_back=days_back)
            job_manager.complete_job(job_id)
    except Exception as e:
        # Any failure (client creation, scan, session) marks the job failed.
        logger.error(f"Scan job failed: {e}")
        job_manager.fail_job(job_id, str(e))
|
||||
|
||||
def run_sync_job(job_id: str, limit: int, db_session_factory):
    """Background task wrapper for sync pending"""
    # Deferred imports: report failures via the job instead of crashing startup.
    try:
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp
    except Exception as e:
        logger.error(f"Import error in background job: {e}")
        job_manager.fail_job(job_id, f"Import error: {str(e)}")
        return

    with db_session_factory() as session:
        try:
            sync = SyncApp(session, GarminClient())
            # sync_pending_activities handles job updates
            sync.sync_pending_activities(limit=limit, job_id=job_id)
        except Exception as e:
            logger.error(f"Sync job failed: {e}")
            job_manager.fail_job(job_id, str(e))
|
||||
|
||||
|
||||
@router.post("/activities/sync/scan")
async def scan_activities_trigger(
    background_tasks: BackgroundTasks,
    days_back: int = Query(30, description="Number of days to scan back for new activities")
):
    """Trigger background scan of metadata"""
    job_id = job_manager.create_job("scan_activities")

    # The background task needs its own sessions, independent of the
    # request lifecycle — hand it a fresh session factory.
    manager = PostgreSQLManager(config.DATABASE_URL)
    background_tasks.add_task(run_scan_job, job_id, days_back, manager.get_db_session)

    return {"job_id": job_id, "status": "started"}
|
||||
|
||||
@router.post("/activities/sync/pending")
async def sync_pending_trigger(
    background_tasks: BackgroundTasks,
    limit: Optional[int] = Query(None, description="Limit number of activities to sync")
):
    """Trigger background sync of pending activities"""
    job_id = job_manager.create_job("sync_pending_activities")

    # Fresh session factory for the background worker (request session
    # closes when this handler returns).
    manager = PostgreSQLManager(config.DATABASE_URL)
    background_tasks.add_task(run_sync_job, job_id, limit, manager.get_db_session)

    return {"job_id": job_id, "status": "started"}
|
||||
|
||||
@router.get("/activities/sync/status")
async def get_sync_status_summary(db: Session = Depends(get_db)):
    """Get counts of activities by sync status.

    Returns:
        Mapping of sync_status value -> count (e.g. {"new": 3, "synced": 12}).
        Returns an empty dict on query failure so the UI degrades gracefully
        (the error is still logged).
    """
    try:
        # NOTE: `func` is imported at module level; the previous local
        # `from sqlalchemy import func` shadowed it redundantly.
        stats = db.query(
            GarminActivityState.sync_status,
            func.count(GarminActivityState.garmin_activity_id)
        ).group_by(GarminActivityState.sync_status).all()

        return {status: count for status, count in stats}
    except Exception as e:
        logger.error(f"Error getting sync status: {e}")
        return {}
|
||||
|
||||
|
||||
def _extract_points_from_fit(file_content: bytes) -> List[List[float]]:
    """
    Extract [lon, lat] points from a FIT file content.
    Returns a list of [lon, lat]; partial results are returned on parse errors.
    """
    # Garmin stores coordinates as semicircles; degrees = semicircles * 180 / 2^31.
    semicircle_to_deg = 180.0 / 2**31
    coords: List[List[float]] = []
    try:
        with io.BytesIO(file_content) as buf, fitdecode.FitReader(buf) as reader:
            for frame in reader:
                if frame.frame_type != fitdecode.FIT_FRAME_DATA or frame.name != 'record':
                    continue
                if not (frame.has_field('position_lat') and frame.has_field('position_long')):
                    continue
                raw_lat = frame.get_value('position_lat')
                raw_lon = frame.get_value('position_long')
                if raw_lat is None or raw_lon is None:
                    continue
                coords.append([raw_lon * semicircle_to_deg, raw_lat * semicircle_to_deg])
    except Exception as e:
        logger.error(f"Error parsing FIT file: {e}")
        # Return what we have or empty
    return coords
|
||||
|
||||
def _extract_points_from_tcx(file_content: bytes) -> List[List[float]]:
|
||||
"""
|
||||
Extract [lon, lat] points from a TCX file content.
|
||||
"""
|
||||
points = []
|
||||
try:
|
||||
# TCX is XML
|
||||
# Namespace usually exists
|
||||
root = ET.fromstring(file_content)
|
||||
# Namespaces are annoying in ElementTree, usually {http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}
|
||||
# We can just iterate and ignore namespace or handle it.
|
||||
# Let's try ignoring namespace by using local-name() in xpath if lxml, but this is stdlib ET.
|
||||
# Just strip namespace for simplicity
|
||||
|
||||
for trkpt in root.iter():
|
||||
if trkpt.tag.endswith('Trackpoint'):
|
||||
lat = None
|
||||
lon = None
|
||||
for child in trkpt.iter():
|
||||
if child.tag.endswith('LatitudeDegrees'):
|
||||
try: lat = float(child.text)
|
||||
except: pass
|
||||
elif child.tag.endswith('LongitudeDegrees'):
|
||||
try: lon = float(child.text)
|
||||
except: pass
|
||||
|
||||
if lat is not None and lon is not None:
|
||||
points.append([lon, lat])
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing TCX file: {e}")
|
||||
return points
|
||||
|
||||
@router.get("/activities/{activity_id}/geojson")
async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)):
    """
    Return GeoJSON LineString for the activity track.

    Returns an empty FeatureCollection when the file has no coordinates or
    the file type is unsupported.

    Raises:
        HTTPException 404: activity missing or no file content stored.
        HTTPException 500: unexpected query/parse failure.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity or not activity.file_content:
            raise HTTPException(status_code=404, detail="Activity or file content not found")

        points = []
        if activity.file_type == 'fit':
            points = _extract_points_from_fit(activity.file_content)
        elif activity.file_type == 'tcx':
            points = _extract_points_from_tcx(activity.file_content)
        else:
            logger.warning(f"Unsupported file type for map: {activity.file_type}")

        if not points:
            return {"type": "FeatureCollection", "features": []}

        return {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "properties": {
                    "color": "red"
                },
                "geometry": {
                    "type": "LineString",
                    "coordinates": points
                }
            }]
        }

    except HTTPException:
        # BUGFIX: previously the intentional 404 above was caught by the
        # generic handler below and rewrapped as a 500.
        raise
    except Exception as e:
        logger.error(f"Error generating GeoJSON: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
def _extract_streams_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
    """Extract per-record time series from FIT content for charting.

    Returns a dict of parallel lists keyed by stream name; "time" holds
    seconds relative to the first record's timestamp, other streams may
    contain None where a field is absent. Partial results are returned on
    parse errors.
    """
    streams: Dict[str, List[Any]] = {
        "time": [],
        "heart_rate": [],
        "power": [],
        "altitude": [],
        "speed": [],
        "cadence": []
    }

    # Hoisted out of the record loop (previously re-defined per frame):
    # return the first present field among `keys` (prefer 'enhanced_*').
    def get_val(frame, keys):
        for k in keys:
            if frame.has_field(k):
                return frame.get_value(k)
        return None

    try:
        start_time = None
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'record':
                        timestamp = frame.get_value('timestamp')
                        if not start_time and timestamp:
                            start_time = timestamp

                        if timestamp and start_time:
                            # Relative time in seconds
                            t = (timestamp - start_time).total_seconds()

                            streams["time"].append(t)
                            streams["heart_rate"].append(get_val(frame, ['heart_rate']))
                            streams["power"].append(get_val(frame, ['power']))
                            streams["altitude"].append(get_val(frame, ['enhanced_altitude', 'altitude']))
                            streams["speed"].append(get_val(frame, ['enhanced_speed', 'speed'])) # m/s (enhanced is also m/s)
                            streams["cadence"].append(get_val(frame, ['cadence']))
    except Exception as e:
        logger.error(f"Error extracting streams from FIT: {e}")
    return streams
|
||||
|
||||
def _extract_summary_from_fit(file_content: bytes) -> Dict[str, Any]:
    """Extract session-level summary metrics from FIT content.

    Reads the first 'session' message only (FIT multisport files can carry
    several sessions; the first is taken for now). Keys mirror FIT field
    names; missing fields map to None. Returns {} (or a partial dict) on
    parse errors.
    """
    summary = {}
    try:
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'session':
                        # First present field wins (prefer enhanced fields).
                        def get(keys):
                            for k in keys:
                                if frame.has_field(k): return frame.get_value(k)
                            return None

                        summary['total_distance'] = get(['total_distance'])
                        summary['total_timer_time'] = get(['total_timer_time', 'total_elapsed_time'])
                        summary['total_calories'] = get(['total_calories'])
                        summary['avg_heart_rate'] = get(['avg_heart_rate'])
                        summary['max_heart_rate'] = get(['max_heart_rate'])
                        summary['avg_cadence'] = get(['avg_cadence'])
                        summary['max_cadence'] = get(['max_cadence'])
                        summary['avg_power'] = get(['avg_power'])
                        summary['max_power'] = get(['max_power'])
                        summary['total_ascent'] = get(['total_ascent'])
                        summary['total_descent'] = get(['total_descent'])
                        summary['enhanced_avg_speed'] = get(['enhanced_avg_speed', 'avg_speed'])
                        summary['enhanced_max_speed'] = get(['enhanced_max_speed', 'max_speed'])
                        summary['normalized_power'] = get(['normalized_power'])
                        summary['training_stress_score'] = get(['training_stress_score'])
                        summary['total_training_effect'] = get(['total_training_effect'])
                        summary['total_anaerobic_training_effect'] = get(['total_anaerobic_training_effect'])

                        # Stop after the first session message.
                        break
    except Exception as e:
        # BUGFIX: log message grammar ("Error extraction" -> "Error extracting").
        logger.error(f"Error extracting summary from FIT: {e}")
    return summary
|
||||
|
||||
def _extract_streams_from_tcx(file_content: bytes) -> Dict[str, List[Any]]:
|
||||
streams = {
|
||||
"time": [],
|
||||
"heart_rate": [],
|
||||
"power": [],
|
||||
"altitude": [],
|
||||
"speed": [],
|
||||
"cadence": []
|
||||
}
|
||||
try:
|
||||
root = ET.fromstring(file_content)
|
||||
# Namespace strip hack
|
||||
start_time = None
|
||||
|
||||
for trkpt in root.iter():
|
||||
if trkpt.tag.endswith('Trackpoint'):
|
||||
timestamp_str = None
|
||||
hr = None
|
||||
pwr = None
|
||||
alt = None
|
||||
cad = None
|
||||
spd = None
|
||||
|
||||
for child in trkpt.iter():
|
||||
if child.tag.endswith('Time'):
|
||||
timestamp_str = child.text
|
||||
elif child.tag.endswith('AltitudeMeters'):
|
||||
try: alt = float(child.text)
|
||||
except: pass
|
||||
elif child.tag.endswith('HeartRateBpm'):
|
||||
for val in child:
|
||||
if val.tag.endswith('Value'):
|
||||
try: hr = int(val.text)
|
||||
except: pass
|
||||
elif child.tag.endswith('Cadence'): # Standard TCX cadence
|
||||
try: cad = int(child.text)
|
||||
except: pass
|
||||
elif child.tag.endswith('Extensions'):
|
||||
# TPX extensions for speed/power
|
||||
for ext in child.iter():
|
||||
if ext.tag.endswith('Speed'):
|
||||
try: spd = float(ext.text)
|
||||
except: pass
|
||||
elif ext.tag.endswith('Watts'):
|
||||
try: pwr = int(ext.text)
|
||||
except: pass
|
||||
|
||||
if timestamp_str:
|
||||
try:
|
||||
# TCX time format is ISO8601 usually
|
||||
ts = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
|
||||
if not start_time:
|
||||
start_time = ts
|
||||
|
||||
streams["time"].append((ts - start_time).total_seconds())
|
||||
streams["heart_rate"].append(hr)
|
||||
streams["power"].append(pwr)
|
||||
streams["altitude"].append(alt)
|
||||
streams["speed"].append(spd)
|
||||
streams["cadence"].append(cad)
|
||||
except: pass
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error extracting streams from TCX: {e}")
|
||||
return streams
|
||||
|
||||
|
||||
@router.get("/activities/{activity_id}/streams")
async def get_activity_streams(activity_id: str, db: Session = Depends(get_db)):
    """
    Return time series data for charts.

    Raises:
        HTTPException 404: activity missing or no file content stored.
        HTTPException 500: unexpected query/parse failure.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity or not activity.file_content:
            raise HTTPException(status_code=404, detail="Activity or file content not found")

        streams = {}
        if activity.file_type == 'fit':
            streams = _extract_streams_from_fit(activity.file_content)
        elif activity.file_type == 'tcx':
            streams = _extract_streams_from_tcx(activity.file_content)
        else:
            logger.warning(f"Unsupported file type for streams: {activity.file_type}")

        return streams
    except HTTPException:
        # BUGFIX: previously the intentional 404 above was swallowed by the
        # generic handler below and rewrapped as a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting streams: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.get("/activities/{activity_id}/navigation")
async def get_activity_navigation(activity_id: str, db: Session = Depends(get_db)):
    """
    Return next/prev activity IDs.

    "prev" is the closest older activity (by start_time), "next" the closest
    newer one; the *_type variants additionally require the same
    activity_type as the current activity. Missing neighbors are None.

    Raises:
        HTTPException 404: activity not found.
        HTTPException 500: unexpected query failure.
    """
    try:
        current = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not current:
            raise HTTPException(status_code=404, detail="Activity not found")

        # Global Prev (Older)
        prev_act = (
            db.query(Activity)
            .filter(Activity.start_time < current.start_time)
            .order_by(Activity.start_time.desc())
            .first()
        )

        # Global Next (Newer)
        next_act = (
            db.query(Activity)
            .filter(Activity.start_time > current.start_time)
            .order_by(Activity.start_time.asc())
            .first()
        )

        # Same Type Prev
        prev_type_act = (
            db.query(Activity)
            .filter(Activity.start_time < current.start_time)
            .filter(Activity.activity_type == current.activity_type)
            .order_by(Activity.start_time.desc())
            .first()
        )

        # Same Type Next
        next_type_act = (
            db.query(Activity)
            .filter(Activity.start_time > current.start_time)
            .filter(Activity.activity_type == current.activity_type)
            .order_by(Activity.start_time.asc())
            .first()
        )

        return {
            "prev_id": prev_act.garmin_activity_id if prev_act else None,
            "next_id": next_act.garmin_activity_id if next_act else None,
            "prev_type_id": prev_type_act.garmin_activity_id if prev_type_act else None,
            "next_type_id": next_type_act.garmin_activity_id if next_type_act else None
        }

    except HTTPException:
        # BUGFIX: previously the intentional 404 above was caught by the
        # generic handler below and rewrapped as a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting navigation: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
||||
Reference in New Issue
Block a user