# FitTrack2/FitnessSync/backend/src/api/activities.py
# Activity API endpoints: listing, querying, download, details, sync jobs,
# bike matching, power estimation, and chart/map data extraction.
# Standard library
import io
import logging
import xml.etree.ElementTree as ET
from datetime import datetime
from typing import List, Optional, Dict, Any

# Third-party
import fitdecode  # used by the FIT stream/summary extractors below
from fastapi import APIRouter, Query, Response, HTTPException, Depends, BackgroundTasks
from pydantic import BaseModel
from sqlalchemy import func

# Local
from ..models.activity import Activity
from ..models.activity_state import GarminActivityState
from ..services.job_manager import job_manager
from ..services.parsers import extract_points_from_file
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
from sqlalchemy.orm import Session
router = APIRouter()
logger = logging.getLogger(__name__)
def get_db():
    """FastAPI dependency that yields a request-scoped database session."""
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as session:
        yield session
class BikeSetupInfo(BaseModel):
    """Serialized bike-setup details embedded in activity responses."""
    id: int
    frame: str
    chainring: int  # front chainring tooth count
    rear_cog: int  # rear cog tooth count
    name: Optional[str] = None  # optional user-facing label
class ActivityResponse(BaseModel):
    """Summary metadata for one activity, as returned by the list/query endpoints."""
    id: Optional[int] = None  # local DB id; None when only scanned, not yet imported
    garmin_activity_id: Optional[str] = None
    activity_name: Optional[str] = None
    activity_type: Optional[str] = None
    start_time: Optional[str] = None  # ISO-8601 string (serialized via isoformat())
    duration: Optional[int] = None
    # file_path removed since we store in DB
    file_type: Optional[str] = None  # e.g. 'fit', 'tcx', 'gpx'
    download_status: Optional[str] = None  # 'downloaded' or 'pending'
    downloaded_at: Optional[str] = None  # ISO-8601 string
    bike_setup: Optional[BikeSetupInfo] = None
class ActivityDetailResponse(ActivityResponse):
    """Full per-activity metrics; extends the summary response.

    Any field may be None when neither the DB row nor the stored activity
    file provides the metric.
    """
    distance: Optional[float] = None
    calories: Optional[float] = None
    avg_hr: Optional[int] = None
    max_hr: Optional[int] = None
    avg_speed: Optional[float] = None
    max_speed: Optional[float] = None
    elevation_gain: Optional[float] = None
    elevation_loss: Optional[float] = None
    avg_cadence: Optional[int] = None
    max_cadence: Optional[int] = None
    steps: Optional[int] = None
    aerobic_te: Optional[float] = None  # aerobic training effect
    anaerobic_te: Optional[float] = None  # anaerobic training effect
    avg_power: Optional[int] = None
    max_power: Optional[int] = None
    norm_power: Optional[int] = None  # normalized power
    tss: Optional[float] = None  # training stress score
    vo2_max: Optional[float] = None
@router.get("/activities/list", response_model=List[ActivityResponse])
async def list_activities(
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    db: Session = Depends(get_db)
):
    """
    Return metadata for all scanned activities, indicating download status.
    """
    try:
        logger.info(f"Listing activities with limit={limit}, offset={offset}")

        # All known activities live in GarminActivityState (from the scan);
        # a left join against Activity reveals whether the file was imported.
        rows = (
            db.query(GarminActivityState, Activity)
            .outerjoin(Activity, GarminActivityState.garmin_activity_id == Activity.garmin_activity_id)
            .order_by(GarminActivityState.start_time.desc())
            .offset(offset)
            .limit(limit)
            .all()
        )

        def to_response(state, activity):
            # Scan metadata (state) is always present; file-derived fields are
            # available only once the activity row exists with stored content.
            downloaded = bool(
                activity is not None
                and activity.download_status == 'downloaded'
                and activity.file_content is not None
            )
            setup = None
            if activity is not None and activity.bike_setup:
                setup = BikeSetupInfo(
                    id=activity.bike_setup.id,
                    frame=activity.bike_setup.frame,
                    chainring=activity.bike_setup.chainring,
                    rear_cog=activity.bike_setup.rear_cog,
                    name=activity.bike_setup.name,
                )
            return ActivityResponse(
                id=activity.id if activity else None,
                garmin_activity_id=state.garmin_activity_id,
                activity_name=state.activity_name,
                activity_type=state.activity_type,
                start_time=state.start_time.isoformat() if state.start_time else None,
                duration=activity.duration if activity else None,  # scan does not fetch duration
                file_type=activity.file_type if activity else None,
                download_status='downloaded' if downloaded else 'pending',
                downloaded_at=activity.downloaded_at.isoformat() if (activity and activity.downloaded_at) else None,
                bike_setup=setup,
            )

        responses = [to_response(state, activity) for state, activity in rows]
        logger.info(f"Returning {len(responses)} activities")
        return responses
    except Exception as e:
        logger.error(f"Error in list_activities: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error listing activities: {str(e)}")
@router.get("/activities/query", response_model=List[ActivityResponse])
async def query_activities(
    activity_type: Optional[str] = Query(None),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None),
    download_status: Optional[str] = Query(None),
    bike_setup_id: Optional[int] = Query(None),
    db: Session = Depends(get_db)
):
    """
    Allow advanced filtering of activities.

    All filters are optional and combined with AND. Dates must be ISO-8601
    strings; an unparseable date now yields a 400 instead of a generic 500.
    """
    try:
        logger.info(f"Querying activities - type: {activity_type}, start: {start_date}, end: {end_date}, status: {download_status}")
        query = db.query(Activity)

        if activity_type:
            if activity_type == 'cycling':
                # 'cycling' is a meta-filter covering all outdoor cycling sub-types.
                cycling_types = [
                    'cycling', 'road_biking', 'mountain_biking', 'gravel_cycling',
                    'cyclocross', 'track_cycling', 'commuting',
                ]
                query = query.filter(Activity.activity_type.in_(cycling_types))
            else:
                query = query.filter(Activity.activity_type == activity_type)

        # datetime is imported at module level; the previous local re-imports
        # were redundant. Bad date strings are a client error -> 400.
        try:
            if start_date:
                query = query.filter(Activity.start_time >= datetime.fromisoformat(start_date))
            if end_date:
                query = query.filter(Activity.start_time <= datetime.fromisoformat(end_date))
        except ValueError as e:
            raise HTTPException(status_code=400, detail=f"Invalid date format: {e}")

        if download_status:
            query = query.filter(Activity.download_status == download_status)
        if bike_setup_id is not None:  # 'is not None' so id 0 would still filter
            query = query.filter(Activity.bike_setup_id == bike_setup_id)

        activities = query.all()
        activity_responses = [
            ActivityResponse(
                id=a.id,
                garmin_activity_id=a.garmin_activity_id,
                activity_name=a.activity_name,
                activity_type=a.activity_type,
                start_time=a.start_time.isoformat() if a.start_time else None,
                duration=a.duration,
                file_type=a.file_type,
                download_status=a.download_status,
                downloaded_at=a.downloaded_at.isoformat() if a.downloaded_at else None,
                bike_setup=BikeSetupInfo(
                    id=a.bike_setup.id,
                    frame=a.bike_setup.frame,
                    chainring=a.bike_setup.chainring,
                    rear_cog=a.bike_setup.rear_cog,
                    name=a.bike_setup.name,
                ) if a.bike_setup else None,
            )
            for a in activities
        ]
        logger.info(f"Returning {len(activity_responses)} filtered activities")
        return activity_responses
    except HTTPException:
        raise  # keep the 400 from date validation intact
    except Exception as e:
        logger.error(f"Error in query_activities: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error querying activities: {str(e)}")
@router.get("/activities/download/{activity_id}")
async def download_activity(activity_id: str, db: Session = Depends(get_db)):
    """
    Serve the stored activity file from the database.

    Raises:
        HTTPException: 404 when the activity or its file content is missing,
            400 when the file is not in 'downloaded' state, 500 otherwise.
    """
    try:
        logger.info(f"Downloading activity with ID: {activity_id}")
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity:
            raise HTTPException(status_code=404, detail=f"Activity with ID {activity_id} not found")
        if not activity.file_content:
            raise HTTPException(status_code=404, detail=f"No file content available for activity {activity_id}")
        if activity.download_status != 'downloaded':
            raise HTTPException(status_code=400, detail=f"File for activity {activity_id} is not ready for download (status: {activity.download_status})")

        # Map the stored file type to a MIME type; FIT files are binary.
        content_type_map = {
            'tcx': 'application/vnd.garmin.tcx+xml',
            'gpx': 'application/gpx+xml',
            'fit': 'application/octet-stream'
        }
        content_type = content_type_map.get(activity.file_type, 'application/octet-stream')
        filename = f"activity_{activity_id}.{activity.file_type}"

        logger.info(f"Returning file for activity {activity_id} with content type {content_type}")
        return Response(
            content=activity.file_content,
            media_type=content_type,
            headers={
                # Bug fix: the computed filename was previously not interpolated
                # into the Content-Disposition header.
                "Content-Disposition": f'attachment; filename="{filename}"',
                "Content-Length": str(len(activity.file_content))
            }
        )
    except HTTPException:
        # Re-raise HTTP exceptions as-is
        raise
    except Exception as e:
        logger.error(f"Error downloading activity: {e}")  # previously unlogged
        raise HTTPException(status_code=500, detail=f"Error downloading activity: {str(e)}")
@router.get("/activities/{activity_id}/details", response_model=ActivityDetailResponse)
async def get_activity_details(activity_id: str, db: Session = Depends(get_db)):
    """
    Get full details for a specific activity.

    DB-stored metric columns take precedence; when key metrics are missing
    and a FIT file is stored, a session summary is parsed from the file and
    used to fill the gaps (TCX summary extraction is not implemented yet).

    Raises:
        HTTPException: 404 if the activity is unknown, 500 on other errors.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity:
            raise HTTPException(status_code=404, detail="Activity not found")
        # Fallback: Extraction from file if DB fields are missing.
        # Only triggered when any of the headline metrics is absent.
        overrides = {}
        if activity.file_content and (activity.distance is None or activity.elevation_gain is None or activity.avg_hr is None):
            try:
                if activity.file_type == 'fit':
                    overrides = _extract_summary_from_fit(activity.file_content)
                elif activity.file_type == 'tcx':
                    # overrides = _extract_summary_from_tcx(activity.file_content) # Optional TODO
                    pass
            except Exception as e:
                # Best-effort: a parse failure just means no override values.
                logger.warning(f"Failed to extract summary from file: {e}")
        # Helper to merge DB value or Override: DB wins when present,
        # otherwise fall back to the FIT-session key (may itself be None).
        def val(attr: str, key: str):
            v = getattr(activity, attr)
            if v is not None: return v
            return overrides.get(key)
        return ActivityDetailResponse(
            id=activity.id,
            garmin_activity_id=activity.garmin_activity_id,
            activity_name=activity.activity_name,
            activity_type=activity.activity_type,
            start_time=activity.start_time.isoformat() if activity.start_time else None,
            duration=val('duration', 'total_timer_time'),
            file_type=activity.file_type,
            download_status=activity.download_status,
            downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None,
            # Extended metrics: second argument is the FIT session field name.
            distance=val('distance', 'total_distance'),
            calories=val('calories', 'total_calories'),
            avg_hr=val('avg_hr', 'avg_heart_rate'),
            max_hr=val('max_hr', 'max_heart_rate'),
            avg_speed=val('avg_speed', 'enhanced_avg_speed'), # fallback to avg_speed handled in extractor
            max_speed=val('max_speed', 'enhanced_max_speed'),
            elevation_gain=val('elevation_gain', 'total_ascent'),
            elevation_loss=val('elevation_loss', 'total_descent'),
            avg_cadence=val('avg_cadence', 'avg_cadence'),
            max_cadence=val('max_cadence', 'max_cadence'),
            steps=activity.steps, # No session step count usually
            aerobic_te=val('aerobic_te', 'total_training_effect'),
            anaerobic_te=val('anaerobic_te', 'total_anaerobic_training_effect'),
            avg_power=val('avg_power', 'avg_power'),
            max_power=val('max_power', 'max_power'),
            norm_power=val('norm_power', 'normalized_power'),
            tss=val('tss', 'training_stress_score'),
            vo2_max=activity.vo2_max, # Usually not in simple session msg directly but maybe
            bike_setup=BikeSetupInfo(
                id=activity.bike_setup.id,
                frame=activity.bike_setup.frame,
                chainring=activity.bike_setup.chainring,
                rear_cog=activity.bike_setup.rear_cog,
                name=activity.bike_setup.name
            ) if activity.bike_setup else None
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting activity details: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# Import necessary auth dependencies
from ..models.api_token import APIToken
import garth
import json
from garth.auth_tokens import OAuth1Token, OAuth2Token
def _verify_garmin_session(db: Session):
    """Helper to load the stored Garmin tokens into the garth client.

    Returns True when both OAuth tokens were found and loaded; False when
    tokens are missing or loading them fails.
    """
    record = db.query(APIToken).filter_by(token_type='garmin').first()
    if record is None or not record.garth_oauth1_token or not record.garth_oauth2_token:
        return False
    try:
        oauth1 = json.loads(record.garth_oauth1_token)
        oauth2 = json.loads(record.garth_oauth2_token)
        domain = oauth1.get('domain')
        if domain:
            # Point garth at the same Garmin domain the tokens were issued for.
            garth.configure(domain=domain)
        garth.client.oauth1_token = OAuth1Token(**oauth1)
        garth.client.oauth2_token = OAuth2Token(**oauth2)
        # A strict check would call garth.UserProfile.get(); token load is
        # treated as sufficient here.
        return True
    except Exception as e:
        logger.error(f"Garth session load failed: {e}")
        return False
@router.post("/activities/{activity_id}/redownload")
async def redownload_activity_endpoint(activity_id: str, db: Session = Depends(get_db)):
    """
    Trigger a re-download of the activity file from Garmin.
    """
    try:
        logger.info(f"Request to redownload activity {activity_id}")
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp

        # Auth gate: stored tokens must load into the garth client first.
        if not _verify_garmin_session(db):
            raise HTTPException(status_code=401, detail="Garmin not authenticated or tokens invalid. Please go to Setup.")

        client = GarminClient()
        # Connection probe kept for parity with previous behavior; the token
        # load above is normally sufficient, so the result is not acted upon.
        client.check_connection()

        if not SyncApp(db, client).redownload_activity(activity_id):
            raise HTTPException(status_code=500, detail="Failed to redownload activity. Check logs for details.")

        # Best-effort: re-run bike matching against the fresh file.
        try:
            from ..services.bike_matching import process_activity_matching
            refreshed = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
            if refreshed:
                process_activity_matching(db, refreshed.id)
                logger.info(f"Retriggered bike match for {activity_id} after redownload")
        except Exception as match_err:
            logger.error(f"Error matching bike after redownload: {match_err}")

        return {"message": f"Successfully redownloaded and matched activity {activity_id}", "status": "success"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error in redownload_activity_endpoint: {e}")
        raise HTTPException(status_code=500, detail=f"Error processing redownload: {str(e)}")
# New Sync Endpoints
class BikeMatchUpdate(BaseModel):
    """Payload for manually assigning (or clearing) an activity's bike setup."""
    bike_setup_id: Optional[int] = None  # None/absent clears the assignment
    manual_override: bool = True  # NOTE(review): not read by the endpoint currently
@router.put("/activities/{activity_id}/bike")
async def update_activity_bike(activity_id: str, update: BikeMatchUpdate, db: Session = Depends(get_db)):
    """
    Manually update the bike setup for an activity.
    Sets bike_match_confidence to 2.0 to indicate manual override.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity:
            raise HTTPException(status_code=404, detail="Activity not found")

        if update.bike_setup_id:
            # Assigning: the referenced setup must exist before linking it.
            from ..models.bike_setup import BikeSetup
            setup = db.query(BikeSetup).filter(BikeSetup.id == update.bike_setup_id).first()
            if not setup:
                raise HTTPException(status_code=404, detail="Bike Setup not found")
            activity.bike_setup_id = setup.id
            activity.bike_match_confidence = 2.0  # sentinel: manually chosen
            logger.info(f"Manual bike override for {activity_id} to setup {setup.id}")
        else:
            # Clearing: drop the link but still record the manual decision.
            activity.bike_setup_id = None
            activity.bike_match_confidence = 2.0  # sentinel: manually cleared
            logger.info(f"Manual bike override for {activity_id} to cleared")

        db.commit()
        return {"message": "Bike setup updated successfully", "status": "success"}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating activity bike: {e}")
        db.rollback()
        raise HTTPException(status_code=500, detail=str(e))
def run_scan_job(job_id: str, days_back: int, db_session_factory):
    """Background task wrapper for scan"""
    # Import lazily so an import failure is reported through the job, not a crash.
    try:
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp
    except Exception as e:
        logger.error(f"Import error in background job: {e}")
        job_manager.fail_job(job_id, f"Import error: {str(e)}")
        return

    try:
        with db_session_factory() as session:
            app = SyncApp(session, GarminClient())
            job_manager.update_job(job_id, status="running", progress=0)
            app.scan_activities(days_back=days_back)
            job_manager.complete_job(job_id)
    except Exception as e:
        logger.error(f"Scan job failed: {e}")
        job_manager.fail_job(job_id, str(e))
def run_sync_job(job_id: str, limit: int, db_session_factory):
    """Background task wrapper for sync pending"""
    # Import lazily so an import failure is reported through the job, not a crash.
    try:
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp
    except Exception as e:
        logger.error(f"Import error in background job: {e}")
        job_manager.fail_job(job_id, f"Import error: {str(e)}")
        return

    with db_session_factory() as session:
        try:
            # sync_pending_activities reports its own progress/completion.
            SyncApp(session, GarminClient()).sync_pending_activities(limit=limit, job_id=job_id)
        except Exception as e:
            logger.error(f"Sync job failed: {e}")
            job_manager.fail_job(job_id, str(e))
@router.post("/activities/sync/scan")
async def scan_activities_trigger(
    background_tasks: BackgroundTasks,
    days_back: int = Query(30, description="Number of days to scan back for new activities")
):
    """Trigger background scan of metadata"""
    job_id = job_manager.create_job("scan_activities")
    # The background task needs its own session factory: the request-scoped
    # session is gone by the time the task runs.
    factory = PostgreSQLManager(config.DATABASE_URL).get_db_session
    background_tasks.add_task(run_scan_job, job_id, days_back, factory)
    return {"job_id": job_id, "status": "started"}
@router.post("/activities/sync/pending")
async def sync_pending_trigger(
    background_tasks: BackgroundTasks,
    limit: Optional[int] = Query(None, description="Limit number of activities to sync")
):
    """Trigger background sync of pending activities"""
    job_id = job_manager.create_job("sync_pending_activities")
    # Hand the task a fresh session factory; the request session won't survive.
    factory = PostgreSQLManager(config.DATABASE_URL).get_db_session
    background_tasks.add_task(run_sync_job, job_id, limit, factory)
    return {"job_id": job_id, "status": "started"}
@router.get("/activities/sync/status")
async def get_sync_status_summary(db: Session = Depends(get_db)):
    """Get counts of activities by sync status.

    Returns a mapping like {"new": 12, "synced": 345}. Deliberately
    best-effort: any error is logged and an empty dict is returned.
    """
    try:
        # 'func' is already imported at module level; the previous local
        # re-import was redundant.
        rows = (
            db.query(
                GarminActivityState.sync_status,
                func.count(GarminActivityState.garmin_activity_id),
            )
            .group_by(GarminActivityState.sync_status)
            .all()
        )
        return {status: count for status, count in rows}
    except Exception as e:
        logger.error(f"Error getting sync status: {e}")
        return {}  # preserve original best-effort fallback
@router.get("/activities/{activity_id}/geojson")
async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)):
    """
    Return GeoJSON LineString for the activity track.

    Returns an empty FeatureCollection when the file yields no points;
    404 when the activity or its stored file content is missing.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity or not activity.file_content:
            raise HTTPException(status_code=404, detail="Activity or file content not found")

        points = []
        if activity.file_type in ['fit', 'tcx']:
            points = extract_points_from_file(activity.file_content, activity.file_type)
        else:
            logger.warning(f"Unsupported file type for map: {activity.file_type}")

        if not points:
            return {"type": "FeatureCollection", "features": []}

        return {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "properties": {
                    "color": "red"
                },
                "geometry": {
                    "type": "LineString",
                    "coordinates": points
                }
            }]
        }
    except HTTPException:
        # Bug fix: the 404 above was previously swallowed by the generic
        # handler below and returned as a 500.
        raise
    except Exception as e:
        logger.error(f"Error generating GeoJSON: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _extract_streams_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
    """Extract aligned per-record time series from a FIT file.

    Returns a dict of parallel lists keyed by stream name. "time" holds
    seconds relative to the first record timestamp; records without a
    timestamp are skipped entirely so all lists stay the same length.
    Missing samples within a record are recorded as None.
    """
    streams = {
        "time": [],
        "heart_rate": [],
        "power": [],
        "altitude": [],
        "speed": [],
        "cadence": []
    }

    # Hoisted out of the loop (previously re-defined for every record):
    # return the first present field among `keys`, preferring enhanced_*
    # variants over their legacy counterparts; None when absent.
    def first_field(frame, keys):
        for k in keys:
            if frame.has_field(k):
                return frame.get_value(k)
        return None

    try:
        start_time = None
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type != fitdecode.FIT_FRAME_DATA or frame.name != 'record':
                        continue
                    timestamp = frame.get_value('timestamp')
                    if not start_time and timestamp:
                        start_time = timestamp
                    if timestamp and start_time:
                        # Relative time in seconds from the first record
                        streams["time"].append((timestamp - start_time).total_seconds())
                        streams["heart_rate"].append(first_field(frame, ['heart_rate']))
                        streams["power"].append(first_field(frame, ['power']))
                        streams["altitude"].append(first_field(frame, ['enhanced_altitude', 'altitude']))
                        # enhanced_speed and speed are both m/s
                        streams["speed"].append(first_field(frame, ['enhanced_speed', 'speed']))
                        streams["cadence"].append(first_field(frame, ['cadence']))
    except Exception as e:
        logger.error(f"Error extracting streams from FIT: {e}")
    return streams
def _extract_summary_from_fit(file_content: bytes) -> Dict[str, Any]:
    """Parse the first 'session' message of a FIT file into a summary dict.

    Keys mirror FIT session field names (total_distance, avg_heart_rate, ...)
    so callers can use them as fallbacks for missing DB columns. Values are
    None when the field is absent. Only the first session message is read;
    multisport files (multiple sessions) are not yet handled.
    """
    summary = {}
    try:
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type != fitdecode.FIT_FRAME_DATA or frame.name != 'session':
                        continue

                    def get(keys):
                        # First present field wins (prefers enhanced_* variants).
                        for k in keys:
                            if frame.has_field(k):
                                return frame.get_value(k)
                        return None

                    summary['total_distance'] = get(['total_distance'])
                    summary['total_timer_time'] = get(['total_timer_time', 'total_elapsed_time'])
                    summary['total_calories'] = get(['total_calories'])
                    summary['avg_heart_rate'] = get(['avg_heart_rate'])
                    summary['max_heart_rate'] = get(['max_heart_rate'])
                    summary['avg_cadence'] = get(['avg_cadence'])
                    summary['max_cadence'] = get(['max_cadence'])
                    summary['avg_power'] = get(['avg_power'])
                    summary['max_power'] = get(['max_power'])
                    summary['total_ascent'] = get(['total_ascent'])
                    summary['total_descent'] = get(['total_descent'])
                    summary['enhanced_avg_speed'] = get(['enhanced_avg_speed', 'avg_speed'])
                    summary['enhanced_max_speed'] = get(['enhanced_max_speed', 'max_speed'])
                    summary['normalized_power'] = get(['normalized_power'])
                    summary['training_stress_score'] = get(['training_stress_score'])
                    summary['total_training_effect'] = get(['total_training_effect'])
                    summary['total_anaerobic_training_effect'] = get(['total_anaerobic_training_effect'])
                    # FIT files can contain multiple sessions (multisport);
                    # take the first one only.
                    break
    except Exception as e:
        # Typo fixed: was "Error extraction summary from FIT"
        logger.error(f"Error extracting summary from FIT: {e}")
    return summary
def _extract_streams_from_tcx(file_content: bytes) -> Dict[str, List[Any]]:
streams = {
"time": [],
"heart_rate": [],
"power": [],
"altitude": [],
"speed": [],
"cadence": []
}
try:
root = ET.fromstring(file_content)
# Namespace strip hack
start_time = None
for trkpt in root.iter():
if trkpt.tag.endswith('Trackpoint'):
timestamp_str = None
hr = None
pwr = None
alt = None
cad = None
spd = None
for child in trkpt.iter():
if child.tag.endswith('Time'):
timestamp_str = child.text
elif child.tag.endswith('AltitudeMeters'):
try: alt = float(child.text)
except: pass
elif child.tag.endswith('HeartRateBpm'):
for val in child:
if val.tag.endswith('Value'):
try: hr = int(val.text)
except: pass
elif child.tag.endswith('Cadence'): # Standard TCX cadence
try: cad = int(child.text)
except: pass
elif child.tag.endswith('Extensions'):
# TPX extensions for speed/power
for ext in child.iter():
if ext.tag.endswith('Speed'):
try: spd = float(ext.text)
except: pass
elif ext.tag.endswith('Watts'):
try: pwr = int(ext.text)
except: pass
if timestamp_str:
try:
# TCX time format is ISO8601 usually
ts = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
if not start_time:
start_time = ts
streams["time"].append((ts - start_time).total_seconds())
streams["heart_rate"].append(hr)
streams["power"].append(pwr)
streams["altitude"].append(alt)
streams["speed"].append(spd)
streams["cadence"].append(cad)
except: pass
except Exception as e:
logger.error(f"Error extracting streams from TCX: {e}")
return streams
@router.get("/activities/{activity_id}/streams")
async def get_activity_streams(activity_id: str, db: Session = Depends(get_db)):
    """
    Return time series data for charts.

    Dispatches on the stored file type; unsupported types return an empty
    dict rather than an error.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity or not activity.file_content:
            raise HTTPException(status_code=404, detail="Activity or file content not found")

        streams = {}
        if activity.file_type == 'fit':
            streams = _extract_streams_from_fit(activity.file_content)
        elif activity.file_type == 'tcx':
            streams = _extract_streams_from_tcx(activity.file_content)
        else:
            logger.warning(f"Unsupported file type for streams: {activity.file_type}")
        return streams
    except HTTPException:
        # Bug fix: the 404 above was previously converted into a 500 by the
        # generic handler below.
        raise
    except Exception as e:
        logger.error(f"Error getting streams: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.post("/activities/{activity_id}/estimate_power")
async def estimate_activity_power(activity_id: int, db: Session = Depends(get_db)):
    """
    Trigger physics-based power estimation.
    """
    from ..services.power_estimator import PowerEstimatorService
    try:
        # ValueError from the service maps to a client error (400).
        stats = PowerEstimatorService(db).estimate_power_for_activity(activity_id)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error estimating power: {e}")
        raise HTTPException(status_code=500, detail=str(e))
    return {"message": "Power estimated successfully", "stats": stats}
@router.get("/activities/{activity_id}/navigation")
async def get_activity_navigation(activity_id: str, db: Session = Depends(get_db)):
    """
    Return next/prev activity IDs.

    "prev" is the nearest strictly-older activity, "next" the nearest
    strictly-newer one; the *_type_id variants restrict the search to the
    same activity_type as the current activity.
    """
    try:
        current = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not current:
            raise HTTPException(status_code=404, detail="Activity not found")

        def neighbor(older: bool, same_type: bool):
            # One query per neighbor: nearest activity before/after current.
            q = db.query(Activity)
            if older:
                q = q.filter(Activity.start_time < current.start_time).order_by(Activity.start_time.desc())
            else:
                q = q.filter(Activity.start_time > current.start_time).order_by(Activity.start_time.asc())
            if same_type:
                q = q.filter(Activity.activity_type == current.activity_type)
            hit = q.first()
            return hit.garmin_activity_id if hit else None

        return {
            "prev_id": neighbor(older=True, same_type=False),
            "next_id": neighbor(older=False, same_type=False),
            "prev_type_id": neighbor(older=True, same_type=True),
            "next_type_id": neighbor(older=False, same_type=True),
        }
    except HTTPException:
        # Bug fix: the 404 above was previously swallowed by the generic
        # handler below and returned as a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting navigation: {e}")
        raise HTTPException(status_code=500, detail=str(e))