Mirror of https://github.com/sstent/FitTrack_ReportGenerator.git, synced 2026-01-29 02:21:45 +00:00.
This commit introduces the initial version of the FitTrack Report Generator, a FastAPI application for analyzing workout files. Key features include: - Parsing of FIT, TCX, and GPX workout files. - Analysis of power, heart rate, speed, and elevation data. - Generation of summary reports and charts. - REST API for single and batch workout analysis. The project structure has been set up with a `src` directory for core logic, an `api` directory for the FastAPI application, and a `tests` directory for unit, integration, and contract tests. The development workflow is configured to use Docker and modern Python tooling.
479 lines · 16 KiB · Python
from typing import Optional
|
|
|
|
from fastapi import APIRouter, HTTPException
|
|
from pydantic import BaseModel
|
|
|
|
from garminsync.database import Activity, DaemonConfig, SyncLog, get_session
|
|
|
|
# All endpoints in this module are mounted under the /api prefix.
router = APIRouter(prefix="/api")
|
|
|
|
|
|
class ScheduleConfig(BaseModel):
    """Request body for the POST /schedule endpoint."""

    enabled: bool  # whether scheduled syncing is turned on
    cron_schedule: str  # cron expression controlling when syncs run
|
|
|
|
|
|
@router.get("/status")
async def get_status():
    """Get current daemon status"""
    session = get_session()
    try:
        cfg = session.query(DaemonConfig).first()
        recent = (
            session.query(SyncLog)
            .order_by(SyncLog.timestamp.desc())
            .limit(10)
            .all()
        )

        # Copy everything into plain dicts before the session closes so
        # no detached ORM objects leak out of this handler.
        daemon_data = {
            "running": cfg.status == "running" if cfg else False,
            "next_run": cfg.next_run if cfg else None,
            "schedule": cfg.schedule_cron if cfg else None,
            "last_run": cfg.last_run if cfg else None,
            "enabled": cfg.enabled if cfg else False,
        }

        # Imported lazily, matching this module's convention for daemon access.
        from garminsync.daemon import daemon_instance

        if hasattr(daemon_instance, "is_sync_in_progress"):
            daemon_data["sync_in_progress"] = daemon_instance.is_sync_in_progress()
        else:
            daemon_data["sync_in_progress"] = False

        log_data = [
            {
                "timestamp": entry.timestamp,
                "operation": entry.operation,
                "status": entry.status,
                "message": entry.message,
                "activities_processed": entry.activities_processed,
                "activities_downloaded": entry.activities_downloaded,
            }
            for entry in recent
        ]

        return {"daemon": daemon_data, "recent_logs": log_data}
    finally:
        session.close()
|
|
|
|
|
|
@router.post("/schedule")
async def update_schedule(config: ScheduleConfig):
    """Update daemon schedule configuration"""
    session = get_session()
    try:
        row = session.query(DaemonConfig).first()
        if row is None:
            # First-time configuration: create the singleton row.
            row = DaemonConfig()
            session.add(row)

        row.enabled = config.enabled
        row.schedule_cron = config.cron_schedule
        session.commit()

        return {"message": "Configuration updated successfully"}
    except Exception as e:
        session.rollback()
        raise HTTPException(
            status_code=500, detail=f"Failed to update configuration: {str(e)}"
        )
    finally:
        session.close()
|
|
|
|
|
|
@router.post("/sync/trigger")
async def trigger_sync():
    """Manually trigger a sync operation"""
    try:
        # Imported here to avoid circular imports.
        import os
        from datetime import datetime
        from pathlib import Path

        from garminsync.database import Activity, sync_database
        from garminsync.garmin import GarminClient

        # Refresh the local activity index from Garmin.
        client = GarminClient()
        sync_database(client)

        # Download every activity not yet stored on disk.
        session = get_session()
        try:
            pending = session.query(Activity).filter_by(downloaded=False).all()
            downloaded_count = 0

            target_dir = Path(os.getenv("DATA_DIR", "data"))
            target_dir.mkdir(parents=True, exist_ok=True)

            for activity in pending:
                try:
                    fit_bytes = client.download_activity_fit(activity.activity_id)

                    # Make the start time filesystem-safe for the filename.
                    stamp = activity.start_time.replace(":", "-").replace(" ", "_")
                    destination = target_dir / f"activity_{activity.activity_id}_{stamp}.fit"

                    with open(destination, "wb") as fh:
                        fh.write(fit_bytes)

                    activity.filename = str(destination)
                    activity.downloaded = True
                    activity.last_sync = datetime.now().isoformat()
                    downloaded_count += 1
                    # Commit per activity so one failure doesn't lose earlier progress.
                    session.commit()

                except Exception as e:
                    print(f"Failed to download activity {activity.activity_id}: {e}")
                    session.rollback()

            return {
                "message": f"Sync completed successfully. Downloaded {downloaded_count} activities."
            }
        finally:
            session.close()

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Sync failed: {str(e)}")
|
|
|
|
|
|
@router.get("/activities/stats")
async def get_activity_stats():
    """Get activity statistics"""
    # Imported here to avoid circular imports.
    from garminsync.database import get_offline_stats

    stats = get_offline_stats()
    return stats
|
|
|
|
|
|
@router.get("/logs")
async def get_logs(
    status: Optional[str] = None,
    operation: Optional[str] = None,
    date: Optional[str] = None,
    page: int = 1,
    per_page: int = 20,
):
    """Get sync logs with filtering and pagination.

    Args:
        status: If given, only return logs with this status.
        operation: If given, only return logs for this operation.
        date: Date prefix to match against log timestamps
            (assumed ISO format, e.g. "2024-01-31").
        page: 1-based page number.
        per_page: Number of log entries per page.

    Returns:
        Dict with the page of logs plus pagination metadata.
    """
    session = get_session()
    try:
        query = session.query(SyncLog)

        # Apply optional filters.
        if status:
            query = query.filter(SyncLog.status == status)
        if operation:
            query = query.filter(SyncLog.operation == operation)
        if date:
            # Filter by date prefix (timestamps assumed ISO-formatted).
            query = query.filter(SyncLog.timestamp.like(f"{date}%"))

        # Total count before pagination so clients can compute page count.
        total = query.count()

        logs = (
            query.order_by(SyncLog.timestamp.desc())
            .offset((page - 1) * per_page)
            .limit(per_page)
            .all()
        )

        log_data = [
            {
                "id": log.id,
                "timestamp": log.timestamp,
                "operation": log.operation,
                "status": log.status,
                "message": log.message,
                "activities_processed": log.activities_processed,
                "activities_downloaded": log.activities_downloaded,
            }
            for log in logs
        ]

        return {"logs": log_data, "total": total, "page": page, "per_page": per_page}
    finally:
        session.close()
|
|
|
|
|
|
@router.post("/daemon/start")
async def start_daemon():
    """Start the daemon process.

    Launches the daemon loop on a background thread and records the
    "running" status in the database.

    Raises:
        HTTPException: 500 if the daemon could not be started.
    """
    from garminsync.daemon import daemon_instance

    # Acquire the session before the try block: the original bound it
    # inside try, so a failure before that point hit the except/finally
    # clauses with `session` unbound (NameError masking the real error).
    session = get_session()
    try:
        # Start the daemon in a separate thread to avoid blocking the
        # request; the daemon flag lets the thread die with the process.
        import threading

        daemon_thread = threading.Thread(target=daemon_instance.start)
        daemon_thread.daemon = True
        daemon_thread.start()

        # Update daemon status in database.
        config = session.query(DaemonConfig).first()
        if not config:
            config = DaemonConfig()
            session.add(config)
        config.status = "running"
        session.commit()

        return {"message": "Daemon started successfully"}
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to start daemon: {str(e)}")
    finally:
        session.close()
|
|
|
|
|
|
@router.post("/daemon/stop")
async def stop_daemon():
    """Stop the daemon process.

    Raises:
        HTTPException: 500 if the daemon could not be stopped.
    """
    from garminsync.daemon import daemon_instance

    # Acquire the session up front: the original created it inside the
    # try block, so a failure in daemon_instance.stop() reached the
    # except/finally clauses with `session` unbound (NameError).
    session = get_session()
    try:
        # Stop the daemon.
        daemon_instance.stop()

        # Update daemon status in database.
        config = session.query(DaemonConfig).first()
        if config:
            config.status = "stopped"
        session.commit()

        return {"message": "Daemon stopped successfully"}
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to stop daemon: {str(e)}")
    finally:
        session.close()
|
|
|
|
|
|
@router.delete("/logs")
async def clear_logs():
    """Clear all sync logs"""
    session = get_session()
    try:
        # Bulk-delete every SyncLog row.
        session.query(SyncLog).delete()
        session.commit()
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to clear logs: {str(e)}")
    finally:
        session.close()
    return {"message": "Logs cleared successfully"}
|
|
|
|
@router.post("/activities/{activity_id}/reprocess")
async def reprocess_activity(activity_id: int):
    """Reprocess a single activity to update metrics.

    Raises:
        HTTPException: 404 if the activity does not exist; 500 if
            reprocessing fails for any other reason.
    """
    from garminsync.database import Activity, get_session
    from garminsync.activity_parser import get_activity_metrics

    session = get_session()
    try:
        activity = session.query(Activity).get(activity_id)
        if not activity:
            raise HTTPException(status_code=404, detail="Activity not found")

        metrics = get_activity_metrics(activity, force_reprocess=True)
        if metrics:
            # Only overwrite a field when the parser produced a value;
            # otherwise keep whatever is already stored.
            activity.activity_type = metrics.get("activityType", {}).get("typeKey")
            activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
            activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
            activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
            activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
            activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
            activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories

            # Mark as reprocessed
            activity.reprocessed = True

        session.commit()
        return {"message": f"Activity {activity_id} reprocessed successfully"}
    except HTTPException:
        # Re-raise as-is: the broad handler below previously converted
        # the 404 above into a misleading 500.
        raise
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Reprocessing failed: {str(e)}")
    finally:
        session.close()
|
|
|
|
@router.post("/reprocess")
async def reprocess_activities(all: bool = False):
    """Reprocess all activities or just missing ones"""
    from garminsync.daemon import daemon_instance

    try:
        # Hand the reprocess job off to the daemon; it runs in the background.
        # NOTE(review): the `all` query flag is not forwarded to the daemon —
        # confirm whether reprocess_activities() should receive it.
        daemon_instance.reprocess_activities()
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to start reprocess job: {str(e)}")
    return {"message": "Reprocess job started in background"}
|
|
|
|
|
|
@router.get("/activities")
async def get_activities(
    page: int = 1,
    per_page: int = 50,
    activity_type: Optional[str] = None,
    date_from: Optional[str] = None,
    date_to: Optional[str] = None,
):
    """Get paginated activities with filtering.

    Args:
        page: 1-based page number.
        per_page: Number of activities per page.
        activity_type: If given, only return activities of this type.
        date_from: Inclusive lower bound on start_time.
        date_to: Inclusive upper bound on start_time.

    Returns:
        Dict with the page of activities plus pagination metadata.
    """
    session = get_session()
    try:
        query = session.query(Activity)

        # Apply optional filters.
        if activity_type:
            query = query.filter(Activity.activity_type == activity_type)
        if date_from:
            query = query.filter(Activity.start_time >= date_from)
        if date_to:
            query = query.filter(Activity.start_time <= date_to)

        # Total count before pagination so clients can compute page count.
        total = query.count()

        activities = (
            query.order_by(Activity.start_time.desc())
            .offset((page - 1) * per_page)
            .limit(per_page)
            .all()
        )

        # Serialize to plain dicts before the session closes.
        activity_data = [
            {
                "activity_id": activity.activity_id,
                "start_time": activity.start_time,
                "activity_type": activity.activity_type,
                "duration": activity.duration,
                "distance": activity.distance,
                "max_heart_rate": activity.max_heart_rate,
                "avg_heart_rate": activity.avg_heart_rate,
                "avg_power": activity.avg_power,
                "calories": activity.calories,
                "filename": activity.filename,
                "downloaded": activity.downloaded,
                "created_at": activity.created_at,
                "last_sync": activity.last_sync,
            }
            for activity in activities
        ]

        return {
            "activities": activity_data,
            "total": total,
            "page": page,
            "per_page": per_page,
        }
    finally:
        session.close()
|
|
|
|
|
|
@router.get("/activities/{activity_id}")
async def get_activity_details(activity_id: int):
    """Get detailed activity information"""
    session = get_session()
    try:
        record = (
            session.query(Activity)
            .filter(Activity.activity_id == activity_id)
            .first()
        )
        if record is None:
            raise HTTPException(
                status_code=404, detail=f"Activity with ID {activity_id} not found"
            )

        return {
            "id": record.activity_id,
            "name": record.filename or "Unnamed Activity",
            "distance": record.distance,
            "duration": record.duration,
            "start_time": record.start_time,
            "activity_type": record.activity_type,
            "max_heart_rate": record.max_heart_rate,
            "avg_power": record.avg_power,
            "calories": record.calories,
            "filename": record.filename,
            "downloaded": record.downloaded,
            "created_at": record.created_at,
            "last_sync": record.last_sync,
        }
    finally:
        session.close()
|
|
|
|
|
|
@router.get("/dashboard/stats")
async def get_dashboard_stats():
    """Get comprehensive dashboard statistics"""
    # Imported here to avoid circular imports.
    from garminsync.database import get_offline_stats

    stats = get_offline_stats()
    return stats
|
|
|
|
|
|
@router.get("/api/activities")
async def get_api_activities(page: int = 1, per_page: int = 10):
    """Get paginated activities for API.

    NOTE(review): with the router's "/api" prefix this endpoint is
    served at "/api/api/activities" — confirm the double prefix is
    intentional.

    Raises:
        HTTPException: 404 when the requested page has no activities;
            500 on any other failure.
    """
    session = get_session()
    try:
        # Use the existing get_paginated method from Activity class.
        pagination = Activity.get_paginated(page, per_page)
        activities = pagination.items

        if not activities:
            # Distinguish "page past the end" from "no data at all".
            # (The original had three overlapping checks here; the last
            # two produced the same 404, so they are collapsed.)
            if page > 1:
                raise HTTPException(
                    status_code=404, detail=f"No activities found for page {page}"
                )
            raise HTTPException(status_code=404, detail="No activities found")

        return {
            "activities": [
                {
                    "id": activity.activity_id,
                    "name": activity.filename or "Unnamed Activity",
                    "distance": activity.distance,
                    "duration": activity.duration,
                    "start_time": activity.start_time,
                    "activity_type": activity.activity_type,
                    "max_heart_rate": activity.max_heart_rate,
                    "avg_power": activity.avg_power,
                    "calories": activity.calories,
                    "downloaded": activity.downloaded,
                    "created_at": activity.created_at,
                    "last_sync": activity.last_sync,
                    "device": activity.device or "Unknown",
                    "intensity": activity.intensity or "Unknown",
                    "average_speed": activity.average_speed,
                    "elevation_gain": activity.elevation_gain,
                    "heart_rate_zones": activity.heart_rate_zones or [],
                    "power_zones": activity.power_zones or [],
                    "training_effect": activity.training_effect or 0,
                    "training_effect_label": activity.training_effect_label
                    or "Unknown",
                }
                for activity in activities
            ],
            "total_pages": pagination.pages,
            "current_page": pagination.page,
            "total_items": pagination.total,
            "page_size": per_page,
            "status": "success",
        }
    except HTTPException:
        # Propagate the 404s above instead of wrapping them in a 500.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"An error occurred while fetching activities: {str(e)}",
        )
    finally:
        session.close()
|