Mirror of https://github.com/sstent/FitTrack_ReportGenerator.git (synced 2026-01-29 18:41:59 +00:00)
feat: Initial implementation of FitTrack Report Generator
This commit introduces the initial version of the FitTrack Report Generator, a FastAPI application for analyzing workout files. Key features include:

- Parsing of FIT, TCX, and GPX workout files.
- Analysis of power, heart rate, speed, and elevation data.
- Generation of summary reports and charts.
- REST API for single and batch workout analysis.

The project structure has been set up with a `src` directory for core logic, an `api` directory for the FastAPI application, and a `tests` directory for unit, integration, and contract tests. The development workflow is configured to use Docker and modern Python tooling.
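As a quick orientation for the REST API mentioned above, the sketch below shows how a client might submit a single workout file for analysis. It is a minimal illustration only: the endpoint path, port, and response shape are assumptions for the sake of the example and are not defined by this commit.

# Hypothetical client sketch -- the endpoint path and response fields are assumed,
# not taken from this commit.
import requests

def analyze_workout(path: str, base_url: str = "http://localhost:8000") -> dict:
    """Upload one FIT/TCX/GPX file and return the summary JSON."""
    with open(path, "rb") as f:
        resp = requests.post(f"{base_url}/api/v1/workouts/analyze", files={"file": f})
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    print(analyze_workout("ride.fit"))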
0  examples/GarminSync/garminsync/__init__.py  Normal file
BIN  examples/GarminSync/garminsync/__pycache__/cli.cpython-310.pyc  Normal file
Binary file not shown.
251  examples/GarminSync/garminsync/activity_parser.py  Normal file
@@ -0,0 +1,251 @@
import os
import gzip
import fitdecode
import xml.etree.ElementTree as ET
import numpy as np
from .fit_processor.power_estimator import PowerEstimator
from .fit_processor.gear_analyzer import SinglespeedAnalyzer
from math import radians, sin, cos, sqrt, atan2


def detect_file_type(file_path):
    """Detect file format (FIT, XML, or unknown)"""
    try:
        with open(file_path, 'rb') as f:
            header = f.read(128)
        if b'<?xml' in header[:20]:
            return 'xml'
        if len(header) >= 8 and header[4:8] == b'.FIT':
            return 'fit'
        if (len(header) >= 8 and
                (header[0:4] == b'.FIT' or
                 header[4:8] == b'FIT.' or
                 header[8:12] == b'.FIT')):
            return 'fit'
        return 'unknown'
    except Exception:
        return 'error'


def parse_xml_file(file_path):
    """Parse XML (TCX) file to extract activity metrics"""
    try:
        tree = ET.parse(file_path)
        root = tree.getroot()
        namespaces = {'ns': 'http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2'}

        sport = root.find('.//ns:Activity', namespaces).get('Sport', 'other')
        distance = root.find('.//ns:DistanceMeters', namespaces)
        distance = float(distance.text) if distance is not None else None
        duration = root.find('.//ns:TotalTimeSeconds', namespaces)
        duration = float(duration.text) if duration is not None else None
        calories = root.find('.//ns:Calories', namespaces)
        calories = int(calories.text) if calories is not None else None

        hr_values = []
        for hr in root.findall('.//ns:HeartRateBpm/ns:Value', namespaces):
            try:
                hr_values.append(int(hr.text))
            except (TypeError, ValueError):
                continue
        max_hr = max(hr_values) if hr_values else None

        return {
            "activityType": {"typeKey": sport},
            "summaryDTO": {
                "duration": duration,
                "distance": distance,
                "maxHR": max_hr,
                "avgPower": None,
                "calories": calories
            }
        }
    except Exception:
        return None


def compute_gradient(altitudes, positions, distance_m=10):
    """Compute gradient percentage for each point using elevation changes"""
    if len(altitudes) < 2:
        return [0] * len(altitudes)

    gradients = []
    for i in range(1, len(altitudes)):
        elev_change = altitudes[i] - altitudes[i-1]
        if positions and i < len(positions):
            distance = distance_between_points(positions[i-1], positions[i])
        else:
            distance = distance_m
        gradients.append((elev_change / distance) * 100)

    return [gradients[0]] + gradients


def distance_between_points(point1, point2):
    """Calculate distance between two (lat, lon) points in meters using Haversine"""
    R = 6371000  # Earth radius in meters

    lat1, lon1 = radians(point1[0]), radians(point1[1])
    lat2, lon2 = radians(point2[0]), radians(point2[1])

    dlat = lat2 - lat1
    dlon = lon2 - lon1

    a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
    c = 2 * atan2(sqrt(a), sqrt(1-a))

    return R * c


def parse_fit_file(file_path):
    """Parse FIT file to extract activity metrics and detailed cycling data"""
    metrics = {}
    detailed_metrics = {
        'speeds': [], 'cadences': [], 'altitudes': [],
        'positions': [], 'gradients': [], 'powers': [], 'timestamps': []
    }

    power_estimator = PowerEstimator()
    gear_analyzer = SinglespeedAnalyzer()

    try:
        with open(file_path, 'rb') as f:
            magic = f.read(2)
            f.seek(0)
            is_gzipped = magic == b'\x1f\x8b'

        if is_gzipped:
            with gzip.open(file_path, 'rb') as gz_file:
                from io import BytesIO
                with BytesIO(gz_file.read()) as fit_data:
                    fit = fitdecode.FitReader(fit_data)
                    for frame in fit:
                        if frame.frame_type == fitdecode.FrameType.DATA:
                            if frame.name == 'record':
                                if timestamp := frame.get_value('timestamp'):
                                    detailed_metrics['timestamps'].append(timestamp)
                                if (lat := frame.get_value('position_lat')) and (lon := frame.get_value('position_long')):
                                    detailed_metrics['positions'].append((lat, lon))
                                if altitude := frame.get_value('altitude'):
                                    detailed_metrics['altitudes'].append(altitude)
                                if speed := frame.get_value('speed'):
                                    detailed_metrics['speeds'].append(speed)
                                if cadence := frame.get_value('cadence'):
                                    detailed_metrics['cadences'].append(cadence)
                                if power := frame.get_value('power'):
                                    detailed_metrics['powers'].append(power)

                            elif frame.name == 'session':
                                metrics = {
                                    "sport": frame.get_value("sport"),
                                    "total_timer_time": frame.get_value("total_timer_time"),
                                    "total_distance": frame.get_value("total_distance"),
                                    "max_heart_rate": frame.get_value("max_heart_rate"),
                                    "avg_power": frame.get_value("avg_power"),
                                    "total_calories": frame.get_value("total_calories")
                                }
        else:
            with fitdecode.FitReader(file_path) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FrameType.DATA:
                        if frame.name == 'record':
                            if timestamp := frame.get_value('timestamp'):
                                detailed_metrics['timestamps'].append(timestamp)
                            if (lat := frame.get_value('position_lat')) and (lon := frame.get_value('position_long')):
                                detailed_metrics['positions'].append((lat, lon))
                            if altitude := frame.get_value('altitude'):
                                detailed_metrics['altitudes'].append(altitude)
                            if speed := frame.get_value('speed'):
                                detailed_metrics['speeds'].append(speed)
                            if cadence := frame.get_value('cadence'):
                                detailed_metrics['cadences'].append(cadence)
                            if power := frame.get_value('power'):
                                detailed_metrics['powers'].append(power)

                        elif frame.name == 'session':
                            metrics = {
                                "sport": frame.get_value("sport"),
                                "total_timer_time": frame.get_value("total_timer_time"),
                                "total_distance": frame.get_value("total_distance"),
                                "max_heart_rate": frame.get_value("max_heart_rate"),
                                "avg_power": frame.get_value("avg_power"),
                                "total_calories": frame.get_value("total_calories")
                            }

        # Compute gradients if data available
        if detailed_metrics['altitudes']:
            detailed_metrics['gradients'] = compute_gradient(
                detailed_metrics['altitudes'],
                detailed_metrics['positions']
            )

        # Process cycling-specific metrics
        if metrics.get('sport') in ['cycling', 'road_biking', 'mountain_biking']:
            # Estimate power if not present
            if not detailed_metrics['powers']:
                for speed, gradient in zip(detailed_metrics['speeds'], detailed_metrics['gradients']):
                    estimated_power = power_estimator.calculate_power(speed, gradient)
                    detailed_metrics['powers'].append(estimated_power)
                metrics['avg_power'] = np.mean(detailed_metrics['powers']) if detailed_metrics['powers'] else None

            # Run gear analysis
            if detailed_metrics['speeds'] and detailed_metrics['cadences']:
                gear_analysis = gear_analyzer.analyze_gear_ratio(
                    detailed_metrics['speeds'],
                    detailed_metrics['cadences'],
                    detailed_metrics['gradients']
                )
                metrics['gear_analysis'] = gear_analysis or {}

        return {
            "activityType": {"typeKey": metrics.get("sport", "other")},
            "summaryDTO": {
                "duration": metrics.get("total_timer_time"),
                "distance": metrics.get("total_distance"),
                "maxHR": metrics.get("max_heart_rate"),
                "avgPower": metrics.get("avg_power"),
                "calories": metrics.get("total_calories"),
                "gearAnalysis": metrics.get("gear_analysis", {})
            },
            "detailedMetrics": detailed_metrics
        }
    except Exception as e:
        print(f"Error parsing FIT file: {str(e)}")
        return None


def get_activity_metrics(activity, client=None, force_reprocess=False):
    """
    Get activity metrics from local file or Garmin API

    :param activity: Activity object
    :param client: Optional GarminClient instance
    :param force_reprocess: If True, re-process file even if already parsed
    :return: Activity metrics dictionary
    """
    metrics = None
    # Always re-process if force_reprocess is True
    if force_reprocess and activity.filename and os.path.exists(activity.filename):
        file_type = detect_file_type(activity.filename)
        try:
            if file_type == 'fit':
                metrics = parse_fit_file(activity.filename)
            elif file_type == 'xml':
                metrics = parse_xml_file(activity.filename)
        except Exception as e:
            print(f"Error parsing activity file: {str(e)}")

    # Only parse if metrics not already obtained through force_reprocess
    if not metrics:
        if activity.filename and os.path.exists(activity.filename):
            file_type = detect_file_type(activity.filename)
            try:
                if file_type == 'fit':
                    metrics = parse_fit_file(activity.filename)
                elif file_type == 'xml':
                    metrics = parse_xml_file(activity.filename)
            except Exception as e:
                print(f"Error parsing activity file: {str(e)}")

    if not metrics and client:
        try:
            metrics = client.get_activity_details(activity.activity_id)
        except Exception as e:
            print(f"Error fetching activity from API: {str(e)}")

    # Return summary DTO for compatibility
    return metrics.get("summaryDTO") if metrics and "summaryDTO" in metrics else metrics
369  examples/GarminSync/garminsync/cli.py  Normal file
@@ -0,0 +1,369 @@
import os

import typer
from typing_extensions import Annotated

from .config import load_config

# Initialize environment variables
load_config()

app = typer.Typer(
    help="GarminSync - Download Garmin Connect activities", rich_markup_mode=None
)


@app.command("list")
def list_activities(
    all_activities: Annotated[
        bool, typer.Option("--all", help="List all activities")
    ] = False,
    missing: Annotated[
        bool, typer.Option("--missing", help="List missing activities")
    ] = False,
    downloaded: Annotated[
        bool, typer.Option("--downloaded", help="List downloaded activities")
    ] = False,
    offline: Annotated[
        bool, typer.Option("--offline", help="Work offline without syncing")
    ] = False,
):
    """List activities based on specified filters"""
    from tqdm import tqdm

    from .database import (Activity, get_offline_stats, get_session,
                           sync_database)
    from .garmin import GarminClient

    # Validate input
    if not any([all_activities, missing, downloaded]):
        typer.echo(
            "Error: Please specify at least one filter option (--all, --missing, --downloaded)"
        )
        raise typer.Exit(code=1)

    try:
        client = GarminClient()
        session = get_session()

        if not offline:
            # Sync database with latest activities
            typer.echo("Syncing activities from Garmin Connect...")
            sync_database(client)
        else:
            # Show offline status with last sync info
            stats = get_offline_stats()
            typer.echo(
                f"Working in offline mode - using cached data (last sync: {stats['last_sync']})"
            )

        # Build query based on filters
        query = session.query(Activity)

        if all_activities:
            pass  # Return all activities
        elif missing:
            query = query.filter_by(downloaded=False)
        elif downloaded:
            query = query.filter_by(downloaded=True)

        # Execute query and display results
        activities = query.all()
        if not activities:
            typer.echo("No activities found matching your criteria")
            return

        # Display results with progress bar
        typer.echo(f"Found {len(activities)} activities:")
        for activity in tqdm(activities, desc="Listing activities"):
            status = "Downloaded" if activity.downloaded else "Missing"
            typer.echo(
                f"- ID: {activity.activity_id}, Start: {activity.start_time}, Status: {status}"
            )

    except Exception as e:
        typer.echo(f"Error: {str(e)}")
        raise typer.Exit(code=1)
    finally:
        if "session" in locals():
            session.close()


@app.command("download")
def download(
    missing: Annotated[
        bool, typer.Option("--missing", help="Download missing activities")
    ] = False,
):
    """Download activities based on specified filters"""
    from pathlib import Path

    from tqdm import tqdm

    from .database import Activity, get_session
    from .garmin import GarminClient

    # Validate input
    if not missing:
        typer.echo("Error: Currently only --missing downloads are supported")
        raise typer.Exit(code=1)

    try:
        client = GarminClient()
        session = get_session()

        # Sync database with latest activities
        typer.echo("Syncing activities from Garmin Connect...")
        from .database import sync_database

        sync_database(client)

        # Get missing activities
        activities = session.query(Activity).filter_by(downloaded=False).all()
        if not activities:
            typer.echo("No missing activities found")
            return

        # Create data directory if it doesn't exist
        data_dir = Path(os.getenv("DATA_DIR", "data"))
        data_dir.mkdir(parents=True, exist_ok=True)

        # Download activities with progress bar
        typer.echo(f"Downloading {len(activities)} missing activities...")
        for activity in tqdm(activities, desc="Downloading"):
            try:
                # Download FIT data
                fit_data = client.download_activity_fit(activity.activity_id)

                # Create filename-safe timestamp
                timestamp = activity.start_time.replace(":", "-").replace(" ", "_")
                filename = f"activity_{activity.activity_id}_{timestamp}.fit"
                filepath = data_dir / filename

                # Save file
                with open(filepath, "wb") as f:
                    f.write(fit_data)

                # Update database
                activity.filename = str(filepath)
                activity.downloaded = True
                session.commit()

            except Exception as e:
                typer.echo(
                    f"Error downloading activity {activity.activity_id}: {str(e)}"
                )
                session.rollback()

        typer.echo("Download completed successfully")

    except Exception as e:
        typer.echo(f"Error: {str(e)}")
        raise typer.Exit(code=1)
    finally:
        if "session" in locals():
            session.close()


@app.command("daemon")
def daemon_mode(
    start: Annotated[bool, typer.Option("--start", help="Start daemon")] = False,
    stop: Annotated[bool, typer.Option("--stop", help="Stop daemon")] = False,
    status: Annotated[
        bool, typer.Option("--status", help="Show daemon status")
    ] = False,
    port: Annotated[int, typer.Option("--port", help="Web UI port")] = 8080,
    run_migrations: Annotated[
        bool,
        typer.Option(
            "--run-migrations/--skip-migrations",
            help="Run database migrations on startup (default: run)"
        )
    ] = True,
):
    """Daemon mode operations"""
    from .daemon import GarminSyncDaemon

    if start:
        daemon = GarminSyncDaemon()
        daemon.start(web_port=port, run_migrations=run_migrations)
    elif stop:
        # Implementation for stopping daemon (PID file or signal)
        typer.echo("Stopping daemon...")
        # TODO: Implement stop (we can use a PID file to stop the daemon)
        typer.echo("Daemon stop not implemented yet")
    elif status:
        # Show current daemon status
        typer.echo("Daemon status not implemented yet")
    else:
        typer.echo("Please specify one of: --start, --stop, --status")


@app.command("migrate")
def migrate_activities():
    """Migrate database to add new activity fields"""
    from .migrate_activities import migrate_activities as run_migration

    typer.echo("Starting database migration...")
    success = run_migration()
    if success:
        typer.echo("Database migration completed successfully!")
    else:
        typer.echo("Database migration failed!")
        raise typer.Exit(code=1)


@app.command("analyze")
def analyze_activities(
    activity_id: Annotated[int, typer.Option("--activity-id", help="Activity ID to analyze")] = None,
    missing: Annotated[bool, typer.Option("--missing", help="Analyze all cycling activities missing analysis")] = False,
    cycling: Annotated[bool, typer.Option("--cycling", help="Run cycling-specific analysis")] = False,
):
    """Analyze activity data for cycling metrics"""
    from tqdm import tqdm
    from .database import Activity, get_session
    from .activity_parser import get_activity_metrics

    if not cycling:
        typer.echo("Error: Currently only cycling analysis is supported")
        raise typer.Exit(code=1)

    session = get_session()
    activities = []

    if activity_id:
        activity = session.query(Activity).get(activity_id)
        if not activity:
            typer.echo(f"Error: Activity with ID {activity_id} not found")
            raise typer.Exit(code=1)
        activities = [activity]
    elif missing:
        activities = session.query(Activity).filter(
            Activity.activity_type == 'cycling',
            Activity.analyzed == False  # Only unanalyzed activities
        ).all()
        if not activities:
            typer.echo("No unanalyzed cycling activities found")
            return
    else:
        typer.echo("Error: Please specify --activity-id or --missing")
        raise typer.Exit(code=1)

    typer.echo(f"Analyzing {len(activities)} cycling activities...")
    for activity in tqdm(activities, desc="Processing"):
        metrics = get_activity_metrics(activity)
        if metrics and "gearAnalysis" in metrics:
            # Update activity with analysis results
            activity.analyzed = True
            activity.gear_ratio = metrics["gearAnalysis"].get("gear_ratio")
            activity.gear_inches = metrics["gearAnalysis"].get("gear_inches")
            # Add other metrics as needed
            session.commit()

    typer.echo("Analysis completed successfully")


@app.command("reprocess")
def reprocess_activities(
    all: Annotated[bool, typer.Option("--all", help="Reprocess all activities")] = False,
    missing: Annotated[bool, typer.Option("--missing", help="Reprocess activities missing metrics")] = False,
    activity_id: Annotated[int, typer.Option("--activity-id", help="Reprocess specific activity by ID")] = None,
):
    """Reprocess activities to calculate missing metrics"""
    from tqdm import tqdm
    from .database import Activity, get_session
    from .activity_parser import get_activity_metrics

    session = get_session()
    activities = []

    if activity_id:
        activity = session.query(Activity).get(activity_id)
        if not activity:
            typer.echo(f"Error: Activity with ID {activity_id} not found")
            raise typer.Exit(code=1)
        activities = [activity]
    elif missing:
        activities = session.query(Activity).filter(
            Activity.reprocessed == False
        ).all()
        if not activities:
            typer.echo("No activities to reprocess")
            return
    elif all:
        activities = session.query(Activity).filter(
            Activity.downloaded == True
        ).all()
        if not activities:
            typer.echo("No downloaded activities found")
            return
    else:
        typer.echo("Error: Please specify one of: --all, --missing, --activity-id")
        raise typer.Exit(code=1)

    typer.echo(f"Reprocessing {len(activities)} activities...")
    for activity in tqdm(activities, desc="Reprocessing"):
        # Use force_reprocess=True to ensure we parse the file again
        metrics = get_activity_metrics(activity, force_reprocess=True)

        # Update activity metrics
        if metrics:
            activity.activity_type = metrics.get("activityType", {}).get("typeKey")
            activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
            activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
            activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
            activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
            activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
            activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories

        # Mark as reprocessed
        activity.reprocessed = True
        session.commit()

    typer.echo("Reprocessing completed")


@app.command("report")
def generate_report(
    power_analysis: Annotated[bool, typer.Option("--power-analysis", help="Generate power metrics report")] = False,
    gear_analysis: Annotated[bool, typer.Option("--gear-analysis", help="Generate gear analysis report")] = False,
):
    """Generate performance reports for cycling activities"""
    from .database import Activity, get_session
    from .web import app as web_app

    if not any([power_analysis, gear_analysis]):
        typer.echo("Error: Please specify at least one report type")
        raise typer.Exit(code=1)

    session = get_session()
    activities = session.query(Activity).filter(
        Activity.activity_type == 'cycling',
        Activity.analyzed == True
    ).all()

    if not activities:
        typer.echo("No analyzed cycling activities found")
        return

    # Simple CLI report - real implementation would use web UI
    typer.echo("Cycling Analysis Report")
    typer.echo("=======================")

    for activity in activities:
        typer.echo(f"\nActivity ID: {activity.activity_id}")
        typer.echo(f"Date: {activity.start_time}")

        if power_analysis:
            typer.echo(f"- Average Power: {activity.avg_power}W")
            # Add other power metrics as needed

        if gear_analysis:
            typer.echo(f"- Gear Ratio: {activity.gear_ratio}")
            typer.echo(f"- Gear Inches: {activity.gear_inches}")

    typer.echo("\nFull reports available in the web UI at http://localhost:8080")


def main():
    app()


if __name__ == "__main__":
    main()
18  examples/GarminSync/garminsync/config.py  Normal file
@@ -0,0 +1,18 @@
import os

from dotenv import load_dotenv


def load_config():
    """Load environment variables from .env file"""
    load_dotenv()


class Config:
    GARMIN_EMAIL = os.getenv("GARMIN_EMAIL")
    GARMIN_PASSWORD = os.getenv("GARMIN_PASSWORD")

    @classmethod
    def validate(cls):
        if not cls.GARMIN_EMAIL or not cls.GARMIN_PASSWORD:
            raise ValueError("Missing GARMIN_EMAIL or GARMIN_PASSWORD in environment")
450  examples/GarminSync/garminsync/daemon.py  Normal file
@@ -0,0 +1,450 @@
import os
import signal
import concurrent.futures
import time
from datetime import datetime
from queue import Empty, PriorityQueue
import threading

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger

from .database import Activity, DaemonConfig, SyncLog, get_legacy_session, init_db, get_offline_stats
from .garmin import GarminClient
from .utils import logger
from .activity_parser import get_activity_metrics

# Priority levels: 1=High (API requests), 2=Medium (Sync jobs), 3=Low (Reprocessing)
PRIORITY_HIGH = 1
PRIORITY_MEDIUM = 2
PRIORITY_LOW = 3


class GarminSyncDaemon:
    def __init__(self):
        self.scheduler = BackgroundScheduler()
        self.running = False
        self.web_server = None
        # Process pool for CPU-bound tasks
        self.executor = concurrent.futures.ProcessPoolExecutor(
            max_workers=os.cpu_count() - 1 or 1
        )
        # Priority queue for task scheduling
        self.task_queue = PriorityQueue()
        # Worker thread for processing tasks
        self.worker_thread = threading.Thread(target=self._process_tasks, daemon=True)
        # Lock for database access during migration
        self.db_lock = threading.Lock()
        # Thread lock to prevent concurrent sync operations
        self.sync_lock = threading.Lock()
        self.sync_in_progress = False

    def start(self, web_port=8888, run_migrations=True):
        """Start daemon with scheduler and web UI"""
        try:
            # Initialize database (synchronous)
            with self.db_lock:
                init_db()

            # Set migration flag for entrypoint
            if run_migrations:
                os.environ['RUN_MIGRATIONS'] = "1"
            else:
                os.environ['RUN_MIGRATIONS'] = "0"

            # Start task processing worker
            # (set the running flag before the worker starts so its loop does not exit immediately)
            self.running = True
            self.worker_thread.start()

            # Load configuration from database
            config_data = self.load_config()

            # Setup scheduled jobs
            if config_data["enabled"]:
                # Sync job
                cron_str = config_data["schedule_cron"]
                try:
                    # Validate cron string
                    if not cron_str or len(cron_str.strip().split()) != 5:
                        logger.error(
                            f"Invalid cron schedule: '{cron_str}'. Using default '0 */6 * * *'"
                        )
                        cron_str = "0 */6 * * *"

                    self.scheduler.add_job(
                        func=self._enqueue_sync,
                        trigger=CronTrigger.from_crontab(cron_str),
                        id="sync_job",
                        replace_existing=True,
                    )
                    logger.info(f"Sync job scheduled with cron: '{cron_str}'")
                except Exception as e:
                    logger.error(f"Failed to create sync job: {str(e)}")
                    # Fallback to default schedule
                    self.scheduler.add_job(
                        func=self._enqueue_sync,
                        trigger=CronTrigger.from_crontab("0 */6 * * *"),
                        id="sync_job",
                        replace_existing=True,
                    )
                    logger.info("Using default schedule for sync job: '0 */6 * * *'")

                # Reprocess job - run daily at 2 AM
                reprocess_cron = "0 2 * * *"
                try:
                    self.scheduler.add_job(
                        func=self._enqueue_reprocess,
                        trigger=CronTrigger.from_crontab(reprocess_cron),
                        id="reprocess_job",
                        replace_existing=True,
                    )
                    logger.info(f"Reprocess job scheduled with cron: '{reprocess_cron}'")
                except Exception as e:
                    logger.error(f"Failed to create reprocess job: {str(e)}")

            # Start scheduler
            self.scheduler.start()

            # Update daemon status to running
            self.update_daemon_status("running")

            # Start web UI in separate thread
            self.start_web_ui(web_port)

            # Setup signal handlers for graceful shutdown
            signal.signal(signal.SIGINT, self.signal_handler)
            signal.signal(signal.SIGTERM, self.signal_handler)

            logger.info(
                f"Daemon started. Web UI available at http://localhost:{web_port}"
            )

            # Keep daemon running
            while self.running:
                time.sleep(1)

        except Exception as e:
            logger.error(f"Failed to start daemon: {str(e)}")
            self.update_daemon_status("error")
            self.stop()

    def _enqueue_sync(self):
        """Enqueue sync job with medium priority"""
        self.task_queue.put((PRIORITY_MEDIUM, ("sync", None)))
        logger.debug("Enqueued sync job")

    def _enqueue_reprocess(self):
        """Enqueue reprocess job with low priority"""
        self.task_queue.put((PRIORITY_LOW, ("reprocess", None)))
        logger.debug("Enqueued reprocess job")

    def _process_tasks(self):
        """Worker thread to process tasks from the priority queue"""
        logger.info("Task worker started")
        while self.running:
            try:
                priority, (task_type, data) = self.task_queue.get(timeout=1)
                logger.info(f"Processing {task_type} task (priority {priority})")

                if task_type == "sync":
                    self._execute_in_process_pool(self.sync_and_download)
                elif task_type == "reprocess":
                    self._execute_in_process_pool(self.reprocess_activities)
                elif task_type == "api":
                    # Placeholder for high-priority API tasks
                    logger.debug(f"Processing API task: {data}")

                self.task_queue.task_done()
            except Empty:
                # An empty queue within the timeout window is normal; keep polling
                continue
            except Exception as e:
                logger.error(f"Task processing error: {str(e)}")
        logger.info("Task worker stopped")

    def _execute_in_process_pool(self, func):
        """Execute function in process pool and handle results"""
        try:
            future = self.executor.submit(func)
            # Block until done to maintain task order but won't block main thread
            result = future.result()
            logger.debug(f"Process pool task completed: {result}")
        except Exception as e:
            logger.error(f"Process pool task failed: {str(e)}")

    def sync_and_download(self):
        """Scheduled job function (run in process pool)"""
        # Check if sync is already in progress
        if not self.sync_lock.acquire(blocking=False):
            logger.info("Sync already in progress, skipping this run")
            return

        session = None  # initialized up front so the finally block is safe if setup fails
        try:
            self.sync_in_progress = True
            self.log_operation("sync", "started")

            # Import here to avoid circular imports
            from .database import sync_database
            from .garmin import GarminClient

            # Perform sync and download
            client = GarminClient()

            # Sync database first
            with self.db_lock:
                sync_database(client)

            # Download missing activities
            downloaded_count = 0
            session = get_legacy_session()
            missing_activities = (
                session.query(Activity).filter_by(downloaded=False).all()
            )

            for activity in missing_activities:
                try:
                    # Download FIT file
                    fit_data = client.download_activity_fit(activity.activity_id)

                    # Save to file
                    import os
                    from pathlib import Path
                    data_dir = Path(os.getenv("DATA_DIR", "data"))
                    data_dir.mkdir(parents=True, exist_ok=True)
                    timestamp = activity.start_time.replace(":", "-").replace(" ", "_")
                    filename = f"activity_{activity.activity_id}_{timestamp}.fit"
                    filepath = data_dir / filename

                    with open(filepath, "wb") as f:
                        f.write(fit_data)

                    # Update activity record
                    activity.filename = str(filepath)
                    activity.downloaded = True
                    activity.last_sync = datetime.now().isoformat()

                    # Get metrics immediately after download
                    metrics = get_activity_metrics(activity, client)
                    if metrics:
                        # Update metrics if available
                        activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                        activity.duration = int(float(metrics.get("duration", 0)))
                        activity.distance = float(metrics.get("distance", 0))
                        activity.max_heart_rate = int(float(metrics.get("maxHR", 0)))
                        activity.avg_power = float(metrics.get("avgPower", 0)))
                        activity.calories = int(float(metrics.get("calories", 0)))

                    session.commit()
                    downloaded_count += 1

                except Exception as e:
                    logger.error(
                        f"Failed to download activity {activity.activity_id}: {e}"
                    )
                    session.rollback()

            self.log_operation(
                "sync", "success",
                f"Downloaded {downloaded_count} new activities and updated metrics"
            )

            # Update last run time
            self.update_daemon_last_run()

        except Exception as e:
            logger.error(f"Sync failed: {e}")
            self.log_operation("sync", "error", str(e))
        finally:
            self.sync_in_progress = False
            self.sync_lock.release()
            if session:
                session.close()

    def load_config(self):
        """Load daemon configuration from database and return dict"""
        session = get_session()
        try:
            config = session.query(DaemonConfig).first()
            if not config:
                # Create default configuration with explicit cron schedule
                config = DaemonConfig(
                    schedule_cron="0 */6 * * *", enabled=True, status="stopped"
                )
                session.add(config)
                session.commit()
                session.refresh(config)  # Ensure we have the latest data

            # Return configuration as dictionary to avoid session issues
            return {
                "id": config.id,
                "enabled": config.enabled,
                "schedule_cron": config.schedule_cron,
                "last_run": config.last_run,
                "next_run": config.next_run,
                "status": config.status,
            }
        finally:
            session.close()

    def update_daemon_status(self, status):
        """Update daemon status in database"""
        session = get_session()
        try:
            config = session.query(DaemonConfig).first()
            if not config:
                config = DaemonConfig()
                session.add(config)

            config.status = status
            session.commit()
        finally:
            session.close()

    def update_daemon_last_run(self):
        """Update daemon last run timestamp"""
        session = get_session()
        try:
            config = session.query(DaemonConfig).first()
            if config:
                config.last_run = datetime.now().isoformat()
                session.commit()
        finally:
            session.close()

    def start_web_ui(self, port):
        """Start FastAPI web server in a separate thread"""
        try:
            import uvicorn
            from .web.app import app

            # Add shutdown hook to stop worker thread
            @app.on_event("shutdown")
            def shutdown_event():
                logger.info("Web server shutting down")
                self.running = False
                self.worker_thread.join(timeout=5)

            def run_server():
                try:
                    # Use async execution model for better concurrency
                    config = uvicorn.Config(
                        app,
                        host="0.0.0.0",
                        port=port,
                        log_level="info",
                        workers=1,
                        loop="asyncio"
                    )
                    server = uvicorn.Server(config)
                    server.run()
                except Exception as e:
                    logger.error(f"Failed to start web server: {e}")

            web_thread = threading.Thread(target=run_server, daemon=True)
            web_thread.start()
            self.web_server = web_thread
        except ImportError as e:
            logger.warning(f"Could not start web UI: {e}")

    def signal_handler(self, signum, frame):
        """Handle shutdown signals"""
        logger.info("Received shutdown signal, stopping daemon...")
        self.stop()

    def is_sync_in_progress(self):
        """Check if sync operation is currently running"""
        return self.sync_in_progress

    def stop(self):
        """Stop daemon and clean up resources"""
        if self.scheduler.running:
            self.scheduler.shutdown()
        self.running = False
        self.update_daemon_status("stopped")
        self.log_operation("daemon", "stopped", "Daemon shutdown completed")
        logger.info("Daemon stopped")

    def log_operation(self, operation, status, message=None):
        """Log sync operation to database"""
        session = get_session()
        try:
            log = SyncLog(
                timestamp=datetime.now().isoformat(),
                operation=operation,
                status=status,
                message=message,
                activities_processed=0,  # Can be updated later if needed
                activities_downloaded=0,  # Can be updated later if needed
            )
            session.add(log)
            session.commit()
        except Exception as e:
            logger.error(f"Failed to log operation: {e}")
        finally:
            session.close()

    def count_missing(self):
        """Count missing activities"""
        session = get_session()
        try:
            return session.query(Activity).filter_by(downloaded=False).count()
        finally:
            session.close()

    def reprocess_activities(self):
        """Reprocess activities to calculate missing metrics"""
        from .database import get_session
        from .activity_parser import get_activity_metrics
        from .database import Activity
        from tqdm import tqdm

        logger.info("Starting reprocess job")
        session = get_session()
        try:
            # Get activities that need reprocessing
            activities = session.query(Activity).filter(
                Activity.downloaded == True,
                Activity.reprocessed == False
            ).all()

            if not activities:
                logger.info("No activities to reprocess")
                return

            logger.info(f"Reprocessing {len(activities)} activities")
            success_count = 0

            # Reprocess each activity
            for activity in tqdm(activities, desc="Reprocessing"):
                try:
                    # Use force_reprocess=True to ensure we parse the file again
                    metrics = get_activity_metrics(activity, client=None, force_reprocess=True)

                    # Update activity metrics if we got new data
                    if metrics:
                        activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                        activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
                        activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
                        activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
                        activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
                        activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
                        activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories

                    # Mark as reprocessed regardless of success
                    activity.reprocessed = True
                    session.commit()
                    success_count += 1

                except Exception as e:
                    logger.error(f"Error reprocessing activity {activity.activity_id}: {str(e)}")
                    session.rollback()

            logger.info(f"Reprocessed {success_count}/{len(activities)} activities successfully")
            self.log_operation("reprocess", "success", f"Reprocessed {success_count} activities")
            self.update_daemon_last_run()

        except Exception as e:
            logger.error(f"Reprocess job failed: {str(e)}")
            self.log_operation("reprocess", "error", str(e))
        finally:
            session.close()
234  examples/GarminSync/garminsync/database.py  Normal file
@@ -0,0 +1,234 @@
"""Database module for GarminSync application with async support."""

import os
from datetime import datetime
from contextlib import asynccontextmanager

from sqlalchemy import Boolean, Column, Float, Integer, String, create_engine, func
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
from sqlalchemy.future import select
from sqlalchemy.orm import declarative_base
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import selectinload, joinedload
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class Activity(Base):
    """Activity model representing a Garmin activity record."""

    __tablename__ = "activities"

    activity_id = Column(Integer, primary_key=True)
    start_time = Column(String, nullable=False)
    activity_type = Column(String, nullable=True)
    duration = Column(Integer, nullable=True)
    distance = Column(Float, nullable=True)
    max_heart_rate = Column(Integer, nullable=True)
    avg_heart_rate = Column(Integer, nullable=True)
    avg_power = Column(Float, nullable=True)
    calories = Column(Integer, nullable=True)
    filename = Column(String, unique=True, nullable=True)
    downloaded = Column(Boolean, default=False, nullable=False)
    reprocessed = Column(Boolean, default=False, nullable=False)
    created_at = Column(String, nullable=False)
    last_sync = Column(String, nullable=True)

    @classmethod
    async def get_paginated(cls, db, page=1, per_page=10):
        """Get paginated list of activities (async)."""
        async with db.begin() as session:
            query = select(cls).order_by(cls.start_time.desc())
            result = await session.execute(query.offset((page-1)*per_page).limit(per_page))
            activities = result.scalars().all()
            # Count rows with a scalar aggregate query
            count_result = await session.execute(select(func.count()).select_from(cls))
            total = count_result.scalar_one()
            return {
                "items": activities,
                "page": page,
                "per_page": per_page,
                "total": total,
                "pages": (total + per_page - 1) // per_page
            }

    def to_dict(self):
        """Convert activity to dictionary representation."""
        return {
            "id": self.activity_id,
            "name": self.filename or "Unnamed Activity",
            "distance": self.distance,
            "duration": self.duration,
            "start_time": self.start_time,
            "activity_type": self.activity_type,
            "max_heart_rate": self.max_heart_rate,
            "avg_heart_rate": self.avg_heart_rate,
            "avg_power": self.avg_power,
            "calories": self.calories,
        }


class DaemonConfig(Base):
    """Daemon configuration model."""

    __tablename__ = "daemon_config"

    id = Column(Integer, primary_key=True, default=1)
    enabled = Column(Boolean, default=True, nullable=False)
    schedule_cron = Column(String, default="0 */6 * * *", nullable=False)
    last_run = Column(String, nullable=True)
    next_run = Column(String, nullable=True)
    status = Column(String, default="stopped", nullable=False)

    @classmethod
    async def get(cls, db):
        """Get configuration record (async)."""
        async with db.begin() as session:
            result = await session.execute(select(cls))
            return result.scalars().first()


class SyncLog(Base):
    """Sync log model for tracking sync operations."""

    __tablename__ = "sync_logs"

    id = Column(Integer, primary_key=True, autoincrement=True)
    timestamp = Column(String, nullable=False)
    operation = Column(String, nullable=False)
    status = Column(String, nullable=False)
    message = Column(String, nullable=True)
    activities_processed = Column(Integer, default=0, nullable=False)
    activities_downloaded = Column(Integer, default=0, nullable=False)


# Database initialization and session management
engine = None
async_session = None


async def init_db():
    """Initialize database connection and create tables."""
    global engine, async_session
    db_path = os.getenv("DB_PATH", "data/garmin.db")
    engine = create_async_engine(
        f"sqlite+aiosqlite:///{db_path}",
        pool_size=10,
        max_overflow=20,
        pool_pre_ping=True
    )
    async_session = async_sessionmaker(engine, expire_on_commit=False)

    # Create tables if they don't exist
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


@asynccontextmanager
async def get_db():
    """Async context manager for database sessions."""
    async with async_session() as session:
        try:
            yield session
            await session.commit()
        except SQLAlchemyError:
            await session.rollback()
            raise


# Compatibility layer for legacy sync functions
def get_legacy_session():
    """Temporary synchronous session for migration purposes."""
    db_path = os.getenv("DB_PATH", "data/garmin.db")
    sync_engine = create_engine(f"sqlite:///{db_path}")
    Base.metadata.create_all(sync_engine)
    Session = sessionmaker(bind=sync_engine)
    return Session()


async def sync_database(garmin_client):
    """Sync local database with Garmin Connect activities (async)."""
    from garminsync.activity_parser import get_activity_metrics
    async with get_db() as session:
        try:
            activities = garmin_client.get_activities(0, 1000)

            if not activities:
                print("No activities returned from Garmin API")
                return

            for activity_data in activities:
                if not isinstance(activity_data, dict):
                    print(f"Invalid activity data: {activity_data}")
                    continue

                activity_id = activity_data.get("activityId")
                start_time = activity_data.get("startTimeLocal")

                if not activity_id or not start_time:
                    print(f"Missing required fields in activity: {activity_data}")
                    continue

                result = await session.execute(
                    select(Activity).filter_by(activity_id=activity_id)
                )
                existing = result.scalars().first()

                # Create or update basic activity info
                if not existing:
                    activity = Activity(
                        activity_id=activity_id,
                        start_time=start_time,
                        downloaded=False,
                        created_at=datetime.now().isoformat(),
                        last_sync=datetime.now().isoformat(),
                    )
                    session.add(activity)
                else:
                    activity = existing

                # Update metrics using shared parser
                metrics = get_activity_metrics(activity, garmin_client)
                if metrics:
                    activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                    # ... rest of metric processing ...

                # Update sync timestamp
                activity.last_sync = datetime.now().isoformat()

            await session.commit()
        except SQLAlchemyError as e:
            await session.rollback()
            raise e


async def get_offline_stats():
    """Return statistics about cached data without API calls (async)."""
    async with get_db() as session:
        try:
            result = await session.execute(select(Activity))
            total = len(result.scalars().all())

            result = await session.execute(
                select(Activity).filter_by(downloaded=True)
            )
            downloaded = len(result.scalars().all())

            result = await session.execute(
                select(Activity).order_by(Activity.last_sync.desc())
            )
            last_sync = result.scalars().first()

            return {
                "total": total,
                "downloaded": downloaded,
                "missing": total - downloaded,
                "last_sync": last_sync.last_sync if last_sync else "Never synced",
            }
        except SQLAlchemyError as e:
            print(f"Database error: {e}")
            return {
                "total": 0,
                "downloaded": 0,
                "missing": 0,
                "last_sync": "Error"
            }
@@ -0,0 +1,73 @@
import numpy as np


class SinglespeedAnalyzer:
    def __init__(self):
        self.chainring_options = [38, 46]  # teeth
        self.common_cogs = list(range(11, 28))  # 11t to 27t rear cogs
        self.wheel_circumference_m = 2.096  # 700x25c tire

    def analyze_gear_ratio(self, speed_data, cadence_data, gradient_data):
        """Determine most likely singlespeed gear ratio"""
        # Validate input parameters
        if not speed_data or not cadence_data or not gradient_data:
            raise ValueError("Input data cannot be empty")
        if len(speed_data) != len(cadence_data) or len(speed_data) != len(gradient_data):
            raise ValueError("Input data arrays must be of equal length")

        # Filter for flat terrain segments (gradient < 3%)
        flat_indices = [i for i, grad in enumerate(gradient_data) if abs(grad) < 3.0]
        flat_speeds = [speed_data[i] for i in flat_indices]
        flat_cadences = [cadence_data[i] for i in flat_indices]

        # Only consider data points with sufficient speed (15 km/h) and cadence
        valid_indices = [i for i in range(len(flat_speeds))
                         if flat_speeds[i] > 4.17 and flat_cadences[i] > 0]  # 15 km/h threshold

        if not valid_indices:
            return None  # Not enough data

        valid_speeds = [flat_speeds[i] for i in valid_indices]
        valid_cadences = [flat_cadences[i] for i in valid_indices]

        # Calculate gear ratios from speed and cadence
        gear_ratios = []
        for speed, cadence in zip(valid_speeds, valid_cadences):
            # Gear ratio = (speed in m/s * 60 seconds/minute) / (cadence in rpm * wheel circumference in meters)
            gr = (speed * 60) / (cadence * self.wheel_circumference_m)
            gear_ratios.append(gr)

        # Calculate average gear ratio
        avg_gear_ratio = sum(gear_ratios) / len(gear_ratios)

        # Find best matching chainring and cog combination
        best_fit = None
        min_diff = float('inf')
        for chainring in self.chainring_options:
            for cog in self.common_cogs:
                theoretical_ratio = chainring / cog
                diff = abs(theoretical_ratio - avg_gear_ratio)
                if diff < min_diff:
                    min_diff = diff
                    best_fit = (chainring, cog, theoretical_ratio)

        if not best_fit:
            return None

        chainring, cog, ratio = best_fit

        # Calculate gear metrics
        wheel_diameter_inches = 27.0  # 700c wheel diameter
        gear_inches = ratio * wheel_diameter_inches
        development_meters = ratio * self.wheel_circumference_m

        # Calculate confidence score (1 - relative error)
        confidence = max(0, 1 - (min_diff / ratio)) if ratio > 0 else 0

        return {
            'estimated_chainring_teeth': chainring,
            'estimated_cassette_teeth': cog,
            'gear_ratio': ratio,
            'gear_inches': gear_inches,
            'development_meters': development_meters,
            'confidence_score': confidence
        }
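A short worked example of the gear-ratio formula used in analyze_gear_ratio above, with invented numbers (a steady 7 m/s at 90 rpm on the default 2.096 m wheel circumference):

# Illustrative arithmetic only; the input values are made up.
speed_ms = 7.0             # ~25 km/h on flat ground
cadence_rpm = 90.0
wheel_circumference_m = 2.096

gear_ratio = (speed_ms * 60) / (cadence_rpm * wheel_circumference_m)
print(round(gear_ratio, 2))  # ~2.23; the closest catalogue combination is 38/17 (~2.24)

The catalogue search in the class would therefore report an estimated 38-tooth chainring and 17-tooth cog for a ride like this.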
@@ -0,0 +1,44 @@
import numpy as np


class PowerEstimator:
    def __init__(self):
        self.bike_weight_kg = 10.0  # 22 lbs
        self.rider_weight_kg = 75.0  # Default assumption
        self.drag_coefficient = 0.88  # Road bike
        self.frontal_area_m2 = 0.4  # Typical road cycling position
        self.rolling_resistance = 0.004  # Road tires
        self.drivetrain_efficiency = 0.97
        self.air_density = 1.225  # kg/m³ at sea level, 20°C

    def calculate_power(self, speed_ms, gradient_percent,
                        air_temp_c=20, altitude_m=0):
        """Calculate estimated power using physics model"""
        # Validate input parameters
        if not isinstance(speed_ms, (int, float)) or speed_ms < 0:
            raise ValueError("Speed must be a non-negative number")
        if not isinstance(gradient_percent, (int, float)):
            raise ValueError("Gradient must be a number")

        # Calculate air density based on temperature and altitude
        temp_k = air_temp_c + 273.15
        pressure = 101325 * (1 - 0.0000225577 * altitude_m) ** 5.25588
        air_density = pressure / (287.05 * temp_k)

        # Convert gradient to angle
        gradient_rad = np.arctan(gradient_percent / 100.0)

        # Total mass
        total_mass = self.bike_weight_kg + self.rider_weight_kg

        # Power components
        P_roll = self.rolling_resistance * total_mass * 9.81 * np.cos(gradient_rad) * speed_ms
        P_grav = total_mass * 9.81 * np.sin(gradient_rad) * speed_ms
        P_aero = 0.5 * air_density * self.drag_coefficient * self.frontal_area_m2 * speed_ms ** 3

        # Power = (Rolling + Gravity + Aerodynamic) / Drivetrain efficiency
        return (P_roll + P_grav + P_aero) / self.drivetrain_efficiency

    def estimate_peak_power(self, power_values, durations):
        """Calculate peak power for various durations"""
        # This will be implemented in Phase 3
        return {}
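A quick sanity check of the physics model above, using the class defaults and invented ride conditions. The import path is inferred from the relative import in activity_parser.py and may differ in the actual package layout:

# Illustrative usage; the speed and gradient values are made up.
from garminsync.fit_processor.power_estimator import PowerEstimator  # path assumed

estimator = PowerEstimator()
watts = estimator.calculate_power(speed_ms=8.33, gradient_percent=2.0)
print(round(watts))  # roughly 300 W for 30 km/h up a 2% grade at the default 85 kg total mass

Rolling resistance contributes about 28 W here, gravity about 139 W, and aerodynamic drag about 122 W before the drivetrain-efficiency correction.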
196  examples/GarminSync/garminsync/garmin.py  Normal file
@@ -0,0 +1,196 @@
"""Garmin API client module for GarminSync application."""

import logging
import os
import time

from garminconnect import (Garmin, GarminConnectAuthenticationError,
                           GarminConnectConnectionError,
                           GarminConnectTooManyRequestsError)

logger = logging.getLogger(__name__)


class GarminClient:
    """Garmin API client for interacting with Garmin Connect services."""

    def __init__(self):
        self.client = None

    def authenticate(self):
        """Authenticate using credentials from environment variables"""
        email = os.getenv("GARMIN_EMAIL")
        password = os.getenv("GARMIN_PASSWORD")

        if not email or not password:
            raise ValueError("Garmin credentials not found in environment variables")

        try:
            self.client = Garmin(email, password)
            self.client.login()
            logger.info("Successfully authenticated with Garmin Connect")
            return self.client
        except GarminConnectAuthenticationError as e:
            logger.error("Authentication failed: %s", e)
            raise ValueError(f"Garmin authentication failed: {e}") from e
        except GarminConnectConnectionError as e:
            logger.error("Connection error: %s", e)
            raise ConnectionError(f"Failed to connect to Garmin Connect: {e}") from e
        except Exception as e:
            logger.error("Unexpected error during authentication: %s", e)
            raise RuntimeError(f"Unexpected error during authentication: {e}") from e

    def get_activities(self, start=0, limit=10):
        """Get list of activities with rate limiting

        Args:
            start: Starting index for activities
            limit: Maximum number of activities to return

        Returns:
            List of activities or None if failed

        Raises:
            ValueError: If authentication fails
            ConnectionError: If connection to Garmin fails
            RuntimeError: For other unexpected errors
        """
        if not self.client:
            self.authenticate()

        try:
            activities = self.client.get_activities(start, limit)
            time.sleep(2)  # Rate limiting
            logger.info("Retrieved %d activities", len(activities) if activities else 0)
            return activities
        except (GarminConnectConnectionError, TimeoutError, GarminConnectTooManyRequestsError) as e:
            logger.error("Network error while fetching activities: %s", e)
            raise ConnectionError(f"Failed to fetch activities: {e}") from e
        except Exception as e:  # pylint: disable=broad-except
            logger.error("Unexpected error while fetching activities: %s", e)
            raise RuntimeError(f"Failed to fetch activities: {e}") from e

    def download_activity_fit(self, activity_id):
        """Download .fit file for a specific activity"""
        if not self.client:
            self.authenticate()

        print(f"Attempting to download activity {activity_id}")

        # Try multiple methods to download FIT file
        methods_to_try = [
            # Method 1: No format parameter (most likely to work)
            lambda: self.client.download_activity(activity_id),
            # Method 2: Use correct parameter name with different values
            lambda: self.client.download_activity(activity_id, dl_fmt="FIT"),
            lambda: self.client.download_activity(
                activity_id, dl_fmt="tcx"
            ),  # Fallback format
        ]

        last_exception = None

        for i, method in enumerate(methods_to_try, 1):
            try:
                # Try the download method
                print(f"Trying download method {i}...")
                fit_data = method()

                if fit_data:
                    print(
                        f"Successfully downloaded {len(fit_data)} bytes using method {i}"
                    )
                    time.sleep(2)  # Rate limiting
                    return fit_data
                print(f"Method {i} returned empty data")

            # Catch connection errors specifically
            except (GarminConnectConnectionError, ConnectionError) as e:  # pylint: disable=duplicate-except
                print(f"Method {i} failed with connection error: {e}")
                last_exception = e
                continue
            # Catch all other exceptions as a fallback
            except (TimeoutError, GarminConnectTooManyRequestsError) as e:
                print(f"Method {i} failed with retryable error: {e}")
                last_exception = e
                continue
            except Exception as e:  # pylint: disable=broad-except
                print(f"Method {i} failed with unexpected error: "
                      f"{type(e).__name__}: {e}")
                last_exception = e
                continue

        # If all methods failed, raise the last exception
        if last_exception:
            raise RuntimeError(
                f"All download methods failed. Last error: {last_exception}"
            ) from last_exception
        raise RuntimeError(
            "All download methods failed, but no specific error was captured"
        )

    def get_activity_details(self, activity_id):
        """Get detailed information about a specific activity

        Args:
            activity_id: ID of the activity to retrieve

        Returns:
            Activity details dictionary or None if failed
        """
        if not self.client:
            self.authenticate()

        try:
            activity_details = self.client.get_activity(activity_id)
            time.sleep(2)  # Rate limiting
            logger.info("Retrieved details for activity %s", activity_id)
            return activity_details
        except (GarminConnectConnectionError, TimeoutError) as e:
            logger.error(
                "Connection/timeout error fetching activity details for %s: %s",
|
||||
activity_id, e
|
||||
)
|
||||
return None
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
logger.error("Unexpected error fetching activity details for %s: %s", activity_id, e)
|
||||
return None
|
||||
|
||||
# Example usage and testing function
|
||||
|
||||
|
||||
def test_download(activity_id):
|
||||
"""Test function to verify download functionality"""
|
||||
client = GarminClient()
|
||||
try:
|
||||
fit_data = client.download_activity_fit(activity_id)
|
||||
|
||||
# Verify the data looks like a FIT file
|
||||
if not fit_data or len(fit_data) <= 14:
|
||||
print("❌ Downloaded data is empty or too small")
|
||||
return None
|
||||
|
||||
header = fit_data[:14]
|
||||
if b".FIT" in header or header[8:12] == b".FIT":
|
||||
print("✅ Downloaded data appears to be a valid FIT file")
|
||||
else:
|
||||
print("⚠️ Downloaded data may not be a FIT file")
|
||||
print(f"Header: {header}")
|
||||
return fit_data
|
||||
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
print(f"❌ Test failed: {e}")
|
||||
return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Test with a sample activity ID if provided
|
||||
import sys
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
test_activity_id = sys.argv[1]
|
||||
print(f"Testing download for activity ID: {test_activity_id}")
|
||||
test_download(test_activity_id)
|
||||
else:
|
||||
print("Usage: python garmin.py <activity_id>")
|
||||
print("This will test the download functionality with the provided activity ID")
|
||||
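A minimal usage sketch for the client above. It assumes GARMIN_EMAIL and GARMIN_PASSWORD are already exported in the environment; the "activityId" key is the field name commonly returned by Garmin Connect activity summaries, so verify it against your own payload.

from garminsync.garmin import GarminClient

client = GarminClient()
activities = client.get_activities(start=0, limit=5)  # authenticates lazily on first use

for activity in activities or []:
    activity_id = activity.get("activityId")  # assumed field name; check your payload
    fit_bytes = client.download_activity_fit(activity_id)
    with open(f"activity_{activity_id}.fit", "wb") as fh:
        fh.write(fit_bytes)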
131
examples/GarminSync/garminsync/migrate_activities.py
Normal file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migration script to populate activity fields from FIT files or Garmin API
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
import logging
|
||||
|
||||
from sqlalchemy import MetaData, Table, create_engine, text
|
||||
from sqlalchemy.exc import OperationalError
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Add parent directory to path to import garminsync modules
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from garminsync.database import Activity, get_session, init_db
|
||||
from garminsync.garmin import GarminClient
|
||||
from garminsync.activity_parser import get_activity_metrics
|
||||
|
||||
def migrate_activities():
|
||||
"""Migrate activities to populate fields from FIT files or Garmin API"""
|
||||
logger.info("Starting activity migration...")
|
||||
|
||||
# We assume database schema has been updated via Alembic migrations
|
||||
# during container startup. Columns should already exist.
|
||||
|
||||
# Initialize Garmin client
|
||||
try:
|
||||
client = GarminClient()
|
||||
logger.info("Garmin client initialized successfully")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to initialize Garmin client: {e}")
|
||||
# Continue with migration but without Garmin data
|
||||
client = None
|
||||
|
||||
# Get database session
|
||||
session = get_session()
|
||||
|
||||
try:
|
||||
# Get all activities that need to be updated (those with NULL activity_type)
|
||||
activities = session.query(Activity).filter(Activity.activity_type.is_(None)).all()
|
||||
logger.info(f"Found {len(activities)} activities to migrate")
|
||||
|
||||
# If no activities found, exit early
|
||||
if not activities:
|
||||
logger.info("No activities found for migration")
|
||||
return True
|
||||
|
||||
updated_count = 0
|
||||
error_count = 0
|
||||
|
||||
for i, activity in enumerate(activities):
|
||||
try:
|
||||
logger.info(f"Processing activity {i+1}/{len(activities)} (ID: {activity.activity_id})")
|
||||
|
||||
# Use shared parser to get activity metrics
|
||||
activity_details = get_activity_metrics(activity, client)
|
||||
|
||||
# Update activity fields if we have details
|
||||
if activity_details:
|
||||
logger.info(f"Successfully parsed metrics for activity {activity.activity_id}")
|
||||
|
||||
# Update activity fields
|
||||
activity.activity_type = activity_details.get("activityType", {}).get("typeKey", "Unknown")
|
||||
|
||||
# Extract duration in seconds
|
||||
duration = activity_details.get("summaryDTO", {}).get("duration")
|
||||
if duration is not None:
|
||||
activity.duration = int(float(duration))
|
||||
|
||||
# Extract distance in meters
|
||||
distance = activity_details.get("summaryDTO", {}).get("distance")
|
||||
if distance is not None:
|
||||
activity.distance = float(distance)
|
||||
|
||||
# Extract max heart rate
|
||||
max_hr = activity_details.get("summaryDTO", {}).get("maxHR")
|
||||
if max_hr is not None:
|
||||
activity.max_heart_rate = int(float(max_hr))
|
||||
|
||||
# Extract average power
|
||||
avg_power = activity_details.get("summaryDTO", {}).get("avgPower")
|
||||
if avg_power is not None:
|
||||
activity.avg_power = float(avg_power)
|
||||
|
||||
# Extract calories
|
||||
calories = activity_details.get("summaryDTO", {}).get("calories")
|
||||
if calories is not None:
|
||||
activity.calories = int(float(calories))
|
||||
else:
|
||||
# Set default values if we can't get details
|
||||
activity.activity_type = "Unknown"
|
||||
logger.warning(f"Could not retrieve metrics for activity {activity.activity_id}")
|
||||
|
||||
# Update last sync timestamp
|
||||
activity.last_sync = datetime.now().isoformat()
|
||||
|
||||
session.commit()
|
||||
updated_count += 1
|
||||
|
||||
# Log progress every 10 activities
|
||||
if (i + 1) % 10 == 0:
|
||||
logger.info(f"Progress: {i+1}/{len(activities)} activities processed")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing activity {activity.activity_id}: {e}")
|
||||
session.rollback()
|
||||
error_count += 1
|
||||
continue
|
||||
|
||||
logger.info(f"Migration completed. Updated: {updated_count}, Errors: {error_count}")
|
||||
return updated_count > 0 or error_count == 0 # Success if we updated any or had no errors
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Migration failed: {e}")
|
||||
return False
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
if __name__ == "__main__":
|
||||
success = migrate_activities()
|
||||
sys.exit(0 if success else 1)
|
||||
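For reference, a hypothetical metrics payload in the shape the extraction loop above expects; the keys mirror the summaryDTO structure used by the parsers in this commit, while the numbers are invented.

example_details = {
    "activityType": {"typeKey": "cycling"},
    "summaryDTO": {
        "duration": 3600.0,   # seconds
        "distance": 30000.0,  # metres
        "maxHR": 172,
        "avgPower": 185.0,
        "calories": 750,
    },
}

summary = example_details.get("summaryDTO", {})
print(example_details.get("activityType", {}).get("typeKey", "Unknown"))  # cycling
print(int(float(summary["duration"])), float(summary["distance"]))        # 3600 30000.0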
153
examples/GarminSync/garminsync/parsers/gpx_parser.py
Normal file
@@ -0,0 +1,153 @@
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime
|
||||
import math
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def parse_gpx_file(file_path):
|
||||
"""
|
||||
Parse GPX file to extract activity metrics.
|
||||
Returns: Dictionary of activity metrics or None if parsing fails
|
||||
"""
|
||||
try:
|
||||
tree = ET.parse(file_path)
|
||||
root = tree.getroot()
|
||||
|
||||
# GPX namespace
|
||||
ns = {'gpx': 'http://www.topografix.com/GPX/1/1'}
|
||||
|
||||
# Extract metadata
|
||||
start_time = None
metadata = root.find('gpx:metadata', ns)
|
||||
if metadata is not None:
|
||||
time_elem = metadata.find('gpx:time', ns)
|
||||
if time_elem is not None:
|
||||
start_time = datetime.fromisoformat(time_elem.text.replace('Z', '+00:00'))
|
||||
else:
|
||||
# Fallback to first track point time
|
||||
trkpt = root.find('.//gpx:trkpt', ns)
|
||||
if trkpt is not None:
|
||||
time_elem = trkpt.find('gpx:time', ns)
|
||||
if time_elem is not None:
|
||||
start_time = datetime.fromisoformat(time_elem.text.replace('Z', '+00:00'))
|
||||
else:
|
||||
logger.error(f"No track points found in GPX file: {file_path}")
|
||||
return None
|
||||
|
||||
# Get all track points
|
||||
track_points = root.findall('.//gpx:trkpt', ns)
|
||||
if not track_points:
|
||||
logger.warning(f"No track points found in GPX file: {file_path}")
|
||||
return None
|
||||
|
||||
# Activity metrics
|
||||
total_distance = 0.0
|
||||
start_elevation = None
|
||||
min_elevation = float('inf')
|
||||
max_elevation = float('-inf')
|
||||
elevations = []
|
||||
heart_rates = []
|
||||
cadences = []
|
||||
|
||||
prev_point = None
|
||||
for point in track_points:
|
||||
# Parse coordinates
|
||||
lat = float(point.get('lat'))
|
||||
lon = float(point.get('lon'))
|
||||
|
||||
# Parse elevation
|
||||
ele_elem = point.find('gpx:ele', ns)
|
||||
ele = float(ele_elem.text) if ele_elem is not None else None
|
||||
if ele is not None:
|
||||
elevations.append(ele)
|
||||
if start_elevation is None:
|
||||
start_elevation = ele
|
||||
min_elevation = min(min_elevation, ele)
|
||||
max_elevation = max(max_elevation, ele)
|
||||
|
||||
# Parse time
|
||||
time_elem = point.find('gpx:time', ns)
|
||||
time = datetime.fromisoformat(time_elem.text.replace('Z', '+00:00')) if time_elem is not None else None
|
||||
|
||||
# Parse extensions (heart rate, cadence, etc.)
|
||||
extensions = point.find('gpx:extensions', ns)
|
||||
if extensions is not None:
|
||||
# Garmin TrackPointExtension
|
||||
tpe = extensions.find('gpx:TrackPointExtension', ns)
|
||||
if tpe is not None:
|
||||
hr_elem = tpe.find('gpx:hr', ns)
|
||||
if hr_elem is not None:
|
||||
heart_rates.append(int(hr_elem.text))
|
||||
|
||||
cad_elem = tpe.find('gpx:cad', ns)
|
||||
if cad_elem is not None:
|
||||
cadences.append(int(cad_elem.text))
|
||||
|
||||
# Calculate distance from previous point
|
||||
if prev_point:
|
||||
prev_lat, prev_lon = prev_point
|
||||
total_distance += haversine(prev_lat, prev_lon, lat, lon)
|
||||
|
||||
prev_point = (lat, lon)
|
||||
|
||||
# Calculate duration
|
||||
if start_time is not None and time is not None:
|
||||
duration = (time - start_time).total_seconds()
|
||||
else:
|
||||
duration = None
|
||||
|
||||
# Calculate elevation gain/loss
|
||||
elevation_gain = 0
|
||||
elevation_loss = 0
|
||||
if elevations:
|
||||
prev_ele = elevations[0]
|
||||
for ele in elevations[1:]:
|
||||
if ele > prev_ele:
|
||||
elevation_gain += ele - prev_ele
|
||||
else:
|
||||
elevation_loss += prev_ele - ele
|
||||
prev_ele = ele
|
||||
|
||||
# Calculate averages
|
||||
avg_heart_rate = sum(heart_rates) / len(heart_rates) if heart_rates else None
|
||||
avg_cadence = sum(cadences) / len(cadences) if cadences else None
|
||||
|
||||
return {
"activityType": {"typeKey": "other"},
"summaryDTO": {
"startTime": start_time.isoformat() if start_time else None,
"duration": duration,
"distance": total_distance,
"elevationGain": elevation_gain,
"elevationLoss": elevation_loss,
"minElevation": min_elevation if elevations else None,
"maxElevation": max_elevation if elevations else None,
|
||||
"maxHR": max(heart_rates) if heart_rates else None,
|
||||
"avgHR": avg_heart_rate,
|
||||
"cadence": avg_cadence,
|
||||
"calories": None # Calories not typically in GPX files
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error parsing GPX file {file_path}: {str(e)}")
|
||||
return None
|
||||
|
||||
def haversine(lat1, lon1, lat2, lon2):
|
||||
"""
|
||||
Calculate the great circle distance between two points
|
||||
on the earth (specified in decimal degrees)
|
||||
Returns distance in meters
|
||||
"""
|
||||
# Convert decimal degrees to radians
|
||||
lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])
|
||||
|
||||
# Haversine formula
|
||||
dlon = lon2 - lon1
|
||||
dlat = lat2 - lat1
|
||||
a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
|
||||
c = 2 * math.asin(math.sqrt(a))
|
||||
|
||||
# Radius of earth in meters
|
||||
r = 6371000
|
||||
return c * r
|
||||
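As a quick check of the helper above: two points 0.01° apart in latitude on the same meridian should be roughly 6371000 * radians(0.01) ≈ 1112 m apart, and the haversine result agrees.

from math import radians

d = haversine(51.50, -0.12, 51.51, -0.12)
print(round(d, 1))                        # ~1111.9 m via the haversine formula
print(round(6371000 * radians(0.01), 1))  # same distance from the simple arc length R * dphi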
96
examples/GarminSync/garminsync/utils.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
# Configure logging
|
||||
def setup_logger(name="garminsync", level=logging.INFO):
|
||||
"""Setup logger with consistent formatting"""
|
||||
logger = logging.getLogger(name)
|
||||
|
||||
# Prevent duplicate handlers
|
||||
if logger.handlers:
|
||||
return logger
|
||||
|
||||
logger.setLevel(level)
|
||||
|
||||
# Create console handler
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
handler.setLevel(level)
|
||||
|
||||
# Create formatter
|
||||
formatter = logging.Formatter(
|
||||
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
)
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
# Add handler to logger
|
||||
logger.addHandler(handler)
|
||||
|
||||
return logger
|
||||
|
||||
|
||||
# Create default logger instance
|
||||
logger = setup_logger()
|
||||
|
||||
|
||||
def format_timestamp(timestamp_str=None):
|
||||
"""Format timestamp string for display"""
|
||||
if not timestamp_str:
|
||||
return "Never"
|
||||
|
||||
try:
|
||||
# Parse ISO format timestamp
|
||||
dt = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
|
||||
return dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
except (ValueError, AttributeError):
|
||||
return timestamp_str
|
||||
|
||||
|
||||
def safe_filename(filename):
|
||||
"""Make filename safe for filesystem"""
|
||||
import re
|
||||
|
||||
# Replace problematic characters
|
||||
safe_name = re.sub(r'[<>:"/\\|?*]', "_", filename)
|
||||
# Replace spaces and colons commonly found in timestamps
|
||||
safe_name = safe_name.replace(":", "-").replace(" ", "_")
|
||||
return safe_name
|
||||
|
||||
|
||||
def bytes_to_human_readable(bytes_count):
|
||||
"""Convert bytes to human readable format"""
|
||||
if bytes_count == 0:
|
||||
return "0 B"
|
||||
|
||||
for unit in ["B", "KB", "MB", "GB"]:
|
||||
if bytes_count < 1024.0:
|
||||
return f"{bytes_count:.1f} {unit}"
|
||||
bytes_count /= 1024.0
|
||||
return f"{bytes_count:.1f} TB"
|
||||
|
||||
|
||||
def validate_cron_expression(cron_expr):
|
||||
"""Basic validation of cron expression"""
|
||||
try:
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
# Try to create a CronTrigger with the expression
|
||||
CronTrigger.from_crontab(cron_expr)
|
||||
return True
|
||||
except (ValueError, TypeError):
|
||||
return False
|
||||
|
||||
|
||||
# Utility function for error handling
|
||||
def handle_db_error(func):
|
||||
"""Decorator for database operations with error handling"""
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
logger.error(f"Database operation failed in {func.__name__}: {e}")
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
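A few illustrative calls against the helpers above, with the expected output in comments (the cron check needs apscheduler installed). Note that safe_filename's character class already maps ':' to '_', so the later replace(':', '-') has no effect on timestamps.

print(safe_filename("activity_123 2024-01-01 10:00:00.fit"))
# -> activity_123_2024-01-01_10_00_00.fit
print(bytes_to_human_readable(1536))              # -> 1.5 KB
print(format_timestamp("2024-01-01T10:00:00Z"))   # -> 2024-01-01 10:00:00
print(validate_cron_expression("0 3 * * *"))      # -> True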
1
examples/GarminSync/garminsync/web/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Empty file to mark this directory as a Python package
|
||||
Binary file not shown.
Binary file not shown.
107
examples/GarminSync/garminsync/web/app.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from .routes import router
|
||||
|
||||
app = FastAPI(title="GarminSync Dashboard")
|
||||
|
||||
# Get the current directory path
|
||||
current_dir = Path(__file__).parent
|
||||
|
||||
# Mount static files and templates with error handling
|
||||
static_dir = current_dir / "static"
|
||||
templates_dir = current_dir / "templates"
|
||||
|
||||
if static_dir.exists():
|
||||
app.mount("/static", StaticFiles(directory=str(static_dir)), name="static")
|
||||
|
||||
if templates_dir.exists():
|
||||
templates = Jinja2Templates(directory=str(templates_dir))
|
||||
else:
|
||||
templates = None
|
||||
|
||||
# Include API routes
|
||||
app.include_router(router)
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def dashboard(request: Request):
|
||||
"""Dashboard route with fallback for missing templates"""
|
||||
if not templates:
|
||||
# Return JSON response if templates are not available
|
||||
from garminsync.database import get_offline_stats
|
||||
|
||||
stats = get_offline_stats()
|
||||
return JSONResponse(
|
||||
{
|
||||
"message": "GarminSync Dashboard",
|
||||
"stats": stats,
|
||||
"note": "Web UI templates not found, showing JSON response",
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
# Get current statistics
|
||||
from garminsync.database import get_offline_stats
|
||||
|
||||
stats = get_offline_stats()
|
||||
|
||||
return templates.TemplateResponse(
|
||||
"dashboard.html", {"request": request, "stats": stats}
|
||||
)
|
||||
except Exception as e:
|
||||
return JSONResponse(
|
||||
{
|
||||
"error": f"Failed to load dashboard: {str(e)}",
|
||||
"message": "Dashboard unavailable, API endpoints still functional",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""Health check endpoint"""
|
||||
return {"status": "healthy", "service": "GarminSync Dashboard"}
|
||||
|
||||
|
||||
@app.get("/config")
|
||||
async def config_page(request: Request):
|
||||
"""Configuration page"""
|
||||
if not templates:
|
||||
return JSONResponse(
|
||||
{
|
||||
"message": "Configuration endpoint",
|
||||
"note": "Use /api/schedule endpoints for configuration",
|
||||
}
|
||||
)
|
||||
|
||||
return templates.TemplateResponse("config.html", {"request": request})
|
||||
|
||||
|
||||
@app.get("/activities")
|
||||
async def activities_page(request: Request):
|
||||
"""Activities page route"""
|
||||
if not templates:
|
||||
return JSONResponse({"message": "Activities endpoint"})
|
||||
|
||||
return templates.TemplateResponse("activities.html", {"request": request})
|
||||
|
||||
|
||||
# Error handlers
|
||||
@app.exception_handler(404)
|
||||
async def not_found_handler(request: Request, exc):
|
||||
return JSONResponse(
|
||||
status_code=404, content={"error": "Not found", "path": str(request.url.path)}
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(500)
|
||||
async def server_error_handler(request: Request, exc):
|
||||
return JSONResponse(
|
||||
status_code=500, content={"error": "Internal server error", "detail": str(exc)}
|
||||
)
|
||||
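One way to serve the dashboard locally is shown below; the import string is an assumption based on this file's path (garminsync/web/app.py) and may differ from how the project actually launches the app (for example via its CLI or Docker entrypoint).

import uvicorn

if __name__ == "__main__":
    uvicorn.run("garminsync.web.app:app", host="0.0.0.0", port=8000)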
478
examples/GarminSync/garminsync/web/routes.py
Normal file
@@ -0,0 +1,478 @@
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from garminsync.database import Activity, DaemonConfig, SyncLog, get_session
|
||||
|
||||
router = APIRouter(prefix="/api")
|
||||
|
||||
|
||||
class ScheduleConfig(BaseModel):
|
||||
enabled: bool
|
||||
cron_schedule: str
|
||||
|
||||
|
||||
@router.get("/status")
|
||||
async def get_status():
|
||||
"""Get current daemon status"""
|
||||
session = get_session()
|
||||
try:
|
||||
config = session.query(DaemonConfig).first()
|
||||
|
||||
# Get recent logs
|
||||
logs = session.query(SyncLog).order_by(SyncLog.timestamp.desc()).limit(10).all()
|
||||
|
||||
# Convert to dictionaries to avoid session issues
|
||||
daemon_data = {
|
||||
"running": config.status == "running" if config else False,
|
||||
"next_run": config.next_run if config else None,
|
||||
"schedule": config.schedule_cron if config else None,
|
||||
"last_run": config.last_run if config else None,
|
||||
"enabled": config.enabled if config else False,
|
||||
}
|
||||
|
||||
# Add sync status
|
||||
from garminsync.daemon import daemon_instance
|
||||
daemon_data["sync_in_progress"] = daemon_instance.is_sync_in_progress() if hasattr(daemon_instance, 'is_sync_in_progress') else False
|
||||
|
||||
log_data = []
|
||||
for log in logs:
|
||||
log_data.append(
|
||||
{
|
||||
"timestamp": log.timestamp,
|
||||
"operation": log.operation,
|
||||
"status": log.status,
|
||||
"message": log.message,
|
||||
"activities_processed": log.activities_processed,
|
||||
"activities_downloaded": log.activities_downloaded,
|
||||
}
|
||||
)
|
||||
|
||||
return {"daemon": daemon_data, "recent_logs": log_data}
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.post("/schedule")
|
||||
async def update_schedule(config: ScheduleConfig):
|
||||
"""Update daemon schedule configuration"""
|
||||
session = get_session()
|
||||
try:
|
||||
daemon_config = session.query(DaemonConfig).first()
|
||||
|
||||
if not daemon_config:
|
||||
daemon_config = DaemonConfig()
|
||||
session.add(daemon_config)
|
||||
|
||||
daemon_config.enabled = config.enabled
|
||||
daemon_config.schedule_cron = config.cron_schedule
|
||||
session.commit()
|
||||
|
||||
return {"message": "Configuration updated successfully"}
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to update configuration: {str(e)}"
|
||||
)
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.post("/sync/trigger")
|
||||
async def trigger_sync():
|
||||
"""Manually trigger a sync operation"""
|
||||
try:
|
||||
# Import here to avoid circular imports
|
||||
import os
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from garminsync.database import Activity, sync_database
|
||||
from garminsync.garmin import GarminClient
|
||||
|
||||
# Create client and sync
|
||||
client = GarminClient()
|
||||
sync_database(client)
|
||||
|
||||
# Download missing activities
|
||||
session = get_session()
|
||||
try:
|
||||
missing_activities = (
|
||||
session.query(Activity).filter_by(downloaded=False).all()
|
||||
)
|
||||
downloaded_count = 0
|
||||
|
||||
data_dir = Path(os.getenv("DATA_DIR", "data"))
|
||||
data_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for activity in missing_activities:
|
||||
try:
|
||||
fit_data = client.download_activity_fit(activity.activity_id)
|
||||
|
||||
timestamp = activity.start_time.replace(":", "-").replace(" ", "_")
|
||||
filename = f"activity_{activity.activity_id}_{timestamp}.fit"
|
||||
filepath = data_dir / filename
|
||||
|
||||
with open(filepath, "wb") as f:
|
||||
f.write(fit_data)
|
||||
|
||||
activity.filename = str(filepath)
|
||||
activity.downloaded = True
|
||||
activity.last_sync = datetime.now().isoformat()
|
||||
downloaded_count += 1
|
||||
session.commit()
|
||||
|
||||
except Exception as e:
|
||||
print(f"Failed to download activity {activity.activity_id}: {e}")
|
||||
session.rollback()
|
||||
|
||||
return {
|
||||
"message": f"Sync completed successfully. Downloaded {downloaded_count} activities."
|
||||
}
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Sync failed: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/activities/stats")
|
||||
async def get_activity_stats():
|
||||
"""Get activity statistics"""
|
||||
from garminsync.database import get_offline_stats
|
||||
|
||||
return get_offline_stats()
|
||||
|
||||
|
||||
@router.get("/logs")
|
||||
async def get_logs(
|
||||
status: Optional[str] = None,
operation: Optional[str] = None,
date: Optional[str] = None,
|
||||
page: int = 1,
|
||||
per_page: int = 20,
|
||||
):
|
||||
"""Get sync logs with filtering and pagination"""
|
||||
session = get_session()
|
||||
try:
|
||||
query = session.query(SyncLog)
|
||||
|
||||
# Apply filters
|
||||
if status:
|
||||
query = query.filter(SyncLog.status == status)
|
||||
if operation:
|
||||
query = query.filter(SyncLog.operation == operation)
|
||||
if date:
|
||||
# Filter by date (assuming ISO format)
|
||||
query = query.filter(SyncLog.timestamp.like(f"{date}%"))
|
||||
|
||||
# Get total count for pagination
|
||||
total = query.count()
|
||||
|
||||
# Apply pagination
|
||||
logs = (
|
||||
query.order_by(SyncLog.timestamp.desc())
|
||||
.offset((page - 1) * per_page)
|
||||
.limit(per_page)
|
||||
.all()
|
||||
)
|
||||
|
||||
log_data = []
|
||||
for log in logs:
|
||||
log_data.append(
|
||||
{
|
||||
"id": log.id,
|
||||
"timestamp": log.timestamp,
|
||||
"operation": log.operation,
|
||||
"status": log.status,
|
||||
"message": log.message,
|
||||
"activities_processed": log.activities_processed,
|
||||
"activities_downloaded": log.activities_downloaded,
|
||||
}
|
||||
)
|
||||
|
||||
return {"logs": log_data, "total": total, "page": page, "per_page": per_page}
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.post("/daemon/start")
|
||||
async def start_daemon():
|
||||
"""Start the daemon process"""
|
||||
from garminsync.daemon import daemon_instance

session = get_session()
try:
|
||||
# Start the daemon in a separate thread to avoid blocking
|
||||
import threading
|
||||
|
||||
daemon_thread = threading.Thread(target=daemon_instance.start)
|
||||
daemon_thread.daemon = True
|
||||
daemon_thread.start()
|
||||
|
||||
# Update daemon status in database
|
||||
config = session.query(DaemonConfig).first()
|
||||
if not config:
|
||||
config = DaemonConfig()
|
||||
session.add(config)
|
||||
config.status = "running"
|
||||
session.commit()
|
||||
|
||||
return {"message": "Daemon started successfully"}
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Failed to start daemon: {str(e)}")
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.post("/daemon/stop")
|
||||
async def stop_daemon():
|
||||
"""Stop the daemon process"""
|
||||
from garminsync.daemon import daemon_instance

session = get_session()
try:
|
||||
# Stop the daemon
|
||||
daemon_instance.stop()
|
||||
|
||||
# Update daemon status in database
|
||||
config = session.query(DaemonConfig).first()
|
||||
if config:
|
||||
config.status = "stopped"
|
||||
session.commit()
|
||||
|
||||
return {"message": "Daemon stopped successfully"}
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Failed to stop daemon: {str(e)}")
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.delete("/logs")
|
||||
async def clear_logs():
|
||||
"""Clear all sync logs"""
|
||||
session = get_session()
|
||||
try:
|
||||
session.query(SyncLog).delete()
|
||||
session.commit()
|
||||
return {"message": "Logs cleared successfully"}
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"Failed to clear logs: {str(e)}")
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
@router.post("/activities/{activity_id}/reprocess")
|
||||
async def reprocess_activity(activity_id: int):
|
||||
"""Reprocess a single activity to update metrics"""
|
||||
from garminsync.database import Activity, get_session
|
||||
from garminsync.activity_parser import get_activity_metrics
|
||||
|
||||
session = get_session()
|
||||
try:
|
||||
activity = session.query(Activity).get(activity_id)
|
||||
if not activity:
|
||||
raise HTTPException(status_code=404, detail="Activity not found")
|
||||
|
||||
metrics = get_activity_metrics(activity, force_reprocess=True)
|
||||
if metrics:
|
||||
# Update activity metrics
|
||||
activity.activity_type = metrics.get("activityType", {}).get("typeKey")
|
||||
activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
|
||||
activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
|
||||
activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
|
||||
activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
|
||||
activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
|
||||
activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories
|
||||
|
||||
# Mark as reprocessed
|
||||
activity.reprocessed = True
|
||||
session.commit()
|
||||
return {"message": f"Activity {activity_id} reprocessed successfully"}
|
||||
except HTTPException:
session.rollback()
raise
except Exception as e:
session.rollback()
raise HTTPException(status_code=500, detail=f"Reprocessing failed: {str(e)}")
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
@router.post("/reprocess")
|
||||
async def reprocess_activities(all: bool = False):
|
||||
"""Reprocess all activities or just missing ones"""
|
||||
from garminsync.daemon import daemon_instance
|
||||
|
||||
try:
|
||||
# Trigger reprocess job in daemon
|
||||
daemon_instance.reprocess_activities()
|
||||
return {"message": "Reprocess job started in background"}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Failed to start reprocess job: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/activities")
|
||||
async def get_activities(
|
||||
page: int = 1,
|
||||
per_page: int = 50,
|
||||
activity_type: Optional[str] = None,
date_from: Optional[str] = None,
date_to: Optional[str] = None,
|
||||
):
|
||||
"""Get paginated activities with filtering"""
|
||||
session = get_session()
|
||||
try:
|
||||
query = session.query(Activity)
|
||||
|
||||
# Apply filters
|
||||
if activity_type:
|
||||
query = query.filter(Activity.activity_type == activity_type)
|
||||
if date_from:
|
||||
query = query.filter(Activity.start_time >= date_from)
|
||||
if date_to:
|
||||
query = query.filter(Activity.start_time <= date_to)
|
||||
|
||||
# Get total count for pagination
|
||||
total = query.count()
|
||||
|
||||
# Apply pagination
|
||||
activities = (
|
||||
query.order_by(Activity.start_time.desc())
|
||||
.offset((page - 1) * per_page)
|
||||
.limit(per_page)
|
||||
.all()
|
||||
)
|
||||
|
||||
activity_data = []
|
||||
for activity in activities:
|
||||
activity_data.append(
|
||||
{
|
||||
"activity_id": activity.activity_id,
|
||||
"start_time": activity.start_time,
|
||||
"activity_type": activity.activity_type,
|
||||
"duration": activity.duration,
|
||||
"distance": activity.distance,
|
||||
"max_heart_rate": activity.max_heart_rate,
|
||||
"avg_heart_rate": activity.avg_heart_rate,
|
||||
"avg_power": activity.avg_power,
|
||||
"calories": activity.calories,
|
||||
"filename": activity.filename,
|
||||
"downloaded": activity.downloaded,
|
||||
"created_at": activity.created_at,
|
||||
"last_sync": activity.last_sync,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"activities": activity_data,
|
||||
"total": total,
|
||||
"page": page,
|
||||
"per_page": per_page,
|
||||
}
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.get("/activities/{activity_id}")
|
||||
async def get_activity_details(activity_id: int):
|
||||
"""Get detailed activity information"""
|
||||
session = get_session()
|
||||
try:
|
||||
activity = (
|
||||
session.query(Activity).filter(Activity.activity_id == activity_id).first()
|
||||
)
|
||||
if not activity:
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Activity with ID {activity_id} not found"
|
||||
)
|
||||
|
||||
return {
|
||||
"id": activity.activity_id,
|
||||
"name": activity.filename or "Unnamed Activity",
|
||||
"distance": activity.distance,
|
||||
"duration": activity.duration,
|
||||
"start_time": activity.start_time,
|
||||
"activity_type": activity.activity_type,
|
||||
"max_heart_rate": activity.max_heart_rate,
|
||||
"avg_power": activity.avg_power,
|
||||
"calories": activity.calories,
|
||||
"filename": activity.filename,
|
||||
"downloaded": activity.downloaded,
|
||||
"created_at": activity.created_at,
|
||||
"last_sync": activity.last_sync,
|
||||
}
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@router.get("/dashboard/stats")
|
||||
async def get_dashboard_stats():
|
||||
"""Get comprehensive dashboard statistics"""
|
||||
from garminsync.database import get_offline_stats
|
||||
|
||||
return get_offline_stats()
|
||||
|
||||
|
||||
@router.get("/api/activities")
|
||||
async def get_api_activities(page: int = 1, per_page: int = 10):
|
||||
"""Get paginated activities for API"""
|
||||
session = get_session()
|
||||
try:
|
||||
# Use the existing get_paginated method from Activity class
|
||||
pagination = Activity.get_paginated(page, per_page)
|
||||
activities = pagination.items
|
||||
total_pages = pagination.pages
|
||||
current_page = pagination.page
|
||||
total_items = pagination.total
|
||||
|
||||
if not activities and page > 1:
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"No activities found for page {page}"
|
||||
)
|
||||
|
||||
if not activities:
raise HTTPException(status_code=404, detail="No activities found")
|
||||
|
||||
return {
|
||||
"activities": [
|
||||
{
|
||||
"id": activity.activity_id,
|
||||
"name": activity.filename or "Unnamed Activity",
|
||||
"distance": activity.distance,
|
||||
"duration": activity.duration,
|
||||
"start_time": activity.start_time,
|
||||
"activity_type": activity.activity_type,
|
||||
"max_heart_rate": activity.max_heart_rate,
|
||||
"avg_power": activity.avg_power,
|
||||
"calories": activity.calories,
|
||||
"downloaded": activity.downloaded,
|
||||
"created_at": activity.created_at,
|
||||
"last_sync": activity.last_sync,
|
||||
"device": activity.device or "Unknown",
|
||||
"intensity": activity.intensity or "Unknown",
|
||||
"average_speed": activity.average_speed,
|
||||
"elevation_gain": activity.elevation_gain,
|
||||
"heart_rate_zones": activity.heart_rate_zones or [],
|
||||
"power_zones": activity.power_zones or [],
|
||||
"training_effect": activity.training_effect or 0,
|
||||
"training_effect_label": activity.training_effect_label
|
||||
or "Unknown",
|
||||
}
|
||||
for activity in activities
|
||||
],
|
||||
"total_pages": total_pages,
|
||||
"current_page": current_page,
|
||||
"total_items": total_items,
|
||||
"page_size": per_page,
|
||||
"status": "success",
|
||||
}
|
||||
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"An error occurred while fetching activities: {str(e)}",
)
|
||||
finally:
|
||||
session.close()
|
||||
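A hypothetical client-side session against the routes above, assuming the app is reachable on localhost:8000 with the /api prefix used by this router; the response keys match the dictionaries built in get_status and get_activities.

import requests

BASE = "http://localhost:8000"

status = requests.get(f"{BASE}/api/status", timeout=10).json()
print(status["daemon"]["running"], len(status["recent_logs"]))

page = requests.get(f"{BASE}/api/activities",
                    params={"page": 1, "per_page": 10}, timeout=10).json()
print(page["total"], [a["activity_id"] for a in page["activities"]])

requests.post(f"{BASE}/api/sync/trigger", timeout=600)  # long-running, see trigger_sync above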
140
examples/GarminSync/garminsync/web/static/activities.js
Normal file
@@ -0,0 +1,140 @@
|
||||
class ActivitiesPage {
|
||||
constructor() {
|
||||
this.currentPage = 1;
|
||||
this.pageSize = 25;
|
||||
this.totalPages = 1;
|
||||
this.activities = [];
|
||||
this.filters = {};
|
||||
this.init();
|
||||
}
|
||||
|
||||
init() {
|
||||
this.loadActivities();
|
||||
this.setupEventListeners();
|
||||
}
|
||||
|
||||
async loadActivities() {
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
page: this.currentPage,
|
||||
per_page: this.pageSize,
|
||||
...this.filters
|
||||
});
|
||||
|
||||
const response = await fetch(`/api/activities?${params}`);
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to load activities');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
this.activities = data.activities;
|
||||
this.totalPages = Math.ceil(data.total / this.pageSize);
|
||||
|
||||
this.renderTable();
|
||||
this.renderPagination();
|
||||
} catch (error) {
|
||||
console.error('Failed to load activities:', error);
|
||||
this.showError('Failed to load activities');
|
||||
}
|
||||
}
|
||||
|
||||
renderTable() {
|
||||
const tbody = document.getElementById('activities-tbody');
|
||||
if (!tbody) return;
|
||||
|
||||
if (!this.activities || this.activities.length === 0) {
|
||||
tbody.innerHTML = '<tr><td colspan="8">No activities found</td></tr>';
|
||||
return;
|
||||
}
|
||||
|
||||
tbody.innerHTML = '';
|
||||
|
||||
this.activities.forEach((activity, index) => {
|
||||
const row = this.createTableRow(activity, index);
|
||||
tbody.appendChild(row);
|
||||
});
|
||||
}
|
||||
|
||||
createTableRow(activity, index) {
|
||||
const row = document.createElement('tr');
|
||||
row.className = index % 2 === 0 ? 'row-even' : 'row-odd';
|
||||
|
||||
row.innerHTML = `
|
||||
<td>${Utils.formatDate(activity.start_time)}</td>
|
||||
<td>${activity.activity_type || '-'}</td>
|
||||
<td>${Utils.formatDuration(activity.duration)}</td>
|
||||
<td>${Utils.formatDistance(activity.distance)}</td>
|
||||
<td>${Utils.formatHeartRate(activity.max_heart_rate)}</td>
|
||||
<td>${Utils.formatHeartRate(activity.avg_heart_rate)}</td>
|
||||
<td>${Utils.formatPower(activity.avg_power)}</td>
|
||||
<td>${activity.calories ? activity.calories.toLocaleString() : '-'}</td>
|
||||
`;
|
||||
|
||||
return row;
|
||||
}
|
||||
|
||||
renderPagination() {
|
||||
const pagination = document.getElementById('pagination');
|
||||
if (!pagination) return;
|
||||
|
||||
if (this.totalPages <= 1) {
|
||||
pagination.innerHTML = '';
|
||||
return;
|
||||
}
|
||||
|
||||
let paginationHtml = '';
|
||||
|
||||
// Previous button
|
||||
paginationHtml += `
|
||||
<li class="${this.currentPage === 1 ? 'disabled' : ''}">
|
||||
<a href="#" onclick="activitiesPage.changePage(${this.currentPage - 1}); return false;">Previous</a>
|
||||
</li>
|
||||
`;
|
||||
|
||||
// Page numbers
|
||||
for (let i = 1; i <= this.totalPages; i++) {
|
||||
if (i === 1 || i === this.totalPages || (i >= this.currentPage - 2 && i <= this.currentPage + 2)) {
|
||||
paginationHtml += `
|
||||
<li class="${i === this.currentPage ? 'active' : ''}">
|
||||
<a href="#" onclick="activitiesPage.changePage(${i}); return false;">${i}</a>
|
||||
</li>
|
||||
`;
|
||||
} else if (i === this.currentPage - 3 || i === this.currentPage + 3) {
|
||||
paginationHtml += '<li><span>...</span></li>';
|
||||
}
|
||||
}
|
||||
|
||||
// Next button
|
||||
paginationHtml += `
|
||||
<li class="${this.currentPage === this.totalPages ? 'disabled' : ''}">
|
||||
<a href="#" onclick="activitiesPage.changePage(${this.currentPage + 1}); return false;">Next</a>
|
||||
</li>
|
||||
`;
|
||||
|
||||
pagination.innerHTML = paginationHtml;
|
||||
}
|
||||
|
||||
changePage(page) {
|
||||
if (page < 1 || page > this.totalPages) return;
|
||||
this.currentPage = page;
|
||||
this.loadActivities();
|
||||
}
|
||||
|
||||
setupEventListeners() {
|
||||
// We can add filter event listeners here if needed
|
||||
}
|
||||
|
||||
showError(message) {
|
||||
const tbody = document.getElementById('activities-tbody');
|
||||
if (tbody) {
|
||||
tbody.innerHTML = `<tr><td colspan="8">Error: ${message}</td></tr>`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize activities page when DOM is loaded
|
||||
let activitiesPage;
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
activitiesPage = new ActivitiesPage();
|
||||
});
|
||||
3
examples/GarminSync/garminsync/web/static/app.js
Normal file
@@ -0,0 +1,3 @@
|
||||
// This file is deprecated and no longer used.
|
||||
// The functionality has been moved to home.js, activities.js, and logs.js
|
||||
// This file is kept for backward compatibility but is empty.
|
||||
1
examples/GarminSync/garminsync/web/static/charts.js
Normal file
@@ -0,0 +1 @@
|
||||
// This file is deprecated and no longer used.
|
||||
200
examples/GarminSync/garminsync/web/static/components.css
Normal file
@@ -0,0 +1,200 @@
|
||||
/* Table Styling */
|
||||
.activities-table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.activities-table thead {
|
||||
background-color: #000;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.activities-table th {
|
||||
padding: 12px 16px;
|
||||
text-align: left;
|
||||
font-weight: 600;
|
||||
border-right: 1px solid #333;
|
||||
}
|
||||
|
||||
.activities-table th:last-child {
|
||||
border-right: none;
|
||||
}
|
||||
|
||||
.activities-table td {
|
||||
padding: 12px 16px;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
.activities-table .row-even {
|
||||
background-color: #f8f9fa;
|
||||
}
|
||||
|
||||
.activities-table .row-odd {
|
||||
background-color: #ffffff;
|
||||
}
|
||||
|
||||
.activities-table tr:hover {
|
||||
background-color: #e9ecef;
|
||||
}
|
||||
|
||||
/* Sync Button Styling */
|
||||
.btn-primary.btn-large {
|
||||
width: 100%;
|
||||
padding: 15px;
|
||||
font-size: 16px;
|
||||
font-weight: 600;
|
||||
border-radius: var(--border-radius);
|
||||
background: linear-gradient(135deg, #007bff 0%, #0056b3 100%);
|
||||
border: none;
|
||||
color: white;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.btn-primary.btn-large:hover {
|
||||
transform: translateY(-2px);
|
||||
box-shadow: 0 4px 12px rgba(0,123,255,0.3);
|
||||
}
|
||||
|
||||
.btn-primary.btn-large:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
transform: none;
|
||||
}
|
||||
|
||||
/* Statistics Card */
|
||||
.statistics-card .stat-item {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
margin-bottom: 10px;
|
||||
padding: 8px 0;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
.statistics-card .stat-item:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.statistics-card label {
|
||||
font-weight: 500;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.statistics-card span {
|
||||
font-weight: 600;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Pagination */
|
||||
.pagination-container {
|
||||
margin-top: 20px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.pagination {
|
||||
display: flex;
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.pagination li {
|
||||
margin: 0 5px;
|
||||
}
|
||||
|
||||
.pagination a {
|
||||
display: block;
|
||||
padding: 8px 12px;
|
||||
text-decoration: none;
|
||||
color: var(--primary-color);
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 4px;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.pagination a:hover {
|
||||
background-color: #f0f0f0;
|
||||
}
|
||||
|
||||
.pagination .active a {
|
||||
background-color: var(--primary-color);
|
||||
color: white;
|
||||
border-color: var(--primary-color);
|
||||
}
|
||||
|
||||
.pagination .disabled a {
|
||||
color: #ccc;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
/* Form elements */
|
||||
.form-group {
|
||||
margin-bottom: 15px;
|
||||
}
|
||||
|
||||
.form-group label {
|
||||
display: block;
|
||||
margin-bottom: 5px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.form-control {
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: var(--border-radius);
|
||||
font-family: var(--font-family);
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.form-control:focus {
|
||||
outline: none;
|
||||
border-color: var(--primary-color);
|
||||
box-shadow: 0 0 0 2px rgba(0,123,255,0.25);
|
||||
}
|
||||
|
||||
/* Badges */
|
||||
.badge {
|
||||
display: inline-block;
|
||||
padding: 4px 8px;
|
||||
border-radius: 4px;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.badge-success {
|
||||
background-color: var(--success-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.badge-error {
|
||||
background-color: var(--danger-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.badge-warning {
|
||||
background-color: var(--warning-color);
|
||||
color: #212529;
|
||||
}
|
||||
|
||||
/* Table responsive */
|
||||
.table-container {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
/* Activities table card */
|
||||
.activities-table-card {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.activities-table-card .card-header {
|
||||
padding: 20px;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
/* Activities container */
|
||||
.activities-container {
|
||||
margin-top: 20px;
|
||||
}
|
||||
144
examples/GarminSync/garminsync/web/static/home.js
Normal file
@@ -0,0 +1,144 @@
|
||||
class HomePage {
|
||||
constructor() {
|
||||
this.logSocket = null;
|
||||
this.statsRefreshInterval = null;
|
||||
this.init();
|
||||
}
|
||||
|
||||
init() {
|
||||
this.attachEventListeners();
|
||||
this.setupRealTimeUpdates();
|
||||
this.loadInitialData();
|
||||
}
|
||||
|
||||
attachEventListeners() {
|
||||
const syncButton = document.getElementById('sync-now-btn');
|
||||
if (syncButton) {
|
||||
syncButton.addEventListener('click', () => this.triggerSync());
|
||||
}
|
||||
}
|
||||
|
||||
async triggerSync() {
|
||||
const btn = document.getElementById('sync-now-btn');
|
||||
const status = document.getElementById('sync-status');
|
||||
|
||||
if (!btn || !status) return;
|
||||
|
||||
btn.disabled = true;
|
||||
btn.innerHTML = '<i class="icon-loading"></i> Syncing...';
|
||||
status.textContent = 'Sync in progress...';
|
||||
status.className = 'sync-status syncing';
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/sync/trigger', {method: 'POST'});
|
||||
const result = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
status.textContent = 'Sync completed successfully';
|
||||
status.className = 'sync-status success';
|
||||
this.updateStats();
|
||||
} else {
|
||||
throw new Error(result.detail || 'Sync failed');
|
||||
}
|
||||
} catch (error) {
|
||||
status.textContent = `Sync failed: ${error.message}`;
|
||||
status.className = 'sync-status error';
|
||||
} finally {
|
||||
btn.disabled = false;
|
||||
btn.innerHTML = '<i class="icon-sync"></i> Sync Now';
|
||||
|
||||
// Reset status message after 5 seconds
|
||||
setTimeout(() => {
|
||||
if (status.className.includes('success')) {
|
||||
status.textContent = 'Ready to sync';
|
||||
status.className = 'sync-status';
|
||||
}
|
||||
}, 5000);
|
||||
}
|
||||
}
|
||||
|
||||
setupRealTimeUpdates() {
|
||||
// Poll for log updates every 5 seconds during active operations
|
||||
this.startLogPolling();
|
||||
|
||||
// Update stats every 30 seconds
|
||||
this.statsRefreshInterval = setInterval(() => {
|
||||
this.updateStats();
|
||||
}, 30000);
|
||||
}
|
||||
|
||||
async startLogPolling() {
|
||||
// For now, we'll update logs every 10 seconds
|
||||
setInterval(() => {
|
||||
this.updateLogs();
|
||||
}, 10000);
|
||||
}
|
||||
|
||||
async updateStats() {
|
||||
try {
|
||||
const response = await fetch('/api/dashboard/stats');
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch stats');
|
||||
}
|
||||
|
||||
const stats = await response.json();
|
||||
|
||||
const totalEl = document.getElementById('total-activities');
|
||||
const downloadedEl = document.getElementById('downloaded-activities');
|
||||
const missingEl = document.getElementById('missing-activities');
|
||||
|
||||
if (totalEl) totalEl.textContent = stats.total;
|
||||
if (downloadedEl) downloadedEl.textContent = stats.downloaded;
|
||||
if (missingEl) missingEl.textContent = stats.missing;
|
||||
} catch (error) {
|
||||
console.error('Failed to update stats:', error);
|
||||
}
|
||||
}
|
||||
|
||||
async updateLogs() {
|
||||
try {
|
||||
const response = await fetch('/api/status');
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch logs');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
this.renderLogs(data.recent_logs);
|
||||
} catch (error) {
|
||||
console.error('Failed to update logs:', error);
|
||||
}
|
||||
}
|
||||
|
||||
renderLogs(logs) {
|
||||
const logContent = document.getElementById('log-content');
|
||||
if (!logContent) return;
|
||||
|
||||
if (!logs || logs.length === 0) {
|
||||
logContent.innerHTML = '<div class="log-entry">No recent activity</div>';
|
||||
return;
|
||||
}
|
||||
|
||||
const logsHtml = logs.map(log => `
|
||||
<div class="log-entry">
|
||||
<span class="timestamp">${Utils.formatTimestamp(log.timestamp)}</span>
|
||||
<span class="status ${log.status === 'success' ? 'success' : 'error'}">
|
||||
${log.status}
|
||||
</span>
|
||||
${log.operation}: ${log.message || ''}
|
||||
${log.activities_downloaded > 0 ? `Downloaded ${log.activities_downloaded} activities` : ''}
|
||||
</div>
|
||||
`).join('');
|
||||
|
||||
logContent.innerHTML = logsHtml;
|
||||
}
|
||||
|
||||
async loadInitialData() {
|
||||
// Load initial logs
|
||||
await this.updateLogs();
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize home page when DOM is loaded
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
new HomePage();
|
||||
});
|
||||
179
examples/GarminSync/garminsync/web/static/logs.js
Normal file
@@ -0,0 +1,179 @@
|
||||
// Global variables for pagination and filtering
|
||||
let currentPage = 1;
|
||||
const logsPerPage = 20;
|
||||
let totalLogs = 0;
|
||||
let currentFilters = {};
|
||||
|
||||
class LogsPage {
|
||||
constructor() {
|
||||
this.currentPage = 1;
|
||||
this.init();
|
||||
}
|
||||
|
||||
init() {
|
||||
this.loadLogs();
|
||||
this.setupEventListeners();
|
||||
}
|
||||
|
||||
async loadLogs() {
|
||||
try {
|
||||
// Build query string from filters
|
||||
const params = new URLSearchParams({
|
||||
page: this.currentPage,
|
||||
per_page: logsPerPage,
|
||||
...currentFilters
|
||||
}).toString();
|
||||
|
||||
const response = await fetch(`/api/logs?${params}`);
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to fetch logs');
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
totalLogs = data.total;
|
||||
this.renderLogs(data.logs);
|
||||
this.renderPagination();
|
||||
} catch (error) {
|
||||
console.error('Error loading logs:', error);
|
||||
Utils.showError('Failed to load logs: ' + error.message);
|
||||
}
|
||||
}
|
||||
|
||||
renderLogs(logs) {
|
||||
const tbody = document.getElementById('logs-tbody');
|
||||
if (!tbody) return;
|
||||
|
||||
tbody.innerHTML = '';
|
||||
|
||||
if (!logs || logs.length === 0) {
|
||||
tbody.innerHTML = '<tr><td colspan="6">No logs found</td></tr>';
|
||||
return;
|
||||
}
|
||||
|
||||
logs.forEach((log, index) => {
|
||||
const row = document.createElement('tr');
|
||||
row.className = index % 2 === 0 ? 'row-even' : 'row-odd'; // Alternating row colors
|
||||
|
||||
row.innerHTML = `
|
||||
<td>${Utils.formatTimestamp(log.timestamp)}</td>
|
||||
<td>${log.operation}</td>
|
||||
<td><span class="badge badge-${log.status === 'success' ? 'success' :
|
||||
log.status === 'error' ? 'error' :
|
||||
'warning'}">${log.status}</span></td>
|
||||
<td>${log.message || ''}</td>
|
||||
<td>${log.activities_processed}</td>
|
||||
<td>${log.activities_downloaded}</td>
|
||||
`;
|
||||
|
||||
tbody.appendChild(row);
|
||||
});
|
||||
}
|
||||
|
||||
renderPagination() {
|
||||
const totalPages = Math.ceil(totalLogs / logsPerPage);
|
||||
const pagination = document.getElementById('pagination');
|
||||
if (!pagination) return;
|
||||
|
||||
if (totalPages <= 1) {
|
||||
pagination.innerHTML = '';
|
||||
return;
|
||||
}
|
||||
|
||||
let paginationHtml = '';
|
||||
|
||||
// Previous button
|
||||
paginationHtml += `
|
||||
<li class="${this.currentPage === 1 ? 'disabled' : ''}">
|
||||
<a href="#" onclick="logsPage.changePage(${this.currentPage - 1}); return false;">Previous</a>
|
||||
</li>
|
||||
`;
|
||||
|
||||
// Page numbers
|
||||
for (let i = 1; i <= totalPages; i++) {
|
||||
if (i === 1 || i === totalPages || (i >= this.currentPage - 2 && i <= this.currentPage + 2)) {
|
||||
paginationHtml += `
|
||||
<li class="${i === this.currentPage ? 'active' : ''}">
|
||||
<a href="#" onclick="logsPage.changePage(${i}); return false;">${i}</a>
|
||||
</li>
|
||||
`;
|
||||
} else if (i === this.currentPage - 3 || i === this.currentPage + 3) {
|
||||
paginationHtml += '<li><span>...</span></li>';
|
||||
}
|
||||
}
|
||||
|
||||
// Next button
|
||||
paginationHtml += `
|
||||
<li class="${this.currentPage === totalPages ? 'disabled' : ''}">
|
||||
<a href="#" onclick="logsPage.changePage(${this.currentPage + 1}); return false;">Next</a>
|
||||
</li>
|
||||
`;
|
||||
|
||||
pagination.innerHTML = paginationHtml;
|
||||
}
|
||||
|
||||
changePage(page) {
|
||||
if (page < 1 || page > Math.ceil(totalLogs / logsPerPage)) return;
|
||||
this.currentPage = page;
|
||||
this.loadLogs();
|
||||
}
|
||||
|
||||
refreshLogs() {
|
||||
this.currentPage = 1;
|
||||
this.loadLogs();
|
||||
}
|
||||
|
||||
applyFilters() {
|
||||
currentFilters = {
|
||||
status: document.getElementById('status-filter').value,
|
||||
operation: document.getElementById('operation-filter').value,
|
||||
date: document.getElementById('date-filter').value
|
||||
};
|
||||
|
||||
this.currentPage = 1;
|
||||
this.loadLogs();
|
||||
}
|
||||
|
||||
async clearLogs() {
|
||||
if (!confirm('Are you sure you want to clear all logs? This cannot be undone.')) return;
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/logs', { method: 'DELETE' });
|
||||
if (response.ok) {
|
||||
Utils.showSuccess('Logs cleared successfully');
|
||||
this.refreshLogs();
|
||||
} else {
|
||||
throw new Error('Failed to clear logs');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error clearing logs:', error);
|
||||
Utils.showError('Failed to clear logs: ' + error.message);
|
||||
}
|
||||
}
|
||||
|
||||
setupEventListeners() {
|
||||
// Event listeners are handled in the global functions below
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize logs page when DOM is loaded
|
||||
let logsPage;
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
logsPage = new LogsPage();
|
||||
});
|
||||
|
||||
// Global functions for backward compatibility with HTML onclick attributes
|
||||
function changePage(page) {
|
||||
if (logsPage) logsPage.changePage(page);
|
||||
}
|
||||
|
||||
function refreshLogs() {
|
||||
if (logsPage) logsPage.refreshLogs();
|
||||
}
|
||||
|
||||
function applyFilters() {
|
||||
if (logsPage) logsPage.applyFilters();
|
||||
}
|
||||
|
||||
function clearLogs() {
|
||||
if (logsPage) logsPage.clearLogs();
|
||||
}
|
||||
52
examples/GarminSync/garminsync/web/static/navigation.js
Normal file
@@ -0,0 +1,52 @@
|
||||
class Navigation {
|
||||
constructor() {
|
||||
this.currentPage = this.getCurrentPage();
|
||||
this.render();
|
||||
}
|
||||
|
||||
getCurrentPage() {
|
||||
return window.location.pathname === '/activities' ? 'activities' : 'home';
|
||||
}
|
||||
|
||||
render() {
|
||||
const nav = document.querySelector('.navigation');
|
||||
if (nav) {
|
||||
nav.innerHTML = this.getNavigationHTML();
|
||||
this.attachEventListeners();
|
||||
}
|
||||
}
|
||||
|
||||
getNavigationHTML() {
|
||||
return `
|
||||
<nav class="nav-tabs">
|
||||
<button class="nav-tab ${this.currentPage === 'home' ? 'active' : ''}"
|
||||
data-page="home">Home</button>
|
||||
<button class="nav-tab ${this.currentPage === 'activities' ? 'active' : ''}"
|
||||
data-page="activities">Activities</button>
|
||||
</nav>
|
||||
`;
|
||||
}
|
||||
|
||||
attachEventListeners() {
|
||||
const tabs = document.querySelectorAll('.nav-tab');
|
||||
tabs.forEach(tab => {
|
||||
tab.addEventListener('click', (e) => {
|
||||
const page = e.target.getAttribute('data-page');
|
||||
this.navigateToPage(page);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
navigateToPage(page) {
|
||||
if (page === 'home') {
|
||||
window.location.href = '/';
|
||||
} else if (page === 'activities') {
|
||||
window.location.href = '/activities';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize navigation when DOM is loaded
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
new Navigation();
|
||||
});
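// Illustrative sketch, not part of the original file: the mapping above only distinguishes
// the Home and Activities tabs, while this commit also ships /config and /logs templates,
// so those pages never get an active tab. One possible pathname-to-page mapping, assuming
// matching nav-tab buttons were also added in getNavigationHTML():
function resolvePageFromPath(pathname) {
    // Check the more specific prefixes first so '/' only matches as a fallback.
    if (pathname.startsWith('/activities')) return 'activities';
    if (pathname.startsWith('/config')) return 'config';
    if (pathname.startsWith('/logs')) return 'logs';
    return 'home';
}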
|
||||
78
examples/GarminSync/garminsync/web/static/responsive.css
Normal file
@@ -0,0 +1,78 @@
|
||||
/* Mobile-first responsive design */
|
||||
@media (max-width: 768px) {
|
||||
.layout-grid {
|
||||
grid-template-columns: 1fr;
|
||||
gap: 15px;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
order: 2;
|
||||
}
|
||||
|
||||
.main-content {
|
||||
order: 1;
|
||||
}
|
||||
|
||||
.activities-table {
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.activities-table th,
|
||||
.activities-table td {
|
||||
padding: 8px 10px;
|
||||
}
|
||||
|
||||
.nav-tabs {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.container {
|
||||
padding: 0 10px;
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: 15px;
|
||||
}
|
||||
|
||||
.btn {
|
||||
padding: 8px 15px;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.btn-large {
|
||||
padding: 12px 20px;
|
||||
font-size: 15px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 480px) {
|
||||
.activities-table {
|
||||
display: block;
|
||||
overflow-x: auto;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.stat-item {
|
||||
flex-direction: column;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
.log-content {
|
||||
padding: 5px;
|
||||
font-size: 0.8rem;
|
||||
}
|
||||
|
||||
.log-entry {
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
.pagination a {
|
||||
padding: 6px 10px;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
.form-control {
|
||||
padding: 8px;
|
||||
font-size: 14px;
|
||||
}
|
||||
}
|
||||
268
examples/GarminSync/garminsync/web/static/style.css
Normal file
@@ -0,0 +1,268 @@
|
||||
/* CSS Variables for consistent theming */
|
||||
:root {
|
||||
--primary-color: #007bff;
|
||||
--secondary-color: #6c757d;
|
||||
--success-color: #28a745;
|
||||
--danger-color: #dc3545;
|
||||
--warning-color: #ffc107;
|
||||
--light-gray: #f8f9fa;
|
||||
--dark-gray: #343a40;
|
||||
--border-radius: 8px;
|
||||
--box-shadow: 0 2px 10px rgba(0,0,0,0.1);
|
||||
--font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
}
|
||||
|
||||
/* Reset and base styles */
|
||||
* {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: var(--font-family);
|
||||
background-color: #f5f7fa;
|
||||
color: #333;
|
||||
line-height: 1.6;
|
||||
}
|
||||
|
||||
/* CSS Grid Layout System */
|
||||
.container {
|
||||
max-width: 1200px;
|
||||
margin: 0 auto;
|
||||
padding: 0 20px;
|
||||
}
|
||||
|
||||
.layout-grid {
|
||||
display: grid;
|
||||
grid-template-columns: 300px 1fr;
|
||||
gap: 20px;
|
||||
min-height: calc(100vh - 60px);
|
||||
}
|
||||
|
||||
/* Modern Card Components */
|
||||
.card {
|
||||
background: white;
|
||||
border-radius: var(--border-radius);
|
||||
box-shadow: var(--box-shadow);
|
||||
padding: 20px;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.card-header {
|
||||
font-weight: 600;
|
||||
font-size: 1.2rem;
|
||||
margin-bottom: 15px;
|
||||
padding-bottom: 10px;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
/* Navigation */
|
||||
.navigation {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.nav-tabs {
|
||||
display: flex;
|
||||
background: white;
|
||||
border-radius: var(--border-radius);
|
||||
box-shadow: var(--box-shadow);
|
||||
padding: 5px;
|
||||
}
|
||||
|
||||
.nav-tab {
|
||||
flex: 1;
|
||||
padding: 12px 20px;
|
||||
border: none;
|
||||
background: transparent;
|
||||
cursor: pointer;
|
||||
font-weight: 500;
|
||||
border-radius: var(--border-radius);
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.nav-tab:hover {
|
||||
background-color: #f0f0f0;
|
||||
}
|
||||
|
||||
.nav-tab.active {
|
||||
background-color: var(--primary-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
/* Buttons */
|
||||
.btn {
|
||||
padding: 10px 20px;
|
||||
border: none;
|
||||
border-radius: var(--border-radius);
|
||||
cursor: pointer;
|
||||
font-weight: 500;
|
||||
transition: all 0.2s ease;
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.btn-primary {
|
||||
background: linear-gradient(135deg, var(--primary-color) 0%, #0056b3 100%);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.btn-primary:hover:not(:disabled) {
|
||||
transform: translateY(-2px);
|
||||
box-shadow: 0 4px 12px rgba(0,123,255,0.3);
|
||||
}
|
||||
|
||||
.btn-primary:disabled {
|
||||
opacity: 0.6;
|
||||
cursor: not-allowed;
|
||||
}
|
||||
|
||||
.btn-secondary {
|
||||
background-color: var(--secondary-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.btn-success {
|
||||
background-color: var(--success-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.btn-danger {
|
||||
background-color: var(--danger-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.btn-warning {
|
||||
background-color: var(--warning-color);
|
||||
color: #212529;
|
||||
}
|
||||
|
||||
.btn-large {
|
||||
padding: 15px 25px;
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
/* Icons */
|
||||
.icon-sync::before {
|
||||
content: "↻";
|
||||
margin-right: 8px;
|
||||
}
|
||||
|
||||
.icon-loading::before {
|
||||
content: "⏳";
|
||||
margin-right: 8px;
|
||||
}
|
||||
|
||||
/* Status display */
|
||||
.sync-status {
|
||||
margin-top: 15px;
|
||||
padding: 10px;
|
||||
border-radius: var(--border-radius);
|
||||
text-align: center;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.sync-status.syncing {
|
||||
background-color: #e3f2fd;
|
||||
color: var(--primary-color);
|
||||
}
|
||||
|
||||
.sync-status.success {
|
||||
background-color: #e8f5e9;
|
||||
color: var(--success-color);
|
||||
}
|
||||
|
||||
.sync-status.error {
|
||||
background-color: #ffebee;
|
||||
color: var(--danger-color);
|
||||
}
|
||||
|
||||
/* Statistics */
|
||||
.stat-item {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
margin-bottom: 10px;
|
||||
padding: 8px 0;
|
||||
border-bottom: 1px solid #eee;
|
||||
}
|
||||
|
||||
.stat-item:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.stat-item label {
|
||||
font-weight: 500;
|
||||
color: #666;
|
||||
}
|
||||
|
||||
.stat-item span {
|
||||
font-weight: 600;
|
||||
color: #333;
|
||||
}
|
||||
|
||||
/* Log display */
|
||||
.log-content {
|
||||
max-height: 400px;
|
||||
overflow-y: auto;
|
||||
padding: 10px;
|
||||
background-color: #f8f9fa;
|
||||
border-radius: var(--border-radius);
|
||||
font-family: monospace;
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
.log-entry {
|
||||
margin-bottom: 8px;
|
||||
padding: 8px;
|
||||
border-left: 3px solid #ddd;
|
||||
background-color: white;
|
||||
border-radius: 0 var(--border-radius) var(--border-radius) 0;
|
||||
}
|
||||
|
||||
.log-entry .timestamp {
|
||||
font-size: 0.8rem;
|
||||
color: #666;
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.log-entry .status {
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.log-entry .status.success {
|
||||
background-color: var(--success-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
.log-entry .status.error {
|
||||
background-color: var(--danger-color);
|
||||
color: white;
|
||||
}
|
||||
|
||||
/* Responsive Design */
|
||||
@media (max-width: 768px) {
|
||||
.layout-grid {
|
||||
grid-template-columns: 1fr;
|
||||
gap: 15px;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
order: 2;
|
||||
}
|
||||
|
||||
.main-content {
|
||||
order: 1;
|
||||
}
|
||||
|
||||
.nav-tabs {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.container {
|
||||
padding: 0 10px;
|
||||
}
|
||||
}
|
||||
56
examples/GarminSync/garminsync/web/static/utils.js
Normal file
@@ -0,0 +1,56 @@
|
||||
// Utility functions for the GarminSync application
|
||||
|
||||
class Utils {
|
||||
// Format date for display
|
||||
static formatDate(dateStr) {
|
||||
if (!dateStr) return '-';
|
||||
return new Date(dateStr).toLocaleDateString();
|
||||
}
|
||||
|
||||
// Format duration from seconds to HH:MM:SS
|
||||
static formatDuration(seconds) {
|
||||
if (!seconds) return '-';
|
||||
const hours = Math.floor(seconds / 3600);
|
||||
const minutes = Math.floor((seconds % 3600) / 60);
|
||||
const secondsLeft = Math.floor(seconds % 60);
|
||||
return `${hours}:${minutes.toString().padStart(2, '0')}:${secondsLeft.toString().padStart(2, '0')}`;
|
||||
}
|
||||
|
||||
// Format distance from meters to kilometers
|
||||
static formatDistance(meters) {
|
||||
if (!meters) return '-';
|
||||
return `${(meters / 1000).toFixed(1)} km`;
|
||||
}
|
||||
|
||||
// Format power from watts
|
||||
static formatPower(watts) {
|
||||
return watts ? `${Math.round(watts)}W` : '-';
|
||||
}
|
||||
|
||||
// Format heart rate (adds 'bpm')
|
||||
static formatHeartRate(hr) {
|
||||
return hr ? `${hr} bpm` : '-';
|
||||
}
|
||||
|
||||
// Show error message
|
||||
static showError(message) {
|
||||
console.error(message);
|
||||
// In a real implementation, you might want to show this in the UI (see the sketch after this class)
|
||||
alert(`Error: ${message}`);
|
||||
}
|
||||
|
||||
// Show success message
|
||||
static showSuccess(message) {
|
||||
console.log(message);
|
||||
// In a real implementation, you might want to show this in the UI
|
||||
}
|
||||
|
||||
// Format timestamp for log entries
|
||||
static formatTimestamp(timestamp) {
|
||||
if (!timestamp) return '';
|
||||
return new Date(timestamp).toLocaleString();
|
||||
}
|
||||
}
|
||||
|
||||
// Make Utils available globally
|
||||
window.Utils = Utils;
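// Illustrative sketch, not part of the original file: showError()/showSuccess() above fall
// back to alert() and console logging. One possible in-page alternative, assuming the
// templates add a container element like <div id="notifications"></div>:
function showNotification(message, type) {
    const container = document.getElementById('notifications');
    if (!container) return; // silently skip if the assumed container is missing
    const note = document.createElement('div');
    note.className = `notification ${type}`; // e.g. 'notification error' or 'notification success'
    note.textContent = message;
    container.appendChild(note);
    setTimeout(() => note.remove(), 5000); // auto-dismiss after 5 seconds
}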
|
||||
44
examples/GarminSync/garminsync/web/templates/activities.html
Normal file
@@ -0,0 +1,44 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<div class="navigation"></div>
|
||||
|
||||
<div class="activities-container">
|
||||
<div class="card activities-table-card">
|
||||
<div class="card-header">
|
||||
<h3>Activities</h3>
|
||||
</div>
|
||||
<div class="table-container">
|
||||
<table class="activities-table" id="activities-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Date</th>
|
||||
<th>Activity Type</th>
|
||||
<th>Duration</th>
|
||||
<th>Distance</th>
|
||||
<th>Max HR</th>
|
||||
<th>Avg HR</th>
|
||||
<th>Power</th>
|
||||
<th>Calories</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="activities-tbody">
|
||||
<!-- Data populated by JavaScript -->
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="pagination-container">
|
||||
<div class="pagination" id="pagination">
|
||||
<!-- Pagination controls -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
|
||||
<script src="/static/activities.js"></script>
|
||||
{% endblock %}
|
||||
154
examples/GarminSync/garminsync/web/templates/activity.html
Normal file
@@ -0,0 +1,154 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>Activity Details - GarminSync</title>
|
||||
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link href="/static/style.css" rel="stylesheet">
|
||||
</head>
|
||||
<body>
|
||||
<div class="container mt-4">
|
||||
<h1 class="mb-4">Activity Details</h1>
|
||||
|
||||
<div id="activity-details">
|
||||
<!-- Activity details will be populated by JavaScript -->
|
||||
</div>
|
||||
|
||||
<div class="mt-4">
|
||||
<h2>Analysis Metrics</h2>
|
||||
<table class="table table-striped" id="metrics-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Metric</th>
|
||||
<th>Value</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<!-- Metrics will be populated by JavaScript -->
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="mt-4">
|
||||
<button id="reprocess-btn" class="btn btn-warning">
|
||||
<span id="spinner" class="spinner-border spinner-border-sm d-none" role="status" aria-hidden="true"></span>
|
||||
Reprocess Activity
|
||||
</button>
|
||||
<div id="reprocess-result" class="mt-2"></div>
|
||||
</div>
|
||||
|
||||
<div class="mt-4">
|
||||
<a href="/activities" class="btn btn-secondary">Back to Activities</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script src="/static/utils.js"></script>
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', async function() {
|
||||
const activityId = new URLSearchParams(window.location.search).get('id');
|
||||
if (!activityId) {
|
||||
Utils.showError('Activity ID not provided');
|
||||
return;
|
||||
}
|
||||
|
||||
// Load activity details
|
||||
await loadActivity(activityId);
|
||||
|
||||
// Setup reprocess button
|
||||
document.getElementById('reprocess-btn').addEventListener('click', () => {
|
||||
reprocessActivity(activityId);
|
||||
});
|
||||
});
|
||||
|
||||
async function loadActivity(activityId) {
|
||||
try {
|
||||
const response = await fetch(`/api/activities/${activityId}`);
|
||||
if (!response.ok) {
|
||||
throw new Error('Failed to load activity details');
|
||||
}
|
||||
|
||||
const activity = await response.json();
|
||||
renderActivity(activity);
|
||||
} catch (error) {
|
||||
Utils.showError(`Error loading activity: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function renderActivity(activity) {
|
||||
const detailsEl = document.getElementById('activity-details');
|
||||
detailsEl.innerHTML = `
|
||||
<div class="card">
|
||||
<div class="card-body">
|
||||
<h5 class="card-title">${activity.name}</h5>
|
||||
<p class="card-text">
|
||||
<strong>Date:</strong> ${Utils.formatTimestamp(activity.start_time)}<br>
|
||||
<strong>Type:</strong> ${activity.activity_type}<br>
|
||||
<strong>Duration:</strong> ${Utils.formatDuration(activity.duration)}<br>
|
||||
<strong>Distance:</strong> ${Utils.formatDistance(activity.distance)}<br>
|
||||
<strong>Status:</strong>
|
||||
<span class="badge ${activity.reprocessed ? 'bg-success' : 'bg-secondary'}">
|
||||
${activity.reprocessed ? 'Processed' : 'Not Processed'}
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
|
||||
// Render metrics
|
||||
const metrics = [
|
||||
{ name: 'Max Heart Rate', value: activity.max_heart_rate, unit: 'bpm' },
|
||||
{ name: 'Avg Heart Rate', value: activity.avg_heart_rate, unit: 'bpm' },
|
||||
{ name: 'Avg Power', value: activity.avg_power, unit: 'W' },
|
||||
{ name: 'Calories', value: activity.calories, unit: 'kcal' },
|
||||
{ name: 'Gear Ratio', value: activity.gear_ratio, unit: '' },
|
||||
{ name: 'Gear Inches', value: activity.gear_inches, unit: '' }
|
||||
];
|
||||
|
||||
const tableBody = document.getElementById('metrics-table').querySelector('tbody');
|
||||
tableBody.innerHTML = '';
|
||||
|
||||
metrics.forEach(metric => {
|
||||
if (metric.value !== undefined && metric.value !== null) {
|
||||
const row = document.createElement('tr');
|
||||
row.innerHTML = `<td>${metric.name}</td><td>${metric.value} ${metric.unit}</td>`;
|
||||
tableBody.appendChild(row);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function reprocessActivity(activityId) {
|
||||
const btn = document.getElementById('reprocess-btn');
|
||||
const spinner = document.getElementById('spinner');
|
||||
const resultEl = document.getElementById('reprocess-result');
|
||||
|
||||
btn.disabled = true;
|
||||
spinner.classList.remove('d-none');
|
||||
resultEl.innerHTML = '';
|
||||
resultEl.classList.remove('alert-success', 'alert-danger');
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/activities/${activityId}/reprocess`, {
|
||||
method: 'POST'
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.text();
|
||||
throw new Error(error);
|
||||
}
|
||||
|
||||
resultEl.innerHTML = `<div class="alert alert-success">Activity reprocessed successfully!</div>`;
|
||||
|
||||
// Reload activity data to show updated metrics
|
||||
await loadActivity(activityId);
|
||||
} catch (error) {
|
||||
console.error('Reprocess error:', error);
|
||||
resultEl.innerHTML = `<div class="alert alert-danger">${error.message || 'Reprocessing failed'}</div>`;
|
||||
} finally {
|
||||
spinner.classList.add('d-none');
|
||||
btn.disabled = false;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
19
examples/GarminSync/garminsync/web/templates/base.html
Normal file
@@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>GarminSync</title>
|
||||
<link href="/static/style.css" rel="stylesheet">
|
||||
<link href="/static/components.css" rel="stylesheet">
|
||||
<link href="/static/responsive.css" rel="stylesheet">
|
||||
</head>
|
||||
<body>
|
||||
{% block content %}{% endblock %}
|
||||
|
||||
<script src="/static/navigation.js"></script>
|
||||
<script src="/static/utils.js"></script>
|
||||
|
||||
{% block page_scripts %}{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
151
examples/GarminSync/garminsync/web/templates/config.html
Normal file
@@ -0,0 +1,151 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<div class="navigation"></div>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<h3>GarminSync Configuration</h3>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<div class="card mb-4">
|
||||
<div class="card-header">Daemon Settings</div>
|
||||
<div class="card-body">
|
||||
<form id="daemon-config-form">
|
||||
<div class="form-group">
|
||||
<label for="daemon-enabled">Enable Daemon</label>
|
||||
<input type="checkbox" id="daemon-enabled" {% if config.enabled %}checked{% endif %}>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="cron-schedule">Synchronization Schedule</label>
|
||||
<input type="text" class="form-control" id="cron-schedule"
|
||||
value="{{ config.schedule_cron }}"
|
||||
placeholder="0 */6 * * *"
|
||||
title="Cron expression (every 6 hours by default)">
|
||||
<small class="form-text text-muted">
|
||||
Cron format: minute hour day(month) month day(week)
|
||||
</small>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary">Save Settings</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-header">Daemon Status</div>
|
||||
<div class="card-body">
|
||||
<div class="stat-item">
|
||||
<label>Current Status:</label>
|
||||
<span id="daemon-status-text">{{ config.status|capitalize }}</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<label>Last Run:</label>
|
||||
<span id="daemon-last-run">{{ config.last_run or 'Never' }}</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<label>Next Run:</label>
|
||||
<span id="daemon-next-run">{{ config.next_run or 'Not scheduled' }}</span>
|
||||
</div>
|
||||
|
||||
<div class="mt-3">
|
||||
<button id="start-daemon-btn" class="btn btn-success">
|
||||
Start Daemon
|
||||
</button>
|
||||
<button id="stop-daemon-btn" class="btn btn-danger">
|
||||
Stop Daemon
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
// Form submission handler
|
||||
document.getElementById('daemon-config-form').addEventListener('submit', async function(e) {
|
||||
e.preventDefault();
|
||||
|
||||
const enabled = document.getElementById('daemon-enabled').checked;
|
||||
const cronSchedule = document.getElementById('cron-schedule').value;
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/schedule', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
enabled: enabled,
|
||||
cron_schedule: cronSchedule
|
||||
})
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
Utils.showSuccess('Configuration saved successfully');
|
||||
updateStatus();
|
||||
} else {
|
||||
const error = await response.json();
|
||||
Utils.showError(`Error: ${error.detail}`);
|
||||
}
|
||||
} catch (error) {
|
||||
Utils.showError('Failed to save configuration: ' + error.message);
|
||||
}
|
||||
});
|
||||
|
||||
// Daemon control buttons
|
||||
document.getElementById('start-daemon-btn').addEventListener('click', async function() {
|
||||
try {
|
||||
const response = await fetch('/api/daemon/start', { method: 'POST' });
|
||||
if (response.ok) {
|
||||
Utils.showSuccess('Daemon started successfully');
|
||||
updateStatus();
|
||||
} else {
|
||||
const error = await response.json();
|
||||
Utils.showError(`Error: ${error.detail}`);
|
||||
}
|
||||
} catch (error) {
|
||||
Utils.showError('Failed to start daemon: ' + error.message);
|
||||
}
|
||||
});
|
||||
|
||||
document.getElementById('stop-daemon-btn').addEventListener('click', async function() {
|
||||
try {
|
||||
const response = await fetch('/api/daemon/stop', { method: 'POST' });
|
||||
if (response.ok) {
|
||||
Utils.showSuccess('Daemon stopped successfully');
|
||||
updateStatus();
|
||||
} else {
|
||||
const error = await response.json();
|
||||
Utils.showError(`Error: ${error.detail}`);
|
||||
}
|
||||
} catch (error) {
|
||||
Utils.showError('Failed to stop daemon: ' + error.message);
|
||||
}
|
||||
});
|
||||
|
||||
// Initial status update
|
||||
updateStatus();
|
||||
|
||||
async function updateStatus() {
|
||||
try {
|
||||
const response = await fetch('/api/status');
|
||||
const data = await response.json();
|
||||
|
||||
// Update status display
|
||||
document.getElementById('daemon-status-text').textContent =
|
||||
data.daemon.running ? 'Running' : 'Stopped';
|
||||
document.getElementById('daemon-last-run').textContent =
|
||||
data.daemon.last_run || 'Never';
|
||||
document.getElementById('daemon-next-run').textContent =
|
||||
data.daemon.next_run || 'Not scheduled';
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to update status:', error);
|
||||
}
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
54
examples/GarminSync/garminsync/web/templates/dashboard.html
Normal file
@@ -0,0 +1,54 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<div class="navigation"></div>
|
||||
|
||||
<div class="layout-grid">
|
||||
<!-- Left Sidebar -->
|
||||
<div class="sidebar">
|
||||
<div class="card sync-card">
|
||||
<button id="sync-now-btn" class="btn btn-primary btn-large">
|
||||
<i class="icon-sync"></i>
|
||||
Sync Now
|
||||
</button>
|
||||
<div class="sync-status" id="sync-status">
|
||||
Ready to sync
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card statistics-card">
|
||||
<h3>Statistics</h3>
|
||||
<div class="stat-item">
|
||||
<label>Total Activities:</label>
|
||||
<span id="total-activities">{{stats.total}}</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<label>Downloaded:</label>
|
||||
<span id="downloaded-activities">{{stats.downloaded}}</span>
|
||||
</div>
|
||||
<div class="stat-item">
|
||||
<label>Missing:</label>
|
||||
<span id="missing-activities">{{stats.missing}}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Right Content Area -->
|
||||
<div class="main-content">
|
||||
<div class="card log-display">
|
||||
<div class="card-header">
|
||||
<h3>Log Data</h3>
|
||||
</div>
|
||||
<div class="log-content" id="log-content">
|
||||
<!-- Real-time log updates will appear here -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
|
||||
<script src="/static/home.js"></script>
|
||||
{% endblock %}
|
||||
79
examples/GarminSync/garminsync/web/templates/logs.html
Normal file
@@ -0,0 +1,79 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<div class="container">
|
||||
<div class="navigation"></div>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<h3>Sync Logs</h3>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<!-- Filters -->
|
||||
<div class="card mb-4">
|
||||
<div class="card-header">Filters</div>
|
||||
<div class="card-body">
|
||||
<div class="form-group">
|
||||
<label for="status-filter">Status</label>
|
||||
<select id="status-filter" class="form-control">
|
||||
<option value="">All Statuses</option>
|
||||
<option value="success">Success</option>
|
||||
<option value="error">Error</option>
|
||||
<option value="partial">Partial</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="operation-filter">Operation</label>
|
||||
<select id="operation-filter" class="form-control">
|
||||
<option value="">All Operations</option>
|
||||
<option value="sync">Sync</option>
|
||||
<option value="download">Download</option>
|
||||
<option value="daemon">Daemon</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label for="date-filter">Date</label>
|
||||
<input type="date" id="date-filter" class="form-control">
|
||||
</div>
|
||||
|
||||
<button class="btn btn-primary" onclick="applyFilters()">Apply Filters</button>
|
||||
<button class="btn btn-secondary" onclick="refreshLogs()">Refresh</button>
|
||||
<button class="btn btn-warning" onclick="clearLogs()">Clear Logs</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Logs Table -->
|
||||
<div class="table-container">
|
||||
<table class="activities-table" id="logs-table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Timestamp</th>
|
||||
<th>Operation</th>
|
||||
<th>Status</th>
|
||||
<th>Message</th>
|
||||
<th>Activities Processed</th>
|
||||
<th>Activities Downloaded</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="logs-tbody">
|
||||
<!-- Populated by JavaScript -->
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Pagination -->
|
||||
<div class="pagination-container">
|
||||
<div class="pagination" id="pagination">
|
||||
<!-- Populated by JavaScript -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
|
||||
<script src="/static/logs.js"></script>
|
||||
{% endblock %}
|
||||
134
examples/GarminSync/garminsync/web/test_ui.py
Normal file
@@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple test script to verify the new UI is working correctly
|
||||
"""
|
||||
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
# Add the parent directory to the path to import garminsync modules
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
|
||||
def test_ui_endpoints():
|
||||
"""Test that the new UI endpoints are working correctly"""
|
||||
base_url = "http://localhost:8000"
|
||||
|
||||
# Test endpoints to check
|
||||
endpoints = [
|
||||
"/",
|
||||
"/activities",
|
||||
"/config",
|
||||
"/logs",
|
||||
"/api/status",
|
||||
"/api/activities/stats",
|
||||
"/api/dashboard/stats",
|
||||
]
|
||||
|
||||
print("Testing UI endpoints...")
|
||||
|
||||
failed_endpoints = []
|
||||
|
||||
for endpoint in endpoints:
|
||||
try:
|
||||
url = base_url + endpoint
|
||||
print(f"Testing {url}...")
|
||||
|
||||
response = requests.get(url, timeout=10)
|
||||
|
||||
if response.status_code == 200:
|
||||
print(f" ✓ {endpoint} - OK")
|
||||
else:
|
||||
print(f" ✗ {endpoint} - Status code: {response.status_code}")
|
||||
failed_endpoints.append(endpoint)
|
||||
|
||||
except requests.exceptions.ConnectionError:
|
||||
print(f" ✗ {endpoint} - Connection error (server not running?)")
|
||||
failed_endpoints.append(endpoint)
|
||||
except requests.exceptions.Timeout:
|
||||
print(f" ✗ {endpoint} - Timeout")
|
||||
failed_endpoints.append(endpoint)
|
||||
except Exception as e:
|
||||
print(f" ✗ {endpoint} - Error: {e}")
|
||||
failed_endpoints.append(endpoint)
|
||||
|
||||
if failed_endpoints:
|
||||
print(f"\nFailed endpoints: {failed_endpoints}")
|
||||
return False
|
||||
else:
|
||||
print("\nAll endpoints are working correctly!")
|
||||
return True
|
||||
|
||||
|
||||
def test_api_endpoints():
|
||||
"""Test that the new API endpoints are working correctly"""
|
||||
base_url = "http://localhost:8000"
|
||||
|
||||
# Test API endpoints
|
||||
api_endpoints = [
|
||||
("/api/activities", "GET"),
|
||||
(
|
||||
"/api/activities/1",
|
||||
"GET",
|
||||
), # This might fail if activity doesn't exist, which is OK
|
||||
("/api/dashboard/stats", "GET"),
|
||||
]
|
||||
|
||||
print("\nTesting API endpoints...")
|
||||
|
||||
for endpoint, method in api_endpoints:
|
||||
try:
|
||||
url = base_url + endpoint
|
||||
print(f"Testing {method} {url}...")
|
||||
|
||||
if method == "GET":
|
||||
response = requests.get(url, timeout=10)
|
||||
else:
|
||||
response = requests.post(url, timeout=10)
|
||||
|
||||
# For activity details, 404 is acceptable if activity doesn't exist
|
||||
if endpoint == "/api/activities/1" and response.status_code == 404:
|
||||
print(f" ✓ {endpoint} - OK (404 expected if activity doesn't exist)")
|
||||
continue
|
||||
|
||||
if response.status_code == 200:
|
||||
print(f" ✓ {endpoint} - OK")
|
||||
# Try to parse JSON
|
||||
try:
|
||||
data = response.json()
|
||||
print(
|
||||
f" Response keys: {list(data.keys()) if isinstance(data, dict) else 'Not a dict'}"
|
||||
)
|
||||
except ValueError:
|
||||
print(" Response is not JSON")
|
||||
else:
|
||||
print(f" ✗ {endpoint} - Status code: {response.status_code}")
|
||||
|
||||
except requests.exceptions.ConnectionError:
|
||||
print(f" ✗ {endpoint} - Connection error (server not running?)")
|
||||
except requests.exceptions.Timeout:
|
||||
print(f" ✗ {endpoint} - Timeout")
|
||||
except Exception as e:
|
||||
print(f" ✗ {endpoint} - Error: {e}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print("GarminSync UI Test Script")
|
||||
print("=" * 30)
|
||||
|
||||
# Test UI endpoints
|
||||
ui_success = test_ui_endpoints()
|
||||
|
||||
# Test API endpoints
|
||||
test_api_endpoints()
|
||||
|
||||
print("\n" + "=" * 30)
|
||||
if ui_success:
|
||||
print("UI tests completed successfully!")
|
||||
sys.exit(0)
|
||||
else:
|
||||
print("Some UI tests failed!")
|
||||
sys.exit(1)
|
||||