checkpoint 2

2025-08-22 20:29:04 -07:00
parent 6273138a65
commit 6c1fe70fa2
13 changed files with 678 additions and 122 deletions

View File

@@ -0,0 +1,130 @@
import os
import gzip
import fitdecode
import xml.etree.ElementTree as ET
from datetime import datetime


def detect_file_type(file_path):
    """Detect file format (FIT, XML, or unknown)."""
    try:
        with open(file_path, 'rb') as f:
            header = f.read(128)
        # XML exports (e.g. TCX) start with an XML declaration
        if b'<?xml' in header[:20]:
            return 'xml'
        # Look for the '.FIT' signature at a few plausible header offsets
        if len(header) >= 8 and header[4:8] == b'.FIT':
            return 'fit'
        if (len(header) >= 8 and
                (header[0:4] == b'.FIT' or
                 header[4:8] == b'FIT.' or
                 header[8:12] == b'.FIT')):
            return 'fit'
        return 'unknown'
    except Exception:
        return 'error'


def parse_xml_file(file_path):
    """Parse an XML (TCX) file to extract activity metrics."""
    try:
        tree = ET.parse(file_path)
        root = tree.getroot()
        namespaces = {'ns': 'http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2'}
        sport = root.find('.//ns:Activity', namespaces).get('Sport', 'other')

        distance = root.find('.//ns:DistanceMeters', namespaces)
        distance = float(distance.text) if distance is not None else None
        duration = root.find('.//ns:TotalTimeSeconds', namespaces)
        duration = float(duration.text) if duration is not None else None
        calories = root.find('.//ns:Calories', namespaces)
        calories = int(calories.text) if calories is not None else None

        hr_values = []
        for hr in root.findall('.//ns:HeartRateBpm/ns:Value', namespaces):
            try:
                hr_values.append(int(hr.text))
            except (TypeError, ValueError):
                continue
        max_hr = max(hr_values) if hr_values else None

        return {
            "activityType": {"typeKey": sport},
            "summaryDTO": {
                "duration": duration,
                "distance": distance,
                "maxHR": max_hr,
                "avgPower": None,
                "calories": calories
            }
        }
    except Exception:
        return None


def parse_fit_file(file_path):
    """Parse a FIT file (optionally gzip-compressed) to extract activity metrics."""
    metrics = {}
    try:
        with open(file_path, 'rb') as f:
            magic = f.read(2)
            f.seek(0)
        # gzip streams start with the magic bytes 0x1f 0x8b
        is_gzipped = magic == b'\x1f\x8b'

        if is_gzipped:
            with gzip.open(file_path, 'rb') as gz_file:
                from io import BytesIO
                with BytesIO(gz_file.read()) as fit_data:
                    fit = fitdecode.FitReader(fit_data)
                    for frame in fit:
                        if frame.frame_type == fitdecode.FrameType.DATA and frame.name == 'session':
                            metrics = {
                                "sport": frame.get_value("sport"),
                                "total_timer_time": frame.get_value("total_timer_time"),
                                "total_distance": frame.get_value("total_distance"),
                                "max_heart_rate": frame.get_value("max_heart_rate"),
                                "avg_power": frame.get_value("avg_power"),
                                "total_calories": frame.get_value("total_calories")
                            }
                            break
        else:
            with fitdecode.FitReader(file_path) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FrameType.DATA and frame.name == 'session':
                        metrics = {
                            "sport": frame.get_value("sport"),
                            "total_timer_time": frame.get_value("total_timer_time"),
                            "total_distance": frame.get_value("total_distance"),
                            "max_heart_rate": frame.get_value("max_heart_rate"),
                            "avg_power": frame.get_value("avg_power"),
                            "total_calories": frame.get_value("total_calories")
                        }
                        break

        return {
            "activityType": {"typeKey": metrics.get("sport", "other")},
            "summaryDTO": {
                "duration": metrics.get("total_timer_time"),
                "distance": metrics.get("total_distance"),
                "maxHR": metrics.get("max_heart_rate"),
                "avgPower": metrics.get("avg_power"),
                "calories": metrics.get("total_calories")
            }
        }
    except Exception:
        return None


def get_activity_metrics(activity, client=None):
    """
    Get activity metrics from a local file or the Garmin API.

    Returns parsed metrics or None.
    """
    metrics = None
    if activity.filename and os.path.exists(activity.filename):
        file_type = detect_file_type(activity.filename)
        if file_type == 'fit':
            metrics = parse_fit_file(activity.filename)
        elif file_type == 'xml':
            metrics = parse_xml_file(activity.filename)
    # Fall back to the Garmin Connect API when no local file could be parsed
    if not metrics and client:
        try:
            metrics = client.get_activity_details(activity.activity_id)
        except Exception:
            pass
    return metrics

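For orientation, this is roughly how the new module is meant to be driven. The SimpleNamespace stand-in and the file path below are illustrative only; the import path follows the garminsync.activity_parser imports used elsewhere in this commit.

from types import SimpleNamespace

from garminsync.activity_parser import detect_file_type, get_activity_metrics

# Any object with .filename and .activity_id works in place of a real Activity row
activity = SimpleNamespace(activity_id=123, filename="data/activity_123.fit")

print(detect_file_type(activity.filename))          # 'fit', 'xml', 'unknown' or 'error'
print(get_activity_metrics(activity, client=None))  # parsed metrics dict, or None (no API fallback)
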
View File

@@ -173,13 +173,20 @@ def daemon_mode(
        bool, typer.Option("--status", help="Show daemon status")
    ] = False,
    port: Annotated[int, typer.Option("--port", help="Web UI port")] = 8080,
    run_migrations: Annotated[
        bool,
        typer.Option(
            "--run-migrations/--skip-migrations",
            help="Run database migrations on startup (default: run)"
        )
    ] = True,
):
    """Daemon mode operations"""
    from .daemon import GarminSyncDaemon

    if start:
        daemon = GarminSyncDaemon()
        daemon.start(web_port=port)
        daemon.start(web_port=port, run_migrations=run_migrations)
    elif stop:
        # Implementation for stopping daemon (PID file or signal)
        typer.echo("Stopping daemon...")

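In practice this option lets migrations be skipped at daemon start, e.g. with an invocation along the lines of "daemon --start --skip-migrations". The exact command and entry-point names are not shown in this diff; only the --run-migrations/--skip-migrations flag names and the run-by-default behaviour come from the option above.
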
View File

@@ -10,6 +10,7 @@ from apscheduler.triggers.cron import CronTrigger
from .database import Activity, DaemonConfig, SyncLog, get_session
from .garmin import GarminClient
from .utils import logger
from .activity_parser import get_activity_metrics
class GarminSyncDaemon:
@@ -18,8 +19,17 @@ class GarminSyncDaemon:
        self.running = False
        self.web_server = None

    def start(self, web_port=8888):
        """Start daemon with scheduler and web UI"""
    def start(self, web_port=8888, run_migrations=True):
        """Start daemon with scheduler and web UI

        :param web_port: Port for the web UI
        :param run_migrations: Whether to run database migrations on startup
        """
        # Set migration flag for entrypoint
        if run_migrations:
            os.environ['RUN_MIGRATIONS'] = "1"
        else:
            os.environ['RUN_MIGRATIONS'] = "0"

        try:
            # Load configuration from database
            config_data = self.load_config()
@@ -105,26 +115,38 @@ class GarminSyncDaemon:
            for activity in missing_activities:
                try:
                    # Use the correct method name
                    # Download FIT file
                    fit_data = client.download_activity_fit(activity.activity_id)

                    # Save the file
                    # Save to file
                    import os
                    from pathlib import Path

                    data_dir = Path(os.getenv("DATA_DIR", "data"))
                    data_dir.mkdir(parents=True, exist_ok=True)
                    timestamp = activity.start_time.replace(":", "-").replace(" ", "_")
                    filename = f"activity_{activity.activity_id}_{timestamp}.fit"
                    filepath = data_dir / filename
                    with open(filepath, "wb") as f:
                        f.write(fit_data)

                    # Update activity record
                    activity.filename = str(filepath)
                    activity.downloaded = True
                    activity.last_sync = datetime.now().isoformat()

                    # Get metrics immediately after download
                    metrics = get_activity_metrics(activity, client)
                    if metrics:
                        # Update metrics if available; guard against None values,
                        # since the parser leaves missing fields as None
                        summary = metrics.get("summaryDTO", {})
                        activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                        if summary.get("duration") is not None:
                            activity.duration = int(float(summary["duration"]))
                        if summary.get("distance") is not None:
                            activity.distance = float(summary["distance"])
                        if summary.get("maxHR") is not None:
                            activity.max_heart_rate = int(float(summary["maxHR"]))
                        if summary.get("avgPower") is not None:
                            activity.avg_power = float(summary["avgPower"])
                        if summary.get("calories") is not None:
                            activity.calories = int(float(summary["calories"]))
                        session.commit()

                    downloaded_count += 1
                    session.commit()
@@ -135,7 +157,8 @@ class GarminSyncDaemon:
                    session.rollback()

            self.log_operation(
                "sync", "success", f"Downloaded {downloaded_count} new activities"
                "sync", "success",
                f"Downloaded {downloaded_count} new activities and updated metrics"
            )

            # Update last run time

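As a quick illustration of the filename scheme above (the activity id and start time here are made-up values):

# Illustration only: hypothetical activity id 123 started at "2025-08-22 06:15:00"
start_time = "2025-08-22 06:15:00"
timestamp = start_time.replace(":", "-").replace(" ", "_")
print(f"activity_123_{timestamp}.fit")  # -> activity_123_2025-08-22_06-15-00.fit
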
View File

@@ -120,6 +120,8 @@ def get_session():
    return Session()


from garminsync.activity_parser import get_activity_metrics


def sync_database(garmin_client):
    """Sync local database with Garmin Connect activities.
@@ -134,36 +136,70 @@ def sync_database(garmin_client):
print("No activities returned from Garmin API")
return
for activity in activities:
# Check if activity is a dictionary and has required fields
if not isinstance(activity, dict):
print(f"Invalid activity data: {activity}")
for activity_data in activities:
if not isinstance(activity_data, dict):
print(f"Invalid activity data: {activity_data}")
continue
# Safely access dictionary keys
activity_id = activity.get("activityId")
start_time = activity.get("startTimeLocal")
avg_heart_rate = activity.get("averageHR", None)
calories = activity.get("calories", None)
activity_id = activity_data.get("activityId")
start_time = activity_data.get("startTimeLocal")
if not activity_id or not start_time:
print(f"Missing required fields in activity: {activity}")
print(f"Missing required fields in activity: {activity_data}")
continue
existing = (
session.query(Activity).filter_by(activity_id=activity_id).first()
)
existing = session.query(Activity).filter_by(activity_id=activity_id).first()
# Create or update basic activity info
if not existing:
new_activity = Activity(
activity = Activity(
activity_id=activity_id,
start_time=start_time,
avg_heart_rate=avg_heart_rate,
calories=calories,
downloaded=False,
created_at=datetime.now().isoformat(),
last_sync=datetime.now().isoformat(),
)
session.add(new_activity)
session.add(activity)
session.flush() # Assign ID
else:
activity = existing
# Update metrics using shared parser
metrics = get_activity_metrics(activity, garmin_client)
if metrics:
activity.activity_type = metrics.get("activityType", {}).get("typeKey")
# Extract duration in seconds
duration = metrics.get("summaryDTO", {}).get("duration")
if duration is not None:
activity.duration = int(float(duration))
# Extract distance in meters
distance = metrics.get("summaryDTO", {}).get("distance")
if distance is not None:
activity.distance = float(distance)
# Extract heart rates
max_hr = metrics.get("summaryDTO", {}).get("maxHR")
if max_hr is not None:
activity.max_heart_rate = int(float(max_hr))
avg_hr = metrics.get("summaryDTO", {}).get("avgHR", None) or \
metrics.get("summaryDTO", {}).get("averageHR", None)
if avg_hr is not None:
activity.avg_heart_rate = int(float(avg_hr))
# Extract power and calories
avg_power = metrics.get("summaryDTO", {}).get("avgPower")
if avg_power is not None:
activity.avg_power = float(avg_power)
calories = metrics.get("summaryDTO", {}).get("calories")
if calories is not None:
activity.calories = int(float(calories))
# Update sync timestamp
activity.last_sync = datetime.now().isoformat()
session.commit()
except SQLAlchemyError as e:

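Both the file parsers and the Garmin API fallback hand sync_database a dict of this shape. The values below are made-up sample data; a real summaryDTO from the API may also carry averageHR, which is why the code checks both avgHR and averageHR.

metrics = {
    "activityType": {"typeKey": "cycling"},
    "summaryDTO": {
        "duration": 3723.0,    # seconds
        "distance": 30120.5,   # meters
        "maxHR": 172,
        "avgPower": 185.0,
        "calories": 640,
    },
}
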
View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
"""
Migration script to populate new activity fields from Garmin API
Migration script to populate new activity fields from FIT files or Garmin API
"""
import os
@@ -16,11 +16,22 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from garminsync.database import Activity, get_session, init_db
from garminsync.garmin import GarminClient
from garminsync.activity_parser import get_activity_metrics


def add_columns_to_database():
    """Add new columns to the activities table if they don't exist"""
    print("Adding new columns to database...")
# Add the parent directory to the path to import garminsync modules
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from garminsync.database import Activity, get_session, init_db
from garminsync.garmin import GarminClient


def add_columns_to_database():
    """Add new columns to the activities table if they don't exist"""
    print("Adding new columns to database...", flush=True)

    # Get database engine
    db_path = os.path.join(os.getenv("DATA_DIR", "data"), "garmin.db")
@@ -49,7 +60,7 @@ def add_columns_to_database():
        with engine.connect() as conn:
            for column_name in new_columns:
                if column_name not in existing_columns:
                    print(f"Adding column {column_name}...")
                    print(f"Adding column {column_name}...", flush=True)
                    if column_name in ["distance", "avg_power"]:
                        conn.execute(
                            text(
@@ -69,21 +80,22 @@ def add_columns_to_database():
                            )
                        )
                    conn.commit()
                    print(f"Column {column_name} added successfully")
                    print(f"Column {column_name} added successfully", flush=True)
                else:
                    print(f"Column {column_name} already exists")
                    print(f"Column {column_name} already exists", flush=True)

        print("Database schema updated successfully")
        print("Database schema updated successfully", flush=True)
        return True

    except Exception as e:
        print(f"Failed to update database schema: {e}")
        print(f"Failed to update database schema: {e}", flush=True)
        return False
def migrate_activities():
    """Migrate activities to populate new fields from Garmin API"""
    print("Starting activity migration...")
    """Migrate activities to populate new fields from FIT files or Garmin API"""
    print("Starting activity migration...", flush=True)

    # First, add columns to database
    if not add_columns_to_database():
@@ -92,9 +104,9 @@ def migrate_activities():
    # Initialize Garmin client
    try:
        client = GarminClient()
        print("Garmin client initialized successfully")
        print("Garmin client initialized successfully", flush=True)
    except Exception as e:
        print(f"Failed to initialize Garmin client: {e}")
        print(f"Failed to initialize Garmin client: {e}", flush=True)
        # Continue with migration but without Garmin data
        client = None
@@ -106,12 +118,12 @@ def migrate_activities():
        activities = (
            session.query(Activity).filter(Activity.activity_type.is_(None)).all()
        )
        print(f"Found {len(activities)} activities to migrate")
        print(f"Found {len(activities)} activities to migrate", flush=True)

        # If no activities found, try to get all activities (in case activity_type column was just added)
        if len(activities) == 0:
            activities = session.query(Activity).all()
            print(f"Found {len(activities)} total activities")
            print(f"Found {len(activities)} total activities", flush=True)

        updated_count = 0
        error_count = 0
@@ -119,13 +131,16 @@ def migrate_activities():
        for i, activity in enumerate(activities):
            try:
                print(
                    f"Processing activity {i+1}/{len(activities)} (ID: {activity.activity_id})"
                    f"Processing activity {i+1}/{len(activities)} (ID: {activity.activity_id})",
                    flush=True
                )

                # Fetch detailed activity data from Garmin (if client is available)
                activity_details = None
                if client:
                    activity_details = client.get_activity_details(activity.activity_id)
                # Use shared parser to get activity metrics
                activity_details = get_activity_metrics(activity, client)
                if activity_details is not None:
                    print(f" Successfully parsed metrics for activity {activity.activity_id}", flush=True)
                else:
                    print(f" Could not retrieve metrics for activity {activity.activity_id}", flush=True)

                # Update activity fields if we have details
                if activity_details:
@@ -170,19 +185,19 @@ def migrate_activities():
                # Print progress every 10 activities
                if (i + 1) % 10 == 0:
                    print(f" Progress: {i+1}/{len(activities)} activities processed")
                    print(f" Progress: {i+1}/{len(activities)} activities processed", flush=True)

            except Exception as e:
                print(f" Error processing activity {activity.activity_id}: {e}")
                print(f" Error processing activity {activity.activity_id}: {e}", flush=True)
                session.rollback()
                error_count += 1
                continue

        print(f"Migration completed. Updated: {updated_count}, Errors: {error_count}")
        print(f"Migration completed. Updated: {updated_count}, Errors: {error_count}", flush=True)
        return True  # Allow partial success

    except Exception as e:
        print(f"Migration failed: {e}")
        print(f"Migration failed: {e}", flush=True)
        return False
    finally:
        session.close()
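
The daemon only sets the RUN_MIGRATIONS environment variable "for entrypoint"; the entrypoint itself is not part of this diff. A minimal sketch of what such a gate might look like, assuming a Python entrypoint (the real entrypoint script and its behaviour are not shown in this commit):

import os

# Hypothetical entrypoint gate: honour the flag set via --run-migrations/--skip-migrations,
# defaulting to running migrations, matching run_migrations=True above.
if os.environ.get("RUN_MIGRATIONS", "1") == "1":
    print("Running database migrations before starting the daemon...", flush=True)
    # e.g. invoke the migration script above here
else:
    print("Skipping database migrations (RUN_MIGRATIONS=0)", flush=True)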