diff --git a/entrypoint.sh b/entrypoint.sh
index 336e491..a61423c 100644
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -2,8 +2,8 @@
# Run database migrations
echo "Running database migrations..."
-export ALEMBIC_CONFIG=./migrations/alembic.ini
-export ALEMBIC_SCRIPT_LOCATION=./migrations/versions
+export ALEMBIC_CONFIG=${ALEMBIC_CONFIG:-./migrations/alembic.ini}
+export ALEMBIC_SCRIPT_LOCATION=${ALEMBIC_SCRIPT_LOCATION:-./migrations/versions}
alembic upgrade head
if [ $? -ne 0 ]; then
echo "Migration failed!" >&2
diff --git a/garminsync/__pycache__/__init__.cpython-310.pyc b/garminsync/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..28741c6
Binary files /dev/null and b/garminsync/__pycache__/__init__.cpython-310.pyc differ
diff --git a/garminsync/__pycache__/cli.cpython-310.pyc b/garminsync/__pycache__/cli.cpython-310.pyc
new file mode 100644
index 0000000..37af08f
Binary files /dev/null and b/garminsync/__pycache__/cli.cpython-310.pyc differ
diff --git a/garminsync/__pycache__/config.cpython-310.pyc b/garminsync/__pycache__/config.cpython-310.pyc
new file mode 100644
index 0000000..146b6f3
Binary files /dev/null and b/garminsync/__pycache__/config.cpython-310.pyc differ
diff --git a/garminsync/__pycache__/daemon.cpython-310.pyc b/garminsync/__pycache__/daemon.cpython-310.pyc
new file mode 100644
index 0000000..5b145b9
Binary files /dev/null and b/garminsync/__pycache__/daemon.cpython-310.pyc differ
diff --git a/garminsync/__pycache__/database.cpython-310.pyc b/garminsync/__pycache__/database.cpython-310.pyc
new file mode 100644
index 0000000..683c691
Binary files /dev/null and b/garminsync/__pycache__/database.cpython-310.pyc differ
diff --git a/garminsync/__pycache__/garmin.cpython-310.pyc b/garminsync/__pycache__/garmin.cpython-310.pyc
new file mode 100644
index 0000000..d1c7df3
Binary files /dev/null and b/garminsync/__pycache__/garmin.cpython-310.pyc differ
diff --git a/garminsync/database.py b/garminsync/database.py
index 1b43faf..2479684 100644
--- a/garminsync/database.py
+++ b/garminsync/database.py
@@ -21,6 +21,7 @@ class Activity(Base):
duration = Column(Integer, nullable=True)
distance = Column(Float, nullable=True)
max_heart_rate = Column(Integer, nullable=True)
+ avg_heart_rate = Column(Integer, nullable=True)
avg_power = Column(Float, nullable=True)
calories = Column(Integer, nullable=True)
filename = Column(String, unique=True, nullable=True)
@@ -63,6 +64,7 @@ class Activity(Base):
"start_time": self.start_time,
"activity_type": self.activity_type,
"max_heart_rate": self.max_heart_rate,
+ "avg_heart_rate": self.avg_heart_rate,
"avg_power": self.avg_power,
"calories": self.calories,
}
@@ -141,6 +143,8 @@ def sync_database(garmin_client):
# Safely access dictionary keys
activity_id = activity.get("activityId")
start_time = activity.get("startTimeLocal")
+ avg_heart_rate = activity.get("averageHR", None)
+ calories = activity.get("calories", None)
if not activity_id or not start_time:
print(f"Missing required fields in activity: {activity}")
@@ -153,6 +157,8 @@ def sync_database(garmin_client):
new_activity = Activity(
activity_id=activity_id,
start_time=start_time,
+ avg_heart_rate=avg_heart_rate,
+ calories=calories,
downloaded=False,
created_at=datetime.now().isoformat(),
last_sync=datetime.now().isoformat(),
diff --git a/garminsync/web/routes.py b/garminsync/web/routes.py
index 0a3c236..24a378e 100644
--- a/garminsync/web/routes.py
+++ b/garminsync/web/routes.py
@@ -304,6 +304,7 @@ async def get_activities(
"duration": activity.duration,
"distance": activity.distance,
"max_heart_rate": activity.max_heart_rate,
+ "avg_heart_rate": activity.avg_heart_rate,
"avg_power": activity.avg_power,
"calories": activity.calories,
"filename": activity.filename,
diff --git a/garminsync/web/static/activities.js b/garminsync/web/static/activities.js
index dd378f9..6dc16eb 100644
--- a/garminsync/web/static/activities.js
+++ b/garminsync/web/static/activities.js
@@ -65,8 +65,10 @@ class ActivitiesPage {
${activity.activity_type || '-'} |
${Utils.formatDuration(activity.duration)} |
${Utils.formatDistance(activity.distance)} |
- ${activity.max_heart_rate || '-'} |
+ ${Utils.formatHeartRate(activity.max_heart_rate)} |
+ ${Utils.formatHeartRate(activity.avg_heart_rate)} |
${Utils.formatPower(activity.avg_power)} |
+ ${activity.calories ? activity.calories.toLocaleString() : '-'} |
`;
return row;
diff --git a/garminsync/web/static/utils.js b/garminsync/web/static/utils.js
index 1a1e74c..294918f 100644
--- a/garminsync/web/static/utils.js
+++ b/garminsync/web/static/utils.js
@@ -7,12 +7,13 @@ class Utils {
return new Date(dateStr).toLocaleDateString();
}
- // Format duration from seconds to HH:MM
+ // Format duration from seconds to HH:MM:SS
static formatDuration(seconds) {
if (!seconds) return '-';
const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60);
- return `${hours}:${minutes.toString().padStart(2, '0')}`;
+ const secondsLeft = seconds % 60;
+ return `${hours}:${minutes.toString().padStart(2, '0')}:${secondsLeft.toString().padStart(2, '0')}`;
}
// Format distance from meters to kilometers
@@ -26,6 +27,11 @@ class Utils {
return watts ? `${Math.round(watts)}W` : '-';
}
+ // Format heart rate (adds 'bpm')
+ static formatHeartRate(hr) {
+ return hr ? `${hr} bpm` : '-';
+ }
+
// Show error message
static showError(message) {
console.error(message);
diff --git a/garminsync/web/templates/activities.html b/garminsync/web/templates/activities.html
index 48d039f..2303f25 100644
--- a/garminsync/web/templates/activities.html
+++ b/garminsync/web/templates/activities.html
@@ -18,7 +18,9 @@
Duration |
Distance |
Max HR |
+ Avg HR |
Power |
+ Calories |
diff --git a/mandates.md b/mandates.md
new file mode 100644
index 0000000..6680ee0
--- /dev/null
+++ b/mandates.md
@@ -0,0 +1,9 @@
+
+- use the just_run_* tools via the MCP server
+- all installs must be done inside the Docker container.
+- NO installs on the host
+- database upgrades should be handled during container server start up
+- always rebuild the container before running tests
+- if you need clarification return to PLAN mode
+- force rereading of the mandates on each cycle
+
\ No newline at end of file
diff --git a/migrations/versions/20240822165438_add_hr_and_calories_columns.py b/migrations/versions/20240822165438_add_hr_and_calories_columns.py
new file mode 100644
index 0000000..d505977
--- /dev/null
+++ b/migrations/versions/20240822165438_add_hr_and_calories_columns.py
@@ -0,0 +1,23 @@
+"""Add avg_heart_rate and calories columns to activities table
+
+Revision ID: 20240822165438
+Revises: 20240821150000
+Create Date: 2024-08-22 16:54:38.123456
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = '20240822165438'
+down_revision = '20240821150000'
+branch_labels = None
+depends_on = None
+
+def upgrade():
+ op.add_column('activities', sa.Column('avg_heart_rate', sa.Integer(), nullable=True))
+ op.add_column('activities', sa.Column('calories', sa.Integer(), nullable=True))
+
+def downgrade():
+ op.drop_column('activities', 'avg_heart_rate')
+ op.drop_column('activities', 'calories')
diff --git a/migrations/versions/__pycache__/env.cpython-310.pyc b/migrations/versions/__pycache__/env.cpython-310.pyc
new file mode 100644
index 0000000..7d0ef02
Binary files /dev/null and b/migrations/versions/__pycache__/env.cpython-310.pyc differ
diff --git a/patches/garth_data_weight.py b/patches/garth_data_weight.py
new file mode 100644
index 0000000..d8d82d0
--- /dev/null
+++ b/patches/garth_data_weight.py
@@ -0,0 +1,80 @@
+from datetime import date, datetime, timedelta
+from itertools import chain
+
+from pydantic import Field, ValidationInfo, field_validator
+from pydantic.dataclasses import dataclass
+from typing_extensions import Self
+
+from .. import http
+from ..utils import (
+ camel_to_snake_dict,
+ format_end_date,
+ get_localized_datetime,
+)
+from ._base import MAX_WORKERS, Data
+
+
+@dataclass
+class WeightData(Data):
+ sample_pk: int
+ calendar_date: date
+ weight: int
+ source_type: str
+ weight_delta: float
+ datetime_utc: datetime = Field(..., alias="timestamp_gmt")
+ datetime_local: datetime = Field(..., alias="date")
+ bmi: float | None = None
+ body_fat: float | None = None
+ body_water: float | None = None
+ bone_mass: int | None = None
+ muscle_mass: int | None = None
+ physique_rating: float | None = None
+ visceral_fat: float | None = None
+ metabolic_age: int | None = None
+
+ @field_validator("datetime_local", mode="before")
+ @classmethod
+ def to_localized_datetime(cls, v: int, info: ValidationInfo) -> datetime:
+ return get_localized_datetime(info.data["datetime_utc"].timestamp() * 1000, v)
+
+ @classmethod
+ def get(
+ cls, day: date | str, *, client: http.Client | None = None
+ ) -> Self | None:
+ client = client or http.client
+ path = f"/weight-service/weight/dayview/{day}"
+ data = client.connectapi(path)
+ day_weight_list = data["dateWeightList"] if data else []
+
+ if not day_weight_list:
+ return None
+
+ # Get first (most recent) weight entry for the day
+ weight_data = camel_to_snake_dict(day_weight_list[0])
+ return cls(**weight_data)
+
+ @classmethod
+ def list(
+ cls,
+ end: date | str | None = None,
+ days: int = 1,
+ *,
+ client: http.Client | None = None,
+ max_workers: int = MAX_WORKERS,
+ ) -> list[Self]:
+ client = client or http.client
+ end = format_end_date(end)
+ start = end - timedelta(days=days - 1)
+
+ data = client.connectapi(
+ f"/weight-service/weight/range/{start}/{end}?includeAll=true"
+ )
+ weight_summaries = data["dailyWeightSummaries"] if data else []
+ weight_metrics = chain.from_iterable(
+ summary["allWeightMetrics"] for summary in weight_summaries
+ )
+ weight_data_list = (
+ cls(**camel_to_snake_dict(weight_data))
+ for weight_data in weight_metrics
+ )
+ return sorted(weight_data_list, key=lambda d: d.datetime_utc)
diff --git a/tests/__pycache__/test_sync.cpython-310-pytest-8.1.1.pyc b/tests/__pycache__/test_sync.cpython-310-pytest-8.1.1.pyc
new file mode 100644
index 0000000..bcebf20
Binary files /dev/null and b/tests/__pycache__/test_sync.cpython-310-pytest-8.1.1.pyc differ
diff --git a/tests/activity_table_validation.sh b/tests/activity_table_validation.sh
new file mode 100755
index 0000000..b81a2bd
--- /dev/null
+++ b/tests/activity_table_validation.sh
@@ -0,0 +1,114 @@
+#!/bin/bash
+
+# Activity Table Validation Script
+# This script tests the activity table implementation
+
+# Configuration
+API_URL="http://localhost:8888/api/api/activities" # Port 8888 matches the container; NOTE(review): confirm the doubled "/api/api" path prefix is intentional
+TIMEOUT=10
+
+# Function to display test results
+display_result() {
+ local test_name=$1
+ local result=$2
+ local message=$3
+
+ if [ "$result" = "PASS" ]; then
+ echo "✅ $test_name: $message"
+ else
+ echo "❌ $test_name: $message"
+ fi
+}
+
+# Function to wait for API to be ready
+wait_for_api() {
+ echo "Waiting for API to start..."
+ attempts=0
+ max_attempts=60 # Increased timeout to 60 seconds
+
+    while true; do
+        # NOTE(review): HTTP response bodies never contain server log lines
+        # such as "Uvicorn running on" or "Application startup complete", so
+        # grepping for them always failed and this loop timed out even when
+        # the server was healthy. Probe reachability via HTTP status instead.
+        if curl -s -f -m 1 "http://localhost:8888" -o /dev/null; then
+ echo "API started successfully"
+ break
+ fi
+
+ attempts=$((attempts+1))
+        if [ "$attempts" -ge "$max_attempts" ]; then
+ echo "API failed to start within $max_attempts seconds"
+ exit 1
+ fi
+
+ sleep 1
+ done
+}
+
+# Wait for API to be ready
+wait_for_api
+
+# Test 1: Basic API response
+echo "Running basic API response test..."
+response=$(curl -s -m $TIMEOUT "$API_URL" | jq '.')
+if [ $? -eq 0 ]; then
+ if [[ "$response" == *"activities"* ]] && [[ "$response" == *"total_pages"* ]] && [[ "$response" == *"status"* ]]; then
+ display_result "Basic API Response" PASS "API returns expected structure"
+ else
+ display_result "Basic API Response" FAIL "API response doesn't contain expected fields"
+ fi
+else
+ display_result "Basic API Response" FAIL "API request failed"
+fi
+
+# Test 2: Pagination test
+echo "Running pagination test..."
+page1=$(curl -s -m $TIMEOUT "$API_URL?page=1" | jq '.')
+page2=$(curl -s -m $TIMEOUT "$API_URL?page=2" | jq '.')
+
+if [ $? -eq 0 ]; then
+ page1_count=$(echo "$page1" | jq '.activities | length')
+ page2_count=$(echo "$page2" | jq '.activities | length')
+
+ if [ "$page1_count" -gt 0 ] && [ "$page2_count" -gt 0 ]; then
+ display_result "Pagination Test" PASS "Both pages contain activities"
+ else
+ display_result "Pagination Test" FAIL "One or more pages are empty"
+ fi
+else
+ display_result "Pagination Test" FAIL "API request failed"
+fi
+
+# Test 3: Data consistency test
+echo "Running data consistency test..."
+activity_id=$(echo "$page1" | jq -r '.activities[0].id')
+activity_name=$(echo "$page1" | jq -r '.activities[0].name')
+
+details_response=$(curl -s -m $TIMEOUT "$API_URL/$activity_id" | jq '.')
+if [ $? -eq 0 ]; then
+ details_id=$(echo "$details_response" | jq -r '.id')
+ details_name=$(echo "$details_response" | jq -r '.name')
+
+ if [ "$activity_id" = "$details_id" ] && [ "$activity_name" = "$details_name" ]; then
+ display_result "Data Consistency Test" PASS "Activity details match API response"
+ else
+ display_result "Data Consistency Test" FAIL "Activity details don't match API response"
+ fi
+else
+ display_result "Data Consistency Test" FAIL "API request failed"
+fi
+
+# Test 4: Error handling test
+echo "Running error handling test..."
+error_response=$(curl -s -m $TIMEOUT "$API_URL/999999999" | jq '.')
+if [ $? -eq 0 ]; then
+ if [[ "$error_response" == *"detail"* ]] && [[ "$error_response" == *"not found"* ]]; then
+ display_result "Error Handling Test" PASS "API returns expected error for non-existent activity"
+ else
+ display_result "Error Handling Test" FAIL "API doesn't return expected error for non-existent activity"
+ fi
+else
+ display_result "Error Handling Test" FAIL "API request failed"
+fi
+
+echo "All tests completed."
diff --git a/tests/test_sync.py b/tests/test_sync.py
new file mode 100644
index 0000000..2243039
--- /dev/null
+++ b/tests/test_sync.py
@@ -0,0 +1,102 @@
+import pytest
+import sys
+from unittest.mock import Mock, patch
+
+# Add the project root to the Python path
+sys.path.insert(0, '/app')
+
+from garminsync.database import sync_database
+from garminsync.garmin import GarminClient
+
+
+def test_sync_database_with_valid_activities():
+ """Test sync_database with valid API response"""
+ mock_client = Mock(spec=GarminClient)
+ mock_client.get_activities.return_value = [
+ {"activityId": 12345, "startTimeLocal": "2023-01-01T10:00:00"},
+ {"activityId": 67890, "startTimeLocal": "2023-01-02T11:00:00"}
+ ]
+
+ with patch('garminsync.database.get_session') as mock_session:
+ mock_session.return_value.query.return_value.filter_by.return_value.first.return_value = None
+
+ sync_database(mock_client)
+
+ # Verify get_activities was called
+ mock_client.get_activities.assert_called_once_with(0, 1000)
+
+ # Verify database operations
+ mock_session.return_value.add.assert_called()
+ mock_session.return_value.commit.assert_called()
+
+
+def test_sync_database_with_none_activities():
+ """Test sync_database with None response from API"""
+ mock_client = Mock(spec=GarminClient)
+ mock_client.get_activities.return_value = None
+
+ with patch('garminsync.database.get_session') as mock_session:
+ sync_database(mock_client)
+
+ # Verify get_activities was called
+ mock_client.get_activities.assert_called_once_with(0, 1000)
+
+ # Verify no database operations
+ mock_session.return_value.add.assert_not_called()
+ mock_session.return_value.commit.assert_not_called()
+
+
+def test_sync_database_with_missing_fields():
+ """Test sync_database with activities missing required fields"""
+ mock_client = Mock(spec=GarminClient)
+ mock_client.get_activities.return_value = [
+ {"activityId": 12345}, # Missing startTimeLocal
+ {"startTimeLocal": "2023-01-02T11:00:00"}, # Missing activityId
+ {"activityId": 67890, "startTimeLocal": "2023-01-03T12:00:00"} # Valid
+ ]
+
+ with patch('garminsync.database.get_session') as mock_session:
+ mock_session.return_value.query.return_value.filter_by.return_value.first.return_value = None
+
+ sync_database(mock_client)
+
+ # Verify only one activity was added (the valid one)
+ assert mock_session.return_value.add.call_count == 1
+ mock_session.return_value.commit.assert_called()
+
+
+def test_sync_database_with_existing_activities():
+ """Test sync_database doesn't duplicate existing activities"""
+ mock_client = Mock(spec=GarminClient)
+ mock_client.get_activities.return_value = [
+ {"activityId": 12345, "startTimeLocal": "2023-01-01T10:00:00"}
+ ]
+
+ with patch('garminsync.database.get_session') as mock_session:
+ # Mock existing activity
+ mock_session.return_value.query.return_value.filter_by.return_value.first.return_value = Mock()
+
+ sync_database(mock_client)
+
+ # Verify no new activities were added
+ mock_session.return_value.add.assert_not_called()
+ mock_session.return_value.commit.assert_called()
+
+
+def test_sync_database_with_invalid_activity_data():
+ """Test sync_database with invalid activity data types"""
+ mock_client = Mock(spec=GarminClient)
+ mock_client.get_activities.return_value = [
+ "invalid activity data", # Not a dict
+ None, # None value
+ {"activityId": 12345, "startTimeLocal": "2023-01-01T10:00:00"} # Valid
+ ]
+
+ with patch('garminsync.database.get_session') as mock_session:
+ mock_session.return_value.query.return_value.filter_by.return_value.first.return_value = None
+
+ sync_database(mock_client)
+
+ # Verify only one activity was added (the valid one)
+ assert mock_session.return_value.add.call_count == 1
+ mock_session.return_value.commit.assert_called()