diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..add3e55
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,58 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+env/
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Virtual Environment
+.venv/
+venv/
+ENV/
+
+# Databases
+*.db
+*.sqlite3
+
+# Logs
+*.log
+test_log_*.txt
+coverage_report.txt
+pytest_output.txt
+
+# Security/Secrets
+*.env
+.env.*
+*.pem
+*.key
+id_rsa
+secrets/
+
+# IDEs
+.vscode/
+.idea/
+
+# Tooling
+.agent/
+.kilocode/
+.qwen/
+.pytest_cache/
+.mypy_cache/
+.coverage
+htmlcov/
diff --git a/FitnessSync/backend/.coverage b/FitnessSync/backend/.coverage deleted file mode 100644 index 68f392d..0000000 Binary files a/FitnessSync/backend/.coverage and /dev/null differ
diff --git a/FitnessSync/backend/__pycache__/main.cpython-311.pyc b/FitnessSync/backend/__pycache__/main.cpython-311.pyc deleted file mode 100644 index 95f86ce..0000000 Binary files a/FitnessSync/backend/__pycache__/main.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/__pycache__/main.cpython-313.pyc b/FitnessSync/backend/__pycache__/main.cpython-313.pyc deleted file mode 100644 index 3f660c4..0000000 Binary files a/FitnessSync/backend/__pycache__/main.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/__pycache__/env.cpython-311.pyc b/FitnessSync/backend/alembic/__pycache__/env.cpython-311.pyc deleted file mode 100644 index e8eb25a..0000000 Binary files a/FitnessSync/backend/alembic/__pycache__/env.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/__pycache__/env.cpython-313.pyc b/FitnessSync/backend/alembic/__pycache__/env.cpython-313.pyc deleted file mode 100644 index 3db096a..0000000 Binary files a/FitnessSync/backend/alembic/__pycache__/env.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/09c17c0f0e9e_add_extended_fit_metrics.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/09c17c0f0e9e_add_extended_fit_metrics.cpython-311.pyc deleted file mode 100644 index 0b3a8a9..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/09c17c0f0e9e_add_extended_fit_metrics.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/0c82944705e9_add_is_estimated_power.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/0c82944705e9_add_is_estimated_power.cpython-311.pyc deleted file mode 100644 index 5d456db..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/0c82944705e9_add_is_estimated_power.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/1136125782ec_add_last_segment_scan_timestamp_v3.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/1136125782ec_add_last_segment_scan_timestamp_v3.cpython-311.pyc deleted file mode 100644 index 5b99a66..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/1136125782ec_add_last_segment_scan_timestamp_v3.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/1e157f880117_create_jobs_table.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/1e157f880117_create_jobs_table.cpython-311.pyc deleted file mode 100644 index e18edf2..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/1e157f880117_create_jobs_table.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/20ccc82af3f2_remove_btree_geometry_indexes.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/20ccc82af3f2_remove_btree_geometry_indexes.cpython-311.pyc deleted file mode 100644 index 2019efd..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/20ccc82af3f2_remove_btree_geometry_indexes.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/24df1381ac00_initial_migration.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/24df1381ac00_initial_migration.cpython-311.pyc deleted file mode 100644 index 3874800..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/24df1381ac00_initial_migration.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/299d39b0f13d_add_mfa_state_to_api_tokens.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/299d39b0f13d_add_mfa_state_to_api_tokens.cpython-311.pyc deleted file mode 100644 index 2c965b5..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/299d39b0f13d_add_mfa_state_to_api_tokens.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/52a16d820129_add_streams_json.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/52a16d820129_add_streams_json.cpython-311.pyc deleted file mode 100644 index ca36434..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/52a16d820129_add_streams_json.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/62a16d820130_add_activity_streams_and_postgis.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/62a16d820130_add_activity_streams_and_postgis.cpython-311.pyc deleted file mode 100644 index 1535a7d..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/62a16d820130_add_activity_streams_and_postgis.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/73e349ef1d88_add_bike_setup_to_activity.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/73e349ef1d88_add_bike_setup_to_activity.cpython-311.pyc deleted file mode 100644 index d53f262..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/73e349ef1d88_add_bike_setup_to_activity.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/792840bbb2e0_allow_null_tokens_and_expiry_during_mfa.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/792840bbb2e0_allow_null_tokens_and_expiry_during_mfa.cpython-311.pyc deleted file mode 100644 index 7219c96..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/792840bbb2e0_allow_null_tokens_and_expiry_during_mfa.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/85c60ed462bf_add_state_tables.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/85c60ed462bf_add_state_tables.cpython-311.pyc deleted file mode 100644 index 52e954d..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/85c60ed462bf_add_state_tables.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/87cc6ed8df63_add_last_segment_scan_timestamp_v2.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/87cc6ed8df63_add_last_segment_scan_timestamp_v2.cpython-311.pyc deleted file mode 100644 index 54af18e..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/87cc6ed8df63_add_last_segment_scan_timestamp_v2.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/8c5791dd193e_add_missing_activity_columns.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/8c5791dd193e_add_missing_activity_columns.cpython-311.pyc deleted file mode 100644 index 41e919c..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/8c5791dd193e_add_missing_activity_columns.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/8cc7963c8db0_add_last_segment_scan_timestamp.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/8cc7963c8db0_add_last_segment_scan_timestamp.cpython-311.pyc deleted file mode 100644 index 6167835..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/8cc7963c8db0_add_last_segment_scan_timestamp.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/95af0e911216_add_bike_setups_table.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/95af0e911216_add_bike_setups_table.cpython-311.pyc deleted file mode 100644 index 5468518..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/95af0e911216_add_bike_setups_table.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/a9c00e495f5e_add_segments_tables.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/a9c00e495f5e_add_segments_tables.cpython-311.pyc deleted file mode 100644 index 96dbfb7..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/a9c00e495f5e_add_segments_tables.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/b43006af329e_add_avg_temperature_to_activity.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/b43006af329e_add_avg_temperature_to_activity.cpython-311.pyc deleted file mode 100644 index 6db4564..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/b43006af329e_add_avg_temperature_to_activity.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/b5a6d7ef97a5_add_fitbit_redirect_uri.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/b5a6d7ef97a5_add_fitbit_redirect_uri.cpython-311.pyc deleted file mode 100644 index 37b91e1..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/b5a6d7ef97a5_add_fitbit_redirect_uri.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/bd21a0528865_expand_activity_schema_metrics.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/bd21a0528865_expand_activity_schema_metrics.cpython-311.pyc deleted file mode 100644 index 60aa816..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/bd21a0528865_expand_activity_schema_metrics.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/cc3b223773cb_add_max_power_to_segmenteffort.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/cc3b223773cb_add_max_power_to_segmenteffort.cpython-311.pyc deleted file mode 100644 index e0748a2..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/cc3b223773cb_add_max_power_to_segmenteffort.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/ce0f0282a142_add_mfa_session_fields_to_api_tokens.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/ce0f0282a142_add_mfa_session_fields_to_api_tokens.cpython-311.pyc deleted file mode 100644 index 7562a62..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/ce0f0282a142_add_mfa_session_fields_to_api_tokens.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/dbb13b0ba015_add_last_segment_scan_timestamp.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/dbb13b0ba015_add_last_segment_scan_timestamp.cpython-311.pyc deleted file mode 100644 index 01a5a33..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/dbb13b0ba015_add_last_segment_scan_timestamp.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/alembic/versions/__pycache__/e9b8841a1234_add_segment_effort_metrics.cpython-311.pyc b/FitnessSync/backend/alembic/versions/__pycache__/e9b8841a1234_add_segment_effort_metrics.cpython-311.pyc deleted file mode 100644 index 7bc0cd5..0000000 Binary files a/FitnessSync/backend/alembic/versions/__pycache__/e9b8841a1234_add_segment_effort_metrics.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/main.py b/FitnessSync/backend/main.py
index 2312e19..f7c05e9 100644
--- a/FitnessSync/backend/main.py
+++ b/FitnessSync/backend/main.py
@@ -21,29 +21,33 @@ async def lifespan(app: FastAPI):
     if database_url and not os.getenv("TESTING"):
         alembic_cfg.set_main_option("sqlalchemy.url", database_url)
         try:
-            command.upgrade(alembic_cfg, "head")
-            logger.info("Database migrations checked/applied.")
+            # command.upgrade(alembic_cfg, "head")
+            logger.info("Database migrations skipped (manual override).")
         except Exception as e:
             logger.error(f"Error running database migrations: {e}")
     else:
         logger.warning("DATABASE_URL not set, skipping migrations.")
 
     # Start Scheduler
-    try:
-        from src.services.scheduler import scheduler
-        scheduler.start()
-        logger.info("Scheduler started.")
-    except Exception as e:
-        logger.error(f"Failed to start scheduler: {e}")
+    if not os.getenv("TESTING"):
+        try:
+            from src.services.scheduler import scheduler
+            scheduler.start()
+            logger.info("Scheduler started.")
+        except Exception as e:
+            logger.error(f"Failed to start scheduler: {e}")
+    else:
+        logger.info("TESTING mode detected: Scheduler disabled.")
 
     yield
 
     logger.info("--- Application Shutting Down ---")
-    try:
-        from src.services.scheduler import scheduler
-        scheduler.stop()
-    except:
-        pass
+    if not os.getenv("TESTING"):
+        try:
+            from src.services.scheduler import scheduler
+            scheduler.stop()
+        except Exception:
+            pass
 
 app = FastAPI(lifespan=lifespan)
diff --git a/FitnessSync/backend/requirements.txt b/FitnessSync/backend/requirements.txt
index fa119d8..9fac272 100644
--- a/FitnessSync/backend/requirements.txt
+++ b/FitnessSync/backend/requirements.txt
@@ -3,12 +3,12 @@ uvicorn[standard]==0.24.0
 garminconnect==0.2.30
 garth==0.5.17
 fitbit==0.3.1
-sqlalchemy==2.0.23
-asyncpg==0.29.0
-psycopg2-binary==2.9.9
+sqlalchemy>=2.0.30
+asyncpg>=0.29.0
+psycopg2-binary
 jinja2==3.1.2
 python-dotenv==1.0.0
-pydantic==2.1.1
+pydantic>=2.4.0
 requests==2.31.0
 httpx==0.25.2
 aiofiles==23.2.1
@@ -17,3 +17,4 @@ pytest-asyncio==0.21.1
 alembic==1.13.1
 fitdecode>=0.10.0
 geoalchemy2>=0.14.0
+testcontainers[postgres]>=3.7.1
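Note: the new testcontainers[postgres] pin and the TESTING gate in main.py work together: a test run can provision a disposable Postgres and point DATABASE_URL at it while the lifespan skips migrations and the scheduler. A minimal sketch of such a fixture (fixture name and image tag are illustrative, not part of this diff):

    # conftest.py (hypothetical) -- throwaway Postgres for the test session
    import os
    import pytest
    from testcontainers.postgres import PostgresContainer

    @pytest.fixture(scope="session")
    def database_url():
        with PostgresContainer("postgres:16") as pg:
            os.environ["TESTING"] = "1"                        # lifespan skips scheduler/migrations
            os.environ["DATABASE_URL"] = pg.get_connection_url()
            yield os.environ["DATABASE_URL"]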
diff --git a/FitnessSync/backend/scripts/repair_geodata.py b/FitnessSync/backend/scripts/repair_geodata.py
new file mode 100644
index 0000000..a85d8c5
--- /dev/null
+++ b/FitnessSync/backend/scripts/repair_geodata.py
@@ -0,0 +1,96 @@
+import os
+import json
+import logging
+from sqlalchemy import create_engine, text
+from sqlalchemy.orm import sessionmaker
+
+# Configure logging
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+logger = logging.getLogger(__name__)
+
+# Constants
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:postgres@db:5432/fitbit_garmin_sync")
+# If running outside docker, use localhost:5433
+if "db:5432" in DATABASE_URL and os.system("ping -c 1 db > /dev/null 2>&1") != 0:
+    DATABASE_URL = DATABASE_URL.replace("db:5432", "localhost:5433")
+
+def repair_geodata():
+    engine = create_engine(DATABASE_URL)
+    Session = sessionmaker(bind=engine)
+    session = Session()
+
+    try:
+        # 1. Repair Segments (Populate NULL geom)
+        logger.info("Starting segment geom repair...")
+        result = session.execute(text("SELECT id, name, points FROM segments WHERE geom IS NULL"))
+        segments_to_fix = result.fetchall()
+
+        logger.info(f"Found {len(segments_to_fix)} segments with NULL geom.")
+        for seg_id, name, points_json in segments_to_fix:
+            try:
+                points = json.loads(points_json) if isinstance(points_json, str) else points_json
+                if not points:
+                    logger.warning(f"Segment {seg_id} ({name}) has no points. Skipping.")
+                    continue
+
+                wkt_coords = [f"{p[0]} {p[1]}" for p in points if len(p) >= 2]
+                if not wkt_coords:
+                    logger.warning(f"Segment {seg_id} ({name}) has invalid points. Skipping.")
+                    continue
+
+                # EWKT string (embedded SRID), so parse with ST_GeomFromEWKT
+                geom_wkt = f"SRID=4326;LINESTRING({', '.join(wkt_coords)})"
+                session.execute(
+                    text("UPDATE segments SET geom = ST_GeomFromEWKT(:wkt) WHERE id = :id"),
+                    {"wkt": geom_wkt, "id": seg_id}
+                )
+                logger.info(f"Fixed Segment {seg_id}: {name}")
+            except Exception as e:
+                logger.error(f"Error fixing Segment {seg_id}: {e}")
+
+        # 2. Repair Activities (Populate NULL start_lat/lng from streams)
+        logger.info("\nStarting activity coordinate repair...")
+        # Join with activity_streams to find valid coordinates
+        result = session.execute(text("""
+            SELECT a.id, a.activity_name, s.latitude, s.longitude
+            FROM activities a
+            JOIN activity_streams s ON a.id = s.activity_id
+            WHERE (a.start_lat IS NULL OR a.start_lng IS NULL)
+        """))
+        activities_to_fix = result.fetchall()
+
+        logger.info(f"Found {len(activities_to_fix)} activities without coordinates but with streams.")
+
+        fixed_count = 0
+        for act_id, name, lats, lons in activities_to_fix:
+            try:
+                # Find first non-null coord
+                start_lat = None
+                start_lng = None
+                for lat, lon in zip(lats or [], lons or []):
+                    if lat is not None and lon is not None:
+                        start_lat = lat
+                        start_lng = lon
+                        break
+
+                if start_lat is not None:
+                    session.execute(
+                        text("UPDATE activities SET start_lat = :lat, start_lng = :lng WHERE id = :id"),
+                        {"lat": start_lat, "lng": start_lng, "id": act_id}
+                    )
+                    fixed_count += 1
+            except Exception as e:
+                logger.error(f"Error fixing Activity {act_id}: {e}")
+
+        logger.info(f"Successfully fixed {fixed_count} activities.")
+
+        session.commit()
+        logger.info("\nGeodata repair complete.")
+
+    except Exception as e:
+        session.rollback()
+        logger.error(f"Critical error during repair: {e}")
+    finally:
+        session.close()
+
+if __name__ == "__main__":
+    repair_geodata()
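Note: a quick way to verify the repair is to count the rows the script targets before and after running it. A sketch (the default URL mirrors the script's localhost fallback and is an assumption):

    # Hypothetical sanity check for repair_geodata.py
    import os
    from sqlalchemy import create_engine, text

    DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:postgres@localhost:5433/fitbit_garmin_sync")
    engine = create_engine(DATABASE_URL)
    with engine.connect() as conn:
        null_geoms = conn.execute(text("SELECT count(*) FROM segments WHERE geom IS NULL")).scalar()
        no_coords = conn.execute(text(
            "SELECT count(*) FROM activities WHERE start_lat IS NULL OR start_lng IS NULL")).scalar()
        print(f"segments missing geom: {null_geoms}, activities missing coords: {no_coords}")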
diff --git a/FitnessSync/backend/src/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index dff409b..0000000 Binary files a/FitnessSync/backend/src/__pycache__/__init__.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index 151587e..0000000 Binary files a/FitnessSync/backend/src/__pycache__/__init__.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 7593985..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/__init__.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index 1ee0b4f..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/__init__.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc deleted file mode 100644 index 4f41bca..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/activities.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/activities.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/activities.cpython-313.pyc deleted file mode 100644 index 6b2ae3f..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/activities.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/analysis.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/analysis.cpython-311.pyc deleted file mode 100644 index 4af2e1d..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/analysis.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/analysis.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/analysis.cpython-313.pyc deleted file mode 100644 index c8836a0..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/analysis.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/auth.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/auth.cpython-311.pyc deleted file mode 100644 index 26a7645..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/auth.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/auth.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/auth.cpython-313.pyc deleted file mode 100644 index 8e56a00..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/auth.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/bike_setups.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/bike_setups.cpython-311.pyc deleted file mode 100644 index 4aa8edf..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/bike_setups.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/bike_setups.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/bike_setups.cpython-313.pyc deleted file mode 100644 index 8b78587..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/bike_setups.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/config_routes.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/config_routes.cpython-311.pyc deleted file mode 100644 index 142f054..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/config_routes.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/config_routes.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/config_routes.cpython-313.pyc deleted file mode 100644 index 94763d0..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/config_routes.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/discovery.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/discovery.cpython-311.pyc deleted file mode 100644 index acdcb6a..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/discovery.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/discovery.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/discovery.cpython-313.pyc deleted file mode 100644 index 0a8ee2b..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/discovery.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/logs.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/logs.cpython-311.pyc deleted file mode 100644 index f982313..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/logs.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/logs.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/logs.cpython-313.pyc deleted file mode 100644 index c7f53a6..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/logs.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/metrics.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/metrics.cpython-311.pyc deleted file mode 100644 index 9cee51b..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/metrics.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/metrics.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/metrics.cpython-313.pyc deleted file mode 100644 index 4806595..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/metrics.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc deleted file mode 100644 index 036b8d4..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-313.pyc deleted file mode 100644 index 1af445e..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/scheduling.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc deleted file mode 100644 index e6a7cd6..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/segments.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/segments.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/segments.cpython-313.pyc deleted file mode 100644 index 32cdd79..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/segments.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/setup.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/setup.cpython-311.pyc deleted file mode 100644 index 39e393e..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/setup.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/setup.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/setup.cpython-313.pyc deleted file mode 100644 index d65e91b..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/setup.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc deleted file mode 100644 index dfc59b9..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/status.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/status.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/status.cpython-313.pyc deleted file mode 100644 index 5a1e3d6..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/status.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/sync.cpython-311.pyc b/FitnessSync/backend/src/api/__pycache__/sync.cpython-311.pyc deleted file mode 100644 index 220b6cf..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/sync.cpython-311.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/__pycache__/sync.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/sync.cpython-313.pyc deleted file mode 100644 index 7874dde..0000000 Binary files a/FitnessSync/backend/src/api/__pycache__/sync.cpython-313.pyc and /dev/null differ
diff --git a/FitnessSync/backend/src/api/activities.py b/FitnessSync/backend/src/api/activities.py
index 691ff52..5bd8246 100644
--- a/FitnessSync/backend/src/api/activities.py
+++ b/FitnessSync/backend/src/api/activities.py
@@ -1,30 +1,26 @@
+
 from fastapi import APIRouter, Query, Response, HTTPException, Depends, BackgroundTasks
 from pydantic import BaseModel
 from typing import List, Optional, Dict, Any
-from sqlalchemy import func
-from ..models.activity import Activity
 import logging
 from ..services.postgresql_manager import PostgreSQLManager
 from sqlalchemy.orm import Session
 from ..utils.config import config
+from .status import get_db
 
 # New Sync Imports
 from ..services.job_manager import job_manager
-from ..models.activity_state import GarminActivityState
-from datetime import datetime
-from ..services.parsers import extract_points_from_file
-import fitdecode
+from ..services.activity_query import ActivityQueryService
+from ..services.activity_file import ActivityFileService
+from ..services.parsers import extract_summary
+from ..models.activity_state import GarminActivityState
+from ..models.activity import Activity
 
 router = APIRouter()
 logger = logging.getLogger(__name__)
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
-
 class BikeSetupInfo(BaseModel):
     id: int
     frame: str
@@ -85,39 +81,17 @@ async def list_activities(
     """
     Return metadata for all scanned activities, indicating download status.
     """
     try:
-        logger.info(f"Listing activities with limit={limit}, offset={offset}")
-
-        # Query GarminActivityState (all known activities)
-        # Left join with Activity to get file status
-
-        results = (
-            db.query(GarminActivityState, Activity)
-            .outerjoin(Activity, GarminActivityState.garmin_activity_id == Activity.garmin_activity_id)
-            .order_by(GarminActivityState.start_time.desc())
-            .offset(offset)
-            .limit(limit)
-            .all()
-        )
+        results = ActivityQueryService.list_activities(db, limit, offset)
 
         activity_responses = []
         for state, activity in results:
-            # Determine logic
-            # If activity exists in 'Activity' table, use its details?
-            # Or prefer GarminActivityState metadata?
-            # State metadata is from scan (Garth). Activity is from file parse (db import).
-            # Usually Activity data is richer IF downloaded.
-
             is_downloaded = (
                 activity is not None
                 and activity.download_status == 'downloaded'
                 and activity.file_content is not None
            )
-
            download_status = 'downloaded' if is_downloaded else 'pending'
-            # Or use state.sync_status? state.sync_status is 'new', 'synced'.
-            # 'synced' usually means downloaded.
-
-            # Construct response
+
            activity_responses.append(
                ActivityResponse(
                    id=activity.id if activity else None,
@@ -125,7 +99,7 @@
                     activity_name=state.activity_name,
                     activity_type=state.activity_type,
                     start_time=state.start_time.isoformat() if state.start_time else None,
-                    duration=activity.duration if activity else None,  # Duration might only be in file parse? Or scan could get it? Scan currently doesn't fetch duration.
+                    duration=activity.duration if activity else None,
                     file_type=activity.file_type if activity else None,
                     download_status=download_status,
                     downloaded_at=activity.downloaded_at.isoformat() if (activity and activity.downloaded_at) else None,
@@ -142,8 +116,6 @@
                     is_estimated_power=activity.is_estimated_power if activity else False
                 )
             )
-
-        logger.info(f"Returning {len(activity_responses)} activities")
         return activity_responses
     except Exception as e:
         logger.error(f"Error in list_activities: {str(e)}")
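Note: the inline join above moves into ActivityQueryService; that module is not part of this diff, but from the removed code and the call site its shape is presumably:

    # src/services/activity_query.py (hypothetical shape, inferred from the removed query)
    from sqlalchemy.orm import Session
    from ..models.activity import Activity
    from ..models.activity_state import GarminActivityState

    class ActivityQueryService:
        @staticmethod
        def list_activities(db: Session, limit: int, offset: int):
            # Every scanned activity, left-joined to its downloaded file row (if any).
            return (
                db.query(GarminActivityState, Activity)
                .outerjoin(Activity, GarminActivityState.garmin_activity_id == Activity.garmin_activity_id)
                .order_by(GarminActivityState.start_time.desc())
                .offset(offset)
                .limit(limit)
                .all()
            )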
""" try: - logger.info(f"Querying activities - type: {activity_type}, start: {start_date}, end: {end_date}, status: {download_status}") + activities = ActivityQueryService.query_activities( + db, activity_type, start_date, end_date, download_status, + bike_setup_id, has_power, has_hr, has_cadence, is_estimated_power + ) - # Start building the query - query = db.query(Activity) - - # Apply filters based on parameters - if activity_type: - if activity_type == 'cycling': - # Match outdoor cycling types - # Using OR filtering for various sub-types - from sqlalchemy import or_ - query = query.filter(or_( - Activity.activity_type == 'cycling', - Activity.activity_type == 'road_biking', - Activity.activity_type == 'mountain_biking', - Activity.activity_type == 'gravel_cycling', - Activity.activity_type == 'cyclocross', - Activity.activity_type == 'track_cycling', - Activity.activity_type == 'commuting' - )) - else: - query = query.filter(Activity.activity_type == activity_type) - - if start_date: - from datetime import datetime - start_dt = datetime.fromisoformat(start_date) - query = query.filter(Activity.start_time >= start_dt) - - if end_date: - from datetime import datetime - end_dt = datetime.fromisoformat(end_date) - query = query.filter(Activity.start_time <= end_dt) - - if download_status: - query = query.filter(Activity.download_status == download_status) - - if bike_setup_id: - query = query.filter(Activity.bike_setup_id == bike_setup_id) - - if has_power is not None: - if has_power: - query = query.filter(Activity.avg_power != None) - else: - query = query.filter(Activity.avg_power == None) - - if has_hr is not None: - if has_hr: - query = query.filter(Activity.avg_hr != None) - else: - query = query.filter(Activity.avg_hr == None) - - if has_cadence is not None: - if has_cadence: - query = query.filter(Activity.avg_cadence != None) - else: - query = query.filter(Activity.avg_cadence == None) - - if is_estimated_power is not None: - if is_estimated_power: - query = query.filter(Activity.is_estimated_power == True) - else: - query = query.filter(Activity.is_estimated_power == False) - - # Execute the query - activities = query.all() - - # Convert SQLAlchemy objects to Pydantic models activity_responses = [] for activity in activities: activity_responses.append( @@ -259,8 +169,6 @@ async def query_activities( is_estimated_power=activity.is_estimated_power ) ) - - logger.info(f"Returning {len(activity_responses)} filtered activities") return activity_responses except Exception as e: logger.error(f"Error in query_activities: {str(e)}") @@ -269,52 +177,20 @@ async def query_activities( @router.get("/activities/download/{activity_id}") async def download_activity(activity_id: str, db: Session = Depends(get_db)): """ - Serve the stored activity file from the database. + Download the original activity file (FIT/TCX/GPX). 
""" try: - logger.info(f"Downloading activity with ID: {activity_id}") - - # Find the activity in the database - activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first() - - if not activity: - raise HTTPException(status_code=404, detail=f"Activity with ID {activity_id} not found") - - if not activity.file_content: - raise HTTPException(status_code=404, detail=f"No file content available for activity {activity_id}") - - if activity.download_status != 'downloaded': - raise HTTPException(status_code=400, detail=f"File for activity {activity_id} is not ready for download (status: {activity.download_status})") - - # Determine the appropriate content type based on the file type - content_type_map = { - 'tcx': 'application/vnd.garmin.tcx+xml', - 'gpx': 'application/gpx+xml', - 'fit': 'application/octet-stream' # FIT files are binary - } - - content_type = content_type_map.get(activity.file_type, 'application/octet-stream') - filename = f"activity_{activity_id}.{activity.file_type}" - - logger.info(f"Returning file for activity {activity_id} with content type {content_type}") - return Response( - content=activity.file_content, - media_type=content_type, - headers={ - "Content-Disposition": f"attachment; filename={filename}", - "Content-Length": str(len(activity.file_content)) - } - ) - except HTTPException: - # Re-raise HTTP exceptions as-is - raise + return ActivityFileService.get_file_response(db, activity_id) + except HTTPException as e: + raise e except Exception as e: - raise HTTPException(status_code=500, detail=f"Error downloading activity: {str(e)}") + logger.error(f"Error downloading activity: {e}") + raise HTTPException(status_code=500, detail=str(e)) @router.get("/activities/{activity_id}/details", response_model=ActivityDetailResponse) async def get_activity_details(activity_id: str, db: Session = Depends(get_db)): """ - Get full details for a specific activity. + Get full details for a specific activity, merging DB metadata with file summary stats if needed. 
""" try: activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first() @@ -326,9 +202,9 @@ async def get_activity_details(activity_id: str, db: Session = Depends(get_db)): if activity.file_content and (activity.distance is None or activity.elevation_gain is None or activity.avg_hr is None): try: if activity.file_type == 'fit': - overrides = _extract_summary_from_fit(activity.file_content) - elif activity.file_type == 'tcx': - # overrides = _extract_summary_from_tcx(activity.file_content) # Optional TODO + overrides = extract_summary(activity.file_content, 'fit') + elif activity.file_type == 'tcx': + # overrides = _extract_summary_from_tcx(activity.file_content) # Optional TODO pass except Exception as e: logger.warning(f"Failed to extract summary from file: {e}") @@ -360,14 +236,14 @@ async def get_activity_details(activity_id: str, db: Session = Depends(get_db)): elevation_loss=val('elevation_loss', 'total_descent'), avg_cadence=val('avg_cadence', 'avg_cadence'), max_cadence=val('max_cadence', 'max_cadence'), - steps=activity.steps, # No session step count usually + steps=activity.steps, aerobic_te=val('aerobic_te', 'total_training_effect'), anaerobic_te=val('anaerobic_te', 'total_anaerobic_training_effect'), avg_power=val('avg_power', 'avg_power'), max_power=val('max_power', 'max_power'), norm_power=val('norm_power', 'normalized_power'), tss=val('tss', 'training_stress_score'), - vo2_max=activity.vo2_max, # Usually not in simple session msg directly but maybe + vo2_max=activity.vo2_max, avg_respiration_rate=val('avg_respiration_rate', 'avg_respiration_rate'), max_respiration_rate=val('max_respiration_rate', 'max_respiration_rate'), is_estimated_power=activity.is_estimated_power or False, @@ -432,10 +308,7 @@ async def redownload_activity_endpoint(activity_id: str, db: Session = Depends(g raise HTTPException(status_code=401, detail="Garmin not authenticated or tokens invalid. Please go to Setup.") garmin_client = GarminClient() - # Double check connection? if not garmin_client.check_connection(): - # Try refreshing? For now just fail if token load wasn't enough - # But usually token load is enough. 
@@ -432,10 +308,7 @@ async def redownload_activity_endpoint(activity_id: str, db: Session = Depends(g
         raise HTTPException(status_code=401, detail="Garmin not authenticated or tokens invalid. Please go to Setup.")
 
     garmin_client = GarminClient()
-    # Double check connection?
     if not garmin_client.check_connection():
-        # Try refreshing? For now just fail if token load wasn't enough
-        # But usually token load is enough.
         pass
 
     sync_app = SyncApp(db, garmin_client)
@@ -447,8 +320,7 @@
     try:
         from ..services.bike_matching import process_activity_matching
-        # Fetch fresh activity object using new session logic or flush/commit handled by sync_app
-        # Just query by garmin_id
+        # Fetch fresh activity object using new session logic
         act_obj = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
         if act_obj:
             process_activity_matching(db, act_obj.id)
@@ -485,8 +357,11 @@ async def update_activity_bike(activity_id: str, update: BikeMatchUpdate, db: Se
         raise HTTPException(status_code=404, detail="Activity not found")
 
     # Verify bike setup exists if provided
+    from ..services.activity_file import ActivityFileService
+    from ..models.activity import Activity
+    from ..models.stream import ActivityStream
+    from ..models.bike_setup import BikeSetup
     if update.bike_setup_id:
-        from ..models.bike_setup import BikeSetup
         setup = db.query(BikeSetup).filter(BikeSetup.id == update.bike_setup_id).first()
         if not setup:
             raise HTTPException(status_code=404, detail="Bike Setup not found")
@@ -581,7 +456,9 @@ async def sync_pending_trigger(
     job_id = job_manager.create_job("sync_pending_activities")
     db_manager = PostgreSQLManager(config.DATABASE_URL)
-    background_tasks.add_task(run_sync_job, job_id, limit, db_manager.get_db_session)
+    # Default limit to 20 if not provided
+    actual_limit = limit if limit is not None else 20
+    background_tasks.add_task(run_sync_job, job_id, actual_limit, db_manager.get_db_session)
     return {"job_id": job_id, "status": "started"}
 
 @router.get("/activities/sync/status")
@@ -613,6 +490,9 @@ async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)):
         raise HTTPException(status_code=404, detail="Activity or file content not found")
 
     points = []
+    # Local import of extract_points_from_file (the module-level import was removed above)
+    from ..services.parsers import extract_points_from_file
+
     if activity.file_type in ['fit', 'tcx']:
         points = extract_points_from_file(activity.file_content, activity.file_type)
     else:
@@ -639,294 +519,25 @@
         logger.error(f"Error generating GeoJSON: {e}")
         raise HTTPException(status_code=500, detail=str(e))
 
-def _extract_streams_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
-    streams = {
-        "time": [],
-        "heart_rate": [],
-        "power": [],
-        "altitude": [],
-        "speed": [],
-        "cadence": [],
-        "respiration_rate": []
-    }
-    try:
-        import fitdecode
-        import io
-
-        start_time = None
-        with io.BytesIO(file_content) as f:
-            with fitdecode.FitReader(f) as fit:
-                for frame in fit:
-                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'record':
-                        timestamp = frame.get_value('timestamp')
-                        if not start_time and timestamp:
-                            start_time = timestamp
-
-                        if timestamp and start_time:
-                            # Relative time in seconds
-                            t = (timestamp - start_time).total_seconds()
-
-                            # Helper to safely get value with fallback
-                            def get_val(frame, keys):
-                                for k in keys:
-                                    if frame.has_field(k):
-                                        return frame.get_value(k)
-                                return None
-
-                            streams["time"].append(t)
-                            streams["heart_rate"].append(get_val(frame, ['heart_rate']))
-                            streams["power"].append(get_val(frame, ['power']))
-                            streams["altitude"].append(get_val(frame, ['enhanced_altitude', 'altitude']))
-                            streams["speed"].append(get_val(frame, ['enhanced_speed', 'speed']))  # m/s (enhanced is also m/s)
-                            streams["cadence"].append(get_val(frame, ['cadence']))
streams["respiration_rate"].append(get_val(frame, ['respiration_rate', 'enhanced_respiration_rate'])) - except Exception as e: - logger.error(f"Error extracting streams from FIT: {e}") - - # Apply LTTB Downsampling - try: - from ..utils.algorithms import lttb - target_points = 1500 # Plenty for 4k screens, but much smaller than raw 1s data - - # We need a primary axis to sample against, typically Time. - # But LTTB is 2D (x,y). We have multiple Ys for one X (time). - # Strategy: Use Time vs Power (or HR/Speed) to pick key indices? - # Or simpler: Just LTTB each stream independently against Time? - # Independent LTTB might misalign peaks across streams (e.g. HR peak might slightly shift vs Power peak). - # Better: Pick 'Power' (most volatile) as the driver for indices? - # Or Simple Decimation for speed? - # Actually, let's just LTTB each one. The slight misalignment is negligible for visualization. - - # Check if we have enough points to warrant sampling - count = len(streams["time"]) - if count > target_points: - # Create (time, index) pairs to find which indices to keep? - # No, standard LTTB takes (x,y). - - # Helper to LTTB a specific stream - def sample_stream(name): - if not streams.get(name) or len(streams[name]) != count: return - - # Filter out Nones for LTTB? No, preserve index? - # LTTB requires values. If we have gaps, it's tricky. - # Let's replace None with 0 (or prev value) for sampling purposes? - # Or just use simple uniform sampling (decimation) which is "good enough" and keeps perfect alignment. - pass - # CHANGING STRATEGY: - # LTTB is great for one line. For aligned multi-series, simple bucket averaging or decimation is safer to keep alignment. - # However, decimation loses peaks. - # - # Let's try: "Bucket Max/Avg". - # Or simplified: Use LTTB on the "most interesting" metric (Power) to select the timestamps, then sample others at those timestamps. - - # Implementation: Use simple N-th sampling for now to guarantee alignment and speed improvement. - # It's an order of magnitude faster than full LTTB and robust for "Loading Speed" requests. 
-
-            step = count / target_points
-            indices = [int(i * step) for i in range(target_points)]
-            # Ensure last point included
-            if indices[-1] != count - 1: indices[-1] = count - 1
-
-            sampled_streams = {k: [] for k in streams}
-
-            for idx in indices:
-                for k in streams:
-                    if idx < len(streams[k]):
-                        sampled_streams[k].append(streams[k][idx])
-
-            return sampled_streams
-
-    except Exception as e:
-        logger.error(f"Error during downsampling: {e}")
-        # Return original if sampling fails
-
-    return streams
-
-def _extract_summary_from_fit(file_content: bytes) -> Dict[str, Any]:
-    summary = {}
-    try:
-        with io.BytesIO(file_content) as f:
-            with fitdecode.FitReader(f) as fit:
-                for frame in fit:
-                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'session':
-                        # Prefer enhanced fields
-                        def get(keys):
-                            for k in keys:
-                                if frame.has_field(k): return frame.get_value(k)
-                            return None
-
-                        summary['total_distance'] = get(['total_distance'])
-                        summary['total_timer_time'] = get(['total_timer_time', 'total_elapsed_time'])
-                        summary['total_calories'] = get(['total_calories'])
-                        summary['avg_heart_rate'] = get(['avg_heart_rate'])
-                        summary['max_heart_rate'] = get(['max_heart_rate'])
-                        summary['avg_cadence'] = get(['avg_cadence'])
-                        summary['max_cadence'] = get(['max_cadence'])
-                        summary['avg_power'] = get(['avg_power'])
-                        summary['max_power'] = get(['max_power'])
-                        summary['total_ascent'] = get(['total_ascent'])
-                        summary['total_descent'] = get(['total_descent'])
-                        summary['enhanced_avg_speed'] = get(['enhanced_avg_speed', 'avg_speed'])
-                        summary['enhanced_max_speed'] = get(['enhanced_max_speed', 'max_speed'])
-                        summary['normalized_power'] = get(['normalized_power'])
-                        summary['training_stress_score'] = get(['training_stress_score'])
-                        summary['total_training_effect'] = get(['total_training_effect'])
-                        summary['total_anaerobic_training_effect'] = get(['total_anaerobic_training_effect'])
-
-                        # Stop after first session message (usually only one per file, or first is summary)
-                        # Actually FIT can have multiple sessions (multisport). We'll take the first for now.
-                        break
-    except Exception as e:
-        logger.error(f"Error extracting summary from FIT: {e}")
-    return summary
-
-def _extract_streams_from_tcx(file_content: bytes) -> Dict[str, List[Any]]:
-    streams = {
-        "time": [],
-        "heart_rate": [],
-        "power": [],
-        "altitude": [],
-        "speed": [],
-        "cadence": []
-    }
-    try:
-        root = ET.fromstring(file_content)
-        # Namespace strip hack
-        start_time = None
-
-        for trkpt in root.iter():
-            if trkpt.tag.endswith('Trackpoint'):
-                timestamp_str = None
-                hr = None
-                pwr = None
-                alt = None
-                cad = None
-                spd = None
-
-                for child in trkpt.iter():
-                    if child.tag.endswith('Time'):
-                        timestamp_str = child.text
-                    elif child.tag.endswith('AltitudeMeters'):
-                        try: alt = float(child.text)
-                        except: pass
-                    elif child.tag.endswith('HeartRateBpm'):
-                        for val in child:
-                            if val.tag.endswith('Value'):
-                                try: hr = int(val.text)
-                                except: pass
-                    elif child.tag.endswith('Cadence'):  # Standard TCX cadence
-                        try: cad = int(child.text)
-                        except: pass
-                    elif child.tag.endswith('Extensions'):
-                        # TPX extensions for speed/power
-                        for ext in child.iter():
-                            if ext.tag.endswith('Speed'):
-                                try: spd = float(ext.text)
-                                except: pass
-                            elif ext.tag.endswith('Watts'):
-                                try: pwr = int(ext.text)
-                                except: pass
-
-                if timestamp_str:
-                    try:
-                        # TCX time format is ISO8601 usually
-                        ts = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
-                        if not start_time:
-                            start_time = ts
-
-                        streams["time"].append((ts - start_time).total_seconds())
-                        streams["heart_rate"].append(hr)
-                        streams["power"].append(pwr)
-                        streams["altitude"].append(alt)
-                        streams["speed"].append(spd)
-                        streams["cadence"].append(cad)
-                    except: pass
-
-    except Exception as e:
-        logger.error(f"Error extracting streams from TCX: {e}")
-    return streams
 
 @router.get("/activities/{activity_id}/streams")
 async def get_activity_streams(activity_id: str, db: Session = Depends(get_db)):
     """
-    Return time series data for charts.
+    Return time series data for charts.
+    Delegates to ActivityFileService for caching and parsing.
     """
     try:
-        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
-        if not activity:
-            raise HTTPException(status_code=404, detail="Activity not found")
-
-        # 1. Try fetching high-res streams from ActivityStream table
-        from ..models.stream import ActivityStream
-        stream_record = db.query(ActivityStream).filter_by(activity_id=activity.id).first()
-
-        if stream_record:
-            # Map DB columns to API response format
-            return {
-                "time": stream_record.time_offset or [],
-                "heart_rate": stream_record.heart_rate or [],
-                "power": stream_record.power or [],
-                "altitude": stream_record.elevation or [],
-                "speed": stream_record.speed or [],
-                "cadence": stream_record.cadence or [],
-                # respiration_rate is not captured by the ActivityStream model/migration
-                # (parsers.py stores time_offset, latitude, longitude, elevation, heart_rate,
-                # power, cadence, speed, distance, temperature, moving, grade_smooth).
-                # The old endpoint exposed it, so return an empty list to avoid breaking the frontend.
-                "respiration_rate": [],
-                "distance": stream_record.distance or [],
-                "temperature": stream_record.temperature or []
-            }
-
-        # 2. Check DB Cache (Legacy)
-        if activity.streams_json:
-            return activity.streams_json
-
-        if not activity.file_content:
-            # Just return empty if no file and no streams
-            return {}
-
-        # 3. Fallback: Parse on the fly AND save to DB
-        # This mirrors the behavior of lazy loading but using the new robust table
-        try:
-            from ..services.sync.activity import GarminActivitySync  # avoid circular imports if possible, or use parser directly
-            # Actually better to just use parser and save manually here or import the function.
-            # But the logic is already in GarminActivitySync._save_activity_streams.
-            # However, GarminActivitySync needs GarminClient init.
-            # Let's just use the parser directly and insert like in _save_activity_streams
-
-            from ..services.parsers import parse_fit_to_streams
-            data = parse_fit_to_streams(activity.file_content)
-            if data:
-                # Save to DB
-                new_stream = ActivityStream(activity_id=activity.id, **data)
-                db.add(new_stream)
-                db.commit()
-
-                return {
-                    "time": data['time_offset'],
-                    "heart_rate": data['heart_rate'],
-                    "power": data['power'],
-                    "altitude": data['elevation'],
-                    "speed": data['speed'],
-                    "cadence": data['cadence'],
-                    "distance": data['distance'],
-                    "temperature": data['temperature'],
-                    "respiration_rate": []
-                }
-        except Exception as e:
-            logger.error(f"Error lazy parsing streams: {e}")
-
-        return {}  # Return empty if all fails
-
+        return ActivityFileService.get_streams(db, activity_id)
+    except HTTPException:
+        raise
     except Exception as e:
         logger.error(f"Error processing streams: {e}")
         raise HTTPException(status_code=500, detail="Error processing activity streams")
 
+
 @router.post("/activities/{activity_id}/estimate_power")
 async def estimate_activity_power(activity_id: int, db: Session = Depends(get_db)):
     """
diff --git a/FitnessSync/backend/src/api/analysis.py b/FitnessSync/backend/src/api/analysis.py
index 04b1d34..88e67d1 100644
--- a/FitnessSync/backend/src/api/analysis.py
+++ b/FitnessSync/backend/src/api/analysis.py
@@ -20,10 +20,7 @@ from ..utils.config import config
 
 router = APIRouter()
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
+from .status import get_db
 
 class EffortAnalysisData(BaseModel):
     effort_id: int
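Note: each router previously carried its own copy of this generator; the diff consolidates them onto the one exported by status.py. Based on the removed copies, the shared dependency presumably looks like:

    # src/api/status.py (assumed to keep the body the routers used to duplicate)
    from ..services.postgresql_manager import PostgreSQLManager
    from ..utils.config import config

    def get_db():
        db_manager = PostgreSQLManager(config.DATABASE_URL)
        with db_manager.get_db_session() as session:
            yield session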
diff --git a/FitnessSync/backend/src/api/auth.py b/FitnessSync/backend/src/api/auth.py
index 811b7cd..35608c3 100644
--- a/FitnessSync/backend/src/api/auth.py
+++ b/FitnessSync/backend/src/api/auth.py
@@ -16,15 +16,11 @@ from ..models.api_token import APIToken
 from ..models.config import Configuration
 from garth.exc import GarthException
 import garth
+from .status import get_db
 
 router = APIRouter()
 logger = logging.getLogger(__name__)
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
-
 class GarminCredentials(BaseModel):
     username: str
     password: str
@@ -151,7 +147,13 @@ def complete_garmin_mfa(mfa_request: GarminMFARequest, db: Session = Depends(get
         else:
             raise HTTPException(status_code=400, detail="MFA verification failed.")
+        raise HTTPException(status_code=400, detail="MFA verification failed.")
+    except Exception as e:
+        if "No pending MFA session found" in str(e):
+            raise HTTPException(status_code=400, detail="No pending MFA session found.")
+        if "Invalid MFA code" in str(e):  # Handle GarthException message
+            raise HTTPException(status_code=400, detail=f"MFA verification failed: {str(e)}")
         logger.error(f"MFA verification failed with exception: {e}", exc_info=True)
         raise HTTPException(status_code=500, detail=f"MFA verification failed: {str(e)}")
diff --git a/FitnessSync/backend/src/api/bike_setups.py b/FitnessSync/backend/src/api/bike_setups.py
index d485911..f96e95c 100644
--- a/FitnessSync/backend/src/api/bike_setups.py
+++ b/FitnessSync/backend/src/api/bike_setups.py
@@ -9,15 +9,11 @@ from ..models.bike_setup import BikeSetup
 from ..models.base import Base
 from ..services.postgresql_manager import PostgreSQLManager
 from ..utils.config import config
+# Import shared get_db dependency
+from .status import get_db
 
 logger = logging.getLogger(__name__)
 
-# Reusing get_db logic (it should ideally be in a shared common module, but for now reproducing it to avoid circular imports or refactoring)
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
-
 class BikeSetupCreate(BaseModel):
     frame: str
     chainring: int
diff --git a/FitnessSync/backend/src/api/config_routes.py b/FitnessSync/backend/src/api/config_routes.py
index faa18e2..171ab33 100644
--- a/FitnessSync/backend/src/api/config_routes.py
+++ b/FitnessSync/backend/src/api/config_routes.py
@@ -7,15 +7,11 @@ import json
 
 from ..services.postgresql_manager import PostgreSQLManager
 from ..utils.config import config
+from .status import get_db
 
 router = APIRouter()
 logger = logging.getLogger(__name__)
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
-
 @router.post("/setup/load-consul-config")
 def load_consul_config(db: Session = Depends(get_db)):
     logger = logging.getLogger(__name__)
diff --git a/FitnessSync/backend/src/api/discovery.py b/FitnessSync/backend/src/api/discovery.py
index 6b70c1d..9fac77e 100644
--- a/FitnessSync/backend/src/api/discovery.py
+++ b/FitnessSync/backend/src/api/discovery.py
@@ -2,26 +2,25 @@ from fastapi import APIRouter, Depends, HTTPException
 from sqlalchemy.orm import Session
 from datetime import datetime
 
-from ..models import Base  # Ensure models are loaded if needed
+from ..models.activity import Activity
 from ..services.postgresql_manager import PostgreSQLManager
 from ..utils.config import config
 from ..services.discovery import SegmentDiscoveryService
 from ..schemas.discovery import DiscoveryFilter, DiscoveryResult, CandidateSegmentSchema, SingleDiscoveryRequest
+from ..services.parsers import extract_activity_data
+
+
+from .status import get_db
 
 router = APIRouter()
 
-def get_db_session():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
-
 @router.post("/segments", response_model=DiscoveryResult)
 def discover_segments(
     filter: DiscoveryFilter,
-    db: Session = Depends(get_db_session)
+    db: Session = Depends(get_db)
 ):
     service = SegmentDiscoveryService(db)
@@ -32,7 +31,9 @@ def discover_segments(
     candidates, debug_paths = service.discover_segments(
         activity_type=filter.activity_type,
         start_date=start,
-        end_date=filter.end_date
+        end_date=filter.end_date,
+        min_frequency=filter.min_frequency,
+        max_candidates=filter.max_candidates
     )
@@ -57,7 +58,7 @@ def discover_segments(
 @router.post("/single", response_model=DiscoveryResult)
 def discover_single_activity(
     request: SingleDiscoveryRequest,
-    db: Session = Depends(get_db_session)
+    db: Session = Depends(get_db)
 ):
     service = SegmentDiscoveryService(db)
@@ -78,12 +79,34 @@ def discover_single_activity(
             distance=c.distance,
             activity_ids=c.activity_ids
         ))
+
+    # Fetch activity type for frontend context
+    act_type = None
+    # A direct lookup on the request session is cheap here.
+    act = db.query(Activity).filter(Activity.id == request.activity_id).first()
+    if not act:
+        # Fallback to Garmin Activity ID (passed as int, convert to str)
+        act = db.query(Activity).filter(Activity.garmin_activity_id == str(request.activity_id)).first()
+
+    if act:
+        act_type = act.activity_type
+        # Fallback if DB type is missing but file exists
+        if not act_type and act.file_content:
+            try:
+                parsed = extract_activity_data(act.file_content, act.file_type)
+                if parsed and parsed.get('type'):
+                    act_type = parsed.get('type')
+            except Exception:
+                pass  # Ignore parsing errors here
 
     return DiscoveryResult(
         candidates=results,
         generated_at=datetime.now(),
         activity_count=1,
-        debug_paths=None
+        debug_paths=None,
+        analyzed_activity_type=act_type
     )
diff --git a/FitnessSync/backend/src/api/metrics.py b/FitnessSync/backend/src/api/metrics.py
index 31cd8ca..7fbc89e 100644
--- a/FitnessSync/backend/src/api/metrics.py
+++ b/FitnessSync/backend/src/api/metrics.py
@@ -14,10 +14,7 @@
 router = APIRouter()
 logger = logging.getLogger(__name__)
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
+from .status import get_db
 
 class HealthMetricResponse(BaseModel):
     id: int
diff --git a/FitnessSync/backend/src/api/scheduling.py b/FitnessSync/backend/src/api/scheduling.py
index c58b182..c8b19e1 100644
--- a/FitnessSync/backend/src/api/scheduling.py
+++ b/FitnessSync/backend/src/api/scheduling.py
@@ -15,10 +15,7 @@ from ..services.scheduler import scheduler
 
 router = APIRouter()
 logger = logging.getLogger(__name__)
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
+from .status import get_db
 
 class ScheduledJobResponse(BaseModel):
     id: int
diff --git a/FitnessSync/backend/src/api/segments.py b/FitnessSync/backend/src/api/segments.py
index a35c244..9c03b86 100644
--- a/FitnessSync/backend/src/api/segments.py
+++ b/FitnessSync/backend/src/api/segments.py
@@ -7,14 +7,10 @@ from ..services.postgresql_manager import PostgreSQLManager
 from ..utils.config import config
 from pydantic import BaseModel
 import json
+from .status import get_db
 
 router = APIRouter()
 
-def get_db():
-    db_manager = PostgreSQLManager(config.DATABASE_URL)
-    with db_manager.get_db_session() as session:
-        yield session
-
 class SegmentCreate(BaseModel):
     name: str
     description: Optional[str] = None
@@ -100,7 +96,20 @@ def create_segment(payload: SegmentCreate, db: Session = Depends(get_db)):
             elev_gain += diff
 
     # Determine Activity Type
-    raw_type = payload.activity_type or activity.activity_type
+    # Prioritize DB > Payload > File Extraction
+    raw_type = activity.activity_type or payload.activity_type
+
+    # Fallback to parsing file if type is missing
+    if not raw_type and activity.file_content:
+        try:
+            from ..services.parsers import extract_activity_data
+            parsed_data = extract_activity_data(activity.file_content, activity.file_type)
+            if parsed_data and parsed_data.get('type'):
+                raw_type = parsed_data.get('type')
+                print(f"DEBUG SEGMENT TYPE: Inferred '{raw_type}' from file content")
+        except Exception as e:
+            print(f"DEBUG SEGMENT TYPE: Failed to extract type from file: {e}")
+
     final_type = 'cycling'  # Default
     if raw_type:
@@ -382,6 +391,20 @@ def save_custom_segment(payload: SegmentCreateCustom, db: Session = Depends(get_
     bounds = calculate_bounds(payload.points)
 
+    # Discovery results (payload.points) may already be simplified or high-res.
+    # Use them as-is for geom; a slight RDP pass could be applied if they are very dense,
+    # but points coming from discovery are typically already reasonably dense.
+
+    # Create WKT for Geometry
+    wkt_coords = []
+    for p in payload.points:
+        if len(p) >= 2:
+            wkt_coords.append(f"{p[0]} {p[1]}")
+
+    geom_wkt = None
+    if wkt_coords:
+        geom_wkt = f"SRID=4326;LINESTRING({', '.join(wkt_coords)})"
+
     segment = Segment(
         name=payload.name,
         description=payload.description,
@@ -389,7 +412,8 @@ def save_custom_segment(payload: SegmentCreateCustom, db: Session = Depends(get_
         elevation_gain=elev_gain,
         activity_type=payload.activity_type,
         points=json.dumps(payload.points),
-        bounds=json.dumps(bounds)
+        bounds=json.dumps(bounds),
+        geom=geom_wkt
     )
 
     db.add(segment)
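Note: with geom populated on save (and backfilled by scripts/repair_geodata.py), PostGIS queries over segments become possible. A proximity lookup might read as follows (sketch, inside a handler holding a Session `db`; the geography cast for metre-based distances is an assumption about acceptable precision/cost):

    # Hypothetical usage of the new geom column
    from sqlalchemy import text

    nearby = db.execute(
        text(
            "SELECT id, name FROM segments "
            "WHERE ST_DWithin(geom::geography, ST_MakePoint(:lng, :lat)::geography, :meters)"
        ),
        {"lng": 13.40, "lat": 52.52, "meters": 500},
    ).fetchall()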
(payload.points) may already be simplified, or may still be high-resolution. + # They are used as-is for the geometry; a light RDP simplification could be applied if they were very dense, + # but discovery output is typically already reasonably simplified. + + # Create WKT for Geometry + wkt_coords = [] + for p in payload.points: + if len(p) >= 2: + wkt_coords.append(f"{p[0]} {p[1]}") + + geom_wkt = None + if wkt_coords: + geom_wkt = f"SRID=4326;LINESTRING({', '.join(wkt_coords)})" + segment = Segment( name=payload.name, description=payload.description, @@ -389,7 +412,8 @@ def save_custom_segment(payload: SegmentCreateCustom, db: Session = Depends(get_ elevation_gain=elev_gain, activity_type=payload.activity_type, points=json.dumps(payload.points), - bounds=json.dumps(bounds) + bounds=json.dumps(bounds), + geom=geom_wkt ) db.add(segment) diff --git a/FitnessSync/backend/src/api/sync.py b/FitnessSync/backend/src/api/sync.py index 80565ab..d483d9a 100644 --- a/FitnessSync/backend/src/api/sync.py +++ b/FitnessSync/backend/src/api/sync.py @@ -19,6 +19,7 @@ from fitbit import exceptions from ..models.weight_record import WeightRecord from ..models.config import Configuration from enum import Enum +from .status import get_db router = APIRouter() logger = logging.getLogger(__name__) @@ -59,11 +60,6 @@ class JobStatusResponse(BaseModel): message: str cancel_requested: bool -def get_db(): - db_manager = PostgreSQLManager(config.DATABASE_URL) - with db_manager.get_db_session() as session: - yield session - from ..services.garth_helper import load_and_verify_garth_session from ..tasks.definitions import ( run_activity_sync_task, diff --git a/FitnessSync/backend/src/jobs/__pycache__/segment_matching_job.cpython-311.pyc b/FitnessSync/backend/src/jobs/__pycache__/segment_matching_job.cpython-311.pyc deleted file mode 100644 index fe1541a..0000000 Binary files a/FitnessSync/backend/src/jobs/__pycache__/segment_matching_job.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index b5ead78..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index c7e9689..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/activity.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/activity.cpython-311.pyc deleted file mode 100644 index 178207f..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/activity.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/activity.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/activity.cpython-313.pyc deleted file mode 100644 index 67fab09..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/activity.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/activity_state.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/activity_state.cpython-311.pyc deleted file mode 100644 index 6ec0ddf..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/activity_state.cpython-311.pyc and /dev/null differ diff --git
a/FitnessSync/backend/src/models/__pycache__/activity_state.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/activity_state.cpython-313.pyc deleted file mode 100644 index dd6d4ae..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/activity_state.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/api_token.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/api_token.cpython-311.pyc deleted file mode 100644 index 2ee3aaa..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/api_token.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/api_token.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/api_token.cpython-313.pyc deleted file mode 100644 index 65366bf..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/api_token.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-311.pyc deleted file mode 100644 index f6e4b6a..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-313.pyc deleted file mode 100644 index 6d46626..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/base.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/base.cpython-311.pyc deleted file mode 100644 index 1e5e3e6..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/base.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/base.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/base.cpython-313.pyc deleted file mode 100644 index 8c4c016..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/base.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/bike_setup.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/bike_setup.cpython-311.pyc deleted file mode 100644 index 0493970..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/bike_setup.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/bike_setup.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/bike_setup.cpython-313.pyc deleted file mode 100644 index d499ffe..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/bike_setup.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/config.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/config.cpython-311.pyc deleted file mode 100644 index 0a1674c..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/config.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/config.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/config.cpython-313.pyc deleted file mode 100644 index 32f5d79..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/config.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/health_metric.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/health_metric.cpython-311.pyc deleted file mode 100644 index ad10f05..0000000 
Binary files a/FitnessSync/backend/src/models/__pycache__/health_metric.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/health_metric.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/health_metric.cpython-313.pyc deleted file mode 100644 index deac75a..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/health_metric.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/health_state.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/health_state.cpython-311.pyc deleted file mode 100644 index 232f348..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/health_state.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/health_state.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/health_state.cpython-313.pyc deleted file mode 100644 index 1fb0a2a..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/health_state.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/job.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/job.cpython-311.pyc deleted file mode 100644 index dc2d865..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/job.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/job.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/job.cpython-313.pyc deleted file mode 100644 index 06a2e16..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/job.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/scheduled_job.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/scheduled_job.cpython-311.pyc deleted file mode 100644 index d044123..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/scheduled_job.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/scheduled_job.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/scheduled_job.cpython-313.pyc deleted file mode 100644 index 131906b..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/scheduled_job.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/segment.cpython-311.pyc deleted file mode 100644 index 964d5ee..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/segment.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/segment.cpython-313.pyc deleted file mode 100644 index 4c4748f..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/segment.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-311.pyc deleted file mode 100644 index 7164d7b..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-313.pyc deleted file mode 100644 index ac247bf..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/segment_effort.cpython-313.pyc and /dev/null differ diff --git 
a/FitnessSync/backend/src/models/__pycache__/stream.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/stream.cpython-311.pyc deleted file mode 100644 index c14f566..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/stream.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/stream.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/stream.cpython-313.pyc deleted file mode 100644 index e9a204d..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/stream.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/sync_log.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/sync_log.cpython-311.pyc deleted file mode 100644 index 26213d3..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/sync_log.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/sync_log.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/sync_log.cpython-313.pyc deleted file mode 100644 index 5926787..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/sync_log.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-311.pyc b/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-311.pyc deleted file mode 100644 index 362bb82..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-313.pyc deleted file mode 100644 index ab74471..0000000 Binary files a/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/routers/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/routers/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 6dc326c..0000000 Binary files a/FitnessSync/backend/src/routers/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/routers/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/routers/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index 4602444..0000000 Binary files a/FitnessSync/backend/src/routers/__pycache__/__init__.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc b/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc deleted file mode 100644 index b4fe801..0000000 Binary files a/FitnessSync/backend/src/routers/__pycache__/web.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/routers/__pycache__/web.cpython-313.pyc b/FitnessSync/backend/src/routers/__pycache__/web.cpython-313.pyc deleted file mode 100644 index 3108717..0000000 Binary files a/FitnessSync/backend/src/routers/__pycache__/web.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-311.pyc b/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-311.pyc deleted file mode 100644 index eedc711..0000000 Binary files a/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-313.pyc b/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-313.pyc deleted file mode 100644 index 0a975ab..0000000 Binary files 
a/FitnessSync/backend/src/schemas/__pycache__/discovery.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/schemas/discovery.py b/FitnessSync/backend/src/schemas/discovery.py index 5396c5c..4d13737 100644 --- a/FitnessSync/backend/src/schemas/discovery.py +++ b/FitnessSync/backend/src/schemas/discovery.py @@ -10,6 +10,8 @@ class DiscoveryFilter(BaseModel): lat_max: Optional[float] = None lon_min: Optional[float] = None lon_max: Optional[float] = None + min_frequency: int = 2 + max_candidates: int = 10 class SingleDiscoveryRequest(BaseModel): activity_id: int @@ -30,4 +32,5 @@ class DiscoveryResult(BaseModel): debug_paths: Optional[List[List[List[float]]]] = None generated_at: datetime activity_count: int # How many activities were analyzed + analyzed_activity_type: Optional[str] = None # Added for frontend context diff --git a/FitnessSync/backend/src/services/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 0d7a215..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index a30041e..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/__init__.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc deleted file mode 100644 index 2ad13f1..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc deleted file mode 100644 index 75a67c8..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/bike_matching.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/discovery.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/discovery.cpython-311.pyc deleted file mode 100644 index 60de270..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/discovery.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/discovery.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/discovery.cpython-313.pyc deleted file mode 100644 index f55f0e5..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/discovery.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/fitbit_client.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/fitbit_client.cpython-311.pyc deleted file mode 100644 index 230c099..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/fitbit_client.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/fitbit_client.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/fitbit_client.cpython-313.pyc deleted file mode 100644 index 2638d1f..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/fitbit_client.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/garmin_client.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/garmin_client.cpython-313.pyc
deleted file mode 100644 index fee4c62..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/garmin_client.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/garth_helper.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/garth_helper.cpython-311.pyc deleted file mode 100644 index 1865bb1..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/garth_helper.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/garth_helper.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/garth_helper.cpython-313.pyc deleted file mode 100644 index c515ece..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/garth_helper.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc deleted file mode 100644 index f725221..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc deleted file mode 100644 index 4d32df5..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/job_manager.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc deleted file mode 100644 index 384750f..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/parsers.cpython-313.pyc deleted file mode 100644 index b862183..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/parsers.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-311.pyc deleted file mode 100644 index b823330..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-313.pyc deleted file mode 100644 index 4eab779..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/power_estimator.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/power_estimator.cpython-311.pyc deleted file mode 100644 index a390792..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/power_estimator.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc deleted file mode 100644 index a387bbb..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-313.pyc deleted file mode 100644 
index 4435d33..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/scheduler.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-311.pyc deleted file mode 100644 index 4d1a3f5..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-313.pyc deleted file mode 100644 index db3767c..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/segment_matcher.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/sync_app.cpython-311.pyc b/FitnessSync/backend/src/services/__pycache__/sync_app.cpython-311.pyc deleted file mode 100644 index ce28f09..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/sync_app.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/__pycache__/sync_app.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/sync_app.cpython-313.pyc deleted file mode 100644 index 7dbab21..0000000 Binary files a/FitnessSync/backend/src/services/__pycache__/sync_app.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/activity_file.py b/FitnessSync/backend/src/services/activity_file.py new file mode 100644 index 0000000..ce9ece3 --- /dev/null +++ b/FitnessSync/backend/src/services/activity_file.py @@ -0,0 +1,91 @@ + +from typing import Optional, Dict, Any, List +from sqlalchemy.orm import Session +from fastapi import HTTPException, Response +from ..models.activity import Activity + +from .parsers import parse_fit_to_streams +from ..utils.sampling import downsample_streams +import logging + +logger = logging.getLogger(__name__) + +class ActivityFileService: + @staticmethod + def get_file_response(db: Session, activity_id: str) -> Response: + """ + Retrieve and prepare a file download response. + """ + activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first() + + if not activity: + raise HTTPException(status_code=404, detail=f"Activity with ID {activity_id} not found") + + if not activity.file_content: + raise HTTPException(status_code=404, detail=f"No file content available for activity {activity_id}") + + # Ensure the file has actually been downloaded (existing logic required download_status == 'downloaded') + if activity.download_status != 'downloaded': + raise HTTPException(status_code=400, detail=f"File for activity {activity_id} is not ready for download (status: {activity.download_status})") + + content_type_map = { + 'tcx': 'application/vnd.garmin.tcx+xml', + 'gpx': 'application/gpx+xml', + 'fit': 'application/octet-stream' + } + + content_type = content_type_map.get(activity.file_type, 'application/octet-stream') + filename = f"activity_{activity_id}.{activity.file_type}" + + return Response( + content=activity.file_content, + media_type=content_type, + headers={"Content-Disposition": f"attachment; filename={filename}"} + ) + + @staticmethod + def get_streams(db: Session, activity_id: str) -> Dict[str, List[Any]]: + """ + Get activity streams, preferring DB cache (Activity.streams_json), falling back to file parse. + Applies downsampling and caches the result. + """ + activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first() + + if not activity: + # Without an activity row there is neither a cached stream nor file content to parse, + # so return the empty structure (matching the existing behaviour) rather than raising. + return {k: [] for k in ["time", "heart_rate", "power", "altitude", "speed", "cadence", "respiration_rate"]} + + # 1. Check Cache + if activity.streams_json: + logger.info(f"Hit streams_json cache for {activity_id}") + return activity.streams_json + + # 2. Parse from file + logger.info(f"Parsing streams on-the-fly for {activity_id}") + full_streams = None + + if activity.file_content and activity.file_type == 'fit': + try: + full_streams = parse_fit_to_streams(activity.file_content) + except Exception as e: + logger.error(f"Error parsing FIT for streams: {e}") + + if not full_streams: + return {k: [] for k in ["time", "heart_rate", "power", "altitude", "speed", "cadence", "respiration_rate"]} + + # 3. Downsample + downsampled = downsample_streams(full_streams) + + # 4. Cache Result + try: + activity.streams_json = downsampled + db.commit() + except Exception as e: + logger.error(f"Failed to cache streams_json: {e}") + db.rollback() + + return downsampled + 
+ """ + query = db.query(Activity) + + if activity_type: + if activity_type == 'cycling': + query = query.filter(or_( + Activity.activity_type == 'cycling', + Activity.activity_type == 'road_biking', + Activity.activity_type == 'mountain_biking', + Activity.activity_type == 'gravel_cycling', + Activity.activity_type == 'cyclocross', + Activity.activity_type == 'track_cycling', + Activity.activity_type == 'commuting' + )) + else: + query = query.filter(Activity.activity_type == activity_type) + + if start_date: + from datetime import datetime + start_dt = datetime.fromisoformat(start_date) + query = query.filter(Activity.start_time >= start_dt) + + if end_date: + from datetime import datetime + end_dt = datetime.fromisoformat(end_date) + query = query.filter(Activity.start_time <= end_dt) + + if download_status: + query = query.filter(Activity.download_status == download_status) + + if bike_setup_id: + query = query.filter(Activity.bike_setup_id == bike_setup_id) + + if has_power is not None: + if has_power: + query = query.filter(Activity.avg_power != None) + else: + query = query.filter(Activity.avg_power == None) + + if has_hr is not None: + if has_hr: + query = query.filter(Activity.avg_hr != None) + else: + query = query.filter(Activity.avg_hr == None) + + if has_cadence is not None: + if has_cadence: + query = query.filter(Activity.avg_cadence != None) + else: + query = query.filter(Activity.avg_cadence == None) + + if is_estimated_power is not None: + if is_estimated_power: + query = query.filter(Activity.is_estimated_power == True) + else: + query = query.filter(Activity.is_estimated_power == False) + + return query.all() + + @staticmethod + def get_activity_by_id(db: Session, activity_id: str) -> Optional[Activity]: + """ + Retrieve a single activity by its Garmin Activity ID. + """ + return db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first() diff --git a/FitnessSync/backend/src/services/bike_matching.py b/FitnessSync/backend/src/services/bike_matching.py index 2626dce..421ccc9 100644 --- a/FitnessSync/backend/src/services/bike_matching.py +++ b/FitnessSync/backend/src/services/bike_matching.py @@ -236,12 +236,15 @@ def process_activity_matching(db: Session, activity_id: int): db.commit() -def run_matching_for_all(db: Session): +def run_matching_for_all(db: Session, job_id: str = None): """ Run matching for all activities that don't have a setup. """ from sqlalchemy import or_ + # Import job_manager locally to avoid circular dependencies + from ..services.job_manager import job_manager + activities = db.query(Activity).filter( # Activity.bike_setup_id == None, # Re-match everything to enforce new rules/constraints @@ -257,8 +260,22 @@ def run_matching_for_all(db: Session): or_(Activity.bike_match_confidence == None, Activity.bike_match_confidence < 2.0) ).all() + total = len(activities) count = 0 - for act in activities: + + if job_id: + job_manager.update_job(job_id, message=f"Found {total} candidates. 
Matching...", progress=0) + + for i, act in enumerate(activities): + # Check cancellation + if job_id and i % 10 == 0: + if job_manager.should_cancel(job_id): + break + # Update progress + progress = int((i / total) * 100) if total > 0 else 100 + job_manager.update_job(job_id, progress=progress) + process_activity_matching(db, act.id) count += 1 + logger.info(f"Ran matching for {count} activities.") diff --git a/FitnessSync/backend/src/services/discovery.py b/FitnessSync/backend/src/services/discovery.py index 939d765..48bcb19 100644 --- a/FitnessSync/backend/src/services/discovery.py +++ b/FitnessSync/backend/src/services/discovery.py @@ -36,7 +36,9 @@ class SegmentDiscoveryService: def discover_segments(self, activity_type: str, start_date: Optional[datetime], - end_date: Optional[datetime] = None) -> Tuple[List[CandidateSegment], List[List[List[float]]]]: + end_date: Optional[datetime] = None, + min_frequency: int = 2, + max_candidates: int = 10) -> Tuple[List[CandidateSegment], List[List[List[float]]]]: @@ -94,9 +96,8 @@ class SegmentDiscoveryService: grid[(xi, yi)].add(act_id) # 4. Filter Hotspots - # Keep cells with > 2 unique activities - min_freq = 2 - hotspot_cells = {k: v for k, v in grid.items() if len(v) >= min_freq} + # Keep cells visited by at least min_frequency unique activities + hotspot_cells = {k: v for k, v in grid.items() if len(v) >= min_frequency} logger.info(f"Found {len(hotspot_cells)} hotspot cells.") @@ -151,6 +152,10 @@ class SegmentDiscoveryService: # 7. Deduplicate against DB final_candidates = self._deduplicate_against_db(candidates, activity_type) + # Sort by frequency desc and limit + final_candidates.sort(key=lambda c: c.frequency, reverse=True) + final_candidates = final_candidates[:max_candidates] + return final_candidates, list(activity_paths.values())
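The min_frequency and max_candidates parameters threaded through above replace the hard-coded min_freq = 2 in the hotspot step. A self-contained sketch of that grid-bucketing idea, with an illustrative cell size rather than the service's actual one:

from collections import defaultdict

def hotspot_cells(paths, cell_deg=0.0005, min_frequency=2):
    """paths maps activity_id -> [(lon, lat), ...]; cell_deg is illustrative."""
    grid = defaultdict(set)
    for act_id, points in paths.items():
        for lon, lat in points:
            # Bucket each GPS point into a fixed-size lon/lat grid cell.
            grid[(int(lon / cell_deg), int(lat / cell_deg))].add(act_id)
    # Keep only cells visited by at least min_frequency distinct activities.
    return {cell: ids for cell, ids in grid.items() if len(ids) >= min_frequency}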
diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/auth.cpython-311.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/auth.cpython-311.pyc deleted file mode 100644 index 6bc30d0..0000000 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/auth.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/auth.cpython-313.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/auth.cpython-313.pyc deleted file mode 100644 index 5402b85..0000000 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/auth.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc deleted file mode 100644 index ec4a4a4..0000000 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc deleted file mode 100644 index a745568..0000000 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/client.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/data.cpython-311.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/data.cpython-311.pyc deleted file mode 100644 index 37f6874..0000000 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/data.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/garmin/__pycache__/data.cpython-313.pyc b/FitnessSync/backend/src/services/garmin/__pycache__/data.cpython-313.pyc deleted file mode 100644 index eb0e4c7..0000000 Binary files a/FitnessSync/backend/src/services/garmin/__pycache__/data.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/parsers.py b/FitnessSync/backend/src/services/parsers.py index 8e59246..aa4120a 100644 --- a/FitnessSync/backend/src/services/parsers.py +++ b/FitnessSync/backend/src/services/parsers.py @@ -7,100 +7,136 @@ from datetime import datetime logger = logging.getLogger(__name__) -def extract_activity_data(file_content: bytes, file_type: str) -> Dict[str, List[Any]]: +def extract_activity_data(file_content: bytes, file_type: str, strict_gps: bool = True) -> Dict[str, List[Any]]: """ Extracts all relevant streams: points (lat, lon, ele), timestamps, hr, power. Returns: { 'points': [[lon, lat, ele], ...], 'timestamps': [datetime, ...], 'heart_rate': [int, ...], 'power': [int, ...], 'speed': [float, ...], - 'cadence': [int, ...] + 'cadence': [int, ...], + 'type': str # Added activity type } """ if file_type == 'fit': - return _extract_data_from_fit(file_content) + return _extract_data_from_fit(file_content, strict_gps) elif file_type == 'tcx': return _extract_data_from_tcx(file_content) - return {'points': [], 'timestamps': [], 'heart_rate': [], 'power': []} + return {'points': [], 'timestamps': [], 'heart_rate': [], 'power': [], 'type': None} def extract_points_from_file(file_content: bytes, file_type: str) -> List[List[float]]: - # Wrapper for backward compatibility + """ + Extracts purely the coordinate points (lon, lat, optional ele) from the file. + Legacy wrapper for backward compatibility. + """ data = extract_activity_data(file_content, file_type) return data['points'] def extract_timestamps_from_file(file_content: bytes, file_type: str) -> List[Optional[datetime]]: - # Wrapper for backward compatibility + """ + Extracts timestamps corresponding to the data points. + Legacy wrapper for backward compatibility. + """ data = extract_activity_data(file_content, file_type) return data['timestamps'] -def _extract_data_from_fit(file_content: bytes) -> Dict[str, List[Any]]: +def _extract_data_from_fit(file_content: bytes, strict_gps: bool = True) -> Dict[str, List[Any]]: + """ + Internal helper to parse FIT files using fitdecode. + Args: + strict_gps: If True, only extracts records that have valid GPS coordinates. + If False, extracts all 'record' messages (useful for indoor activities). + """ data = { 'points': [], 'timestamps': [], 'heart_rate': [], 'power': [], 'speed': [], 'cadence': [], 'temperature': [], 'distance': [], 'respiration_rate': [], - 'session': {} # New key for summary data + 'altitude': [], # Add explicit altitude stream + 'session': {}, # New key for summary data + 'type': None } try: with io.BytesIO(file_content) as f: with fitdecode.FitReader(f) as fit: for frame in fit: if frame.frame_type == fitdecode.FIT_FRAME_DATA: - if frame.name == 'record': + if frame.name == 'sport': + if frame.has_field('sport'): + sport_enum = frame.get_value('sport') + # fitdecode usually returns strings for enums if known + data['type'] = str(sport_enum).lower() + + elif frame.name == 'record': # We only collect data if position is valid, to keep streams aligned with points? # Or should we collect everything and align by index? # Usually points extraction filtered by lat/lon. If we want aligned arrays, we must apply same filter. 
- if frame.has_field('position_lat') and frame.has_field('position_long'): - lat_sc = frame.get_value('position_lat') - lon_sc = frame.get_value('position_long') + # Logic: If strict_gps is True, we skip records without GPS. + # If strict_gps is False, we take all records, but points might be None. + + has_gps = frame.has_field('position_lat') and frame.has_field('position_long') + + if has_gps or not strict_gps: + lat = None + lon = None + ele = None - if lat_sc is not None and lon_sc is not None: - lat = lat_sc * (180.0 / 2**31) - lon = lon_sc * (180.0 / 2**31) + if has_gps: + lat_sc = frame.get_value('position_lat') + lon_sc = frame.get_value('position_long') + if lat_sc is not None and lon_sc is not None: + lat = lat_sc * (180.0 / 2**31) + lon = lon_sc * (180.0 / 2**31) - ele = None - if frame.has_field('enhanced_altitude'): - ele = frame.get_value('enhanced_altitude') - elif frame.has_field('altitude'): - ele = frame.get_value('altitude') - + if frame.has_field('enhanced_altitude'): + ele = frame.get_value('enhanced_altitude') + elif frame.has_field('altitude'): + ele = frame.get_value('altitude') + + data['altitude'].append(ele) + + if lat is not None: data['points'].append([lon, lat, ele] if ele is not None else [lon, lat]) + else: + data['points'].append(None) - # Timestamps - ts = frame.get_value('timestamp') if frame.has_field('timestamp') else None - data['timestamps'].append(ts) - - # Speed - speed = frame.get_value('enhanced_speed') if frame.has_field('enhanced_speed') else frame.get_value('speed') if frame.has_field('speed') else None - data['speed'].append(speed) + # Timestamps + ts = frame.get_value('timestamp') if frame.has_field('timestamp') else None + data['timestamps'].append(ts) + + # Speed + speed = frame.get_value('enhanced_speed') if frame.has_field('enhanced_speed') else frame.get_value('speed') if frame.has_field('speed') else None + data['speed'].append(speed) - # Distance - dist = frame.get_value('distance') if frame.has_field('distance') else None - data['distance'].append(dist) + # Distance + dist = frame.get_value('distance') if frame.has_field('distance') else None + data['distance'].append(dist) - # Cadence - cad = frame.get_value('cadence') if frame.has_field('cadence') else None - data['cadence'].append(cad) - - # HR - hr = frame.get_value('heart_rate') if frame.has_field('heart_rate') else None - data['heart_rate'].append(hr) - - # Power - pwr = frame.get_value('power') if frame.has_field('power') else None - data['power'].append(pwr) + # Cadence + cad = frame.get_value('cadence') if frame.has_field('cadence') else None + data['cadence'].append(cad) + + # HR + hr = frame.get_value('heart_rate') if frame.has_field('heart_rate') else None + data['heart_rate'].append(hr) + + # Power + pwr = frame.get_value('power') if frame.has_field('power') else None + data['power'].append(pwr) - # Temperature - temp = frame.get_value('temperature') if frame.has_field('temperature') else None - data['temperature'].append(temp) + # Temperature + temp = frame.get_value('temperature') if frame.has_field('temperature') else None + data['temperature'].append(temp) - # Respiration Rate - resp = frame.get_value('respiration_rate') if frame.has_field('respiration_rate') else frame.get_value('enhanced_respiration_rate') if frame.has_field('enhanced_respiration_rate') else None - if 'respiration_rate' not in data: - data['respiration_rate'] = [] - data['respiration_rate'].append(resp) + # Respiration Rate + resp = frame.get_value('respiration_rate') if 
frame.has_field('respiration_rate') else frame.get_value('enhanced_respiration_rate') if frame.has_field('enhanced_respiration_rate') else None + if 'respiration_rate' not in data: + data['respiration_rate'] = [] + data['respiration_rate'].append(resp) elif frame.name == 'session': # Extract summary fields @@ -113,7 +149,10 @@ def _extract_data_from_fit(file_content: bytes) -> Dict[str, List[Any]]: 'avg_stress', 'avg_spo2', 'total_strokes', 'avg_stroke_distance', # Standard Metrics - 'max_heart_rate', 'max_speed', 'enhanced_max_speed', 'max_cadence', + 'total_distance', 'total_timer_time', 'total_elapsed_time', 'total_calories', + 'avg_heart_rate', 'max_heart_rate', 'avg_cadence', 'max_cadence', + 'avg_power', 'max_power', 'avg_speed', 'enhanced_avg_speed', + 'max_speed', 'enhanced_max_speed', 'total_ascent', 'total_descent', 'total_training_effect', 'total_anaerobic_training_effect', 'training_stress_score', 'normalized_power' @@ -214,10 +253,24 @@ def _extract_points_from_fit(file_content: bytes) -> List[List[float]]: return _extract_data_from_fit(file_content)['points'] def _extract_data_from_tcx(file_content: bytes) -> Dict[str, List[Any]]: - data = {'points': [], 'timestamps': [], 'heart_rate': [], 'power': []} + """ + Internal helper to parse TCX files using ElementTree. + Extracts trackpoints including extension data (Power). + """ + data = {'points': [], 'timestamps': [], 'heart_rate': [], 'power': [], 'type': None} try: root = ET.fromstring(file_content) ns = {'ns': 'http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2'} + + # Extract Activity Type (Sport) + # Search for Activity tags anywhere + for activity in root.iter(): + if activity.tag.endswith('Activity'): + sport = activity.get('Sport') + if sport: + data['type'] = sport.lower() + break + # TCX namespaces can be tricky. Using simple tag checks for valid coords. for trkpt in root.iter(): @@ -276,3 +329,92 @@ def _extract_points_from_tcx(file_content: bytes) -> List[List[float]]: + + +def extract_streams(file_content: bytes, file_type: str) -> Dict[str, List[Any]]: + """ + Extracts streams specifically formatted for frontend analysis/charts. + Standardizes output format (e.g. relative time in seconds). + + Returns dict with keys: + - time (relative seconds) + - heart_rate + - power + - altitude + - speed + - cadence + - respiration_rate + """ + streams = { + "time": [], + "heart_rate": [], + "power": [], + "altitude": [], + "speed": [], + "cadence": [], + "respiration_rate": [] + } + + # Use strict_gps=False to get all data (including indoor) + raw_data = extract_activity_data(file_content, file_type, strict_gps=False) + + timestamps = raw_data.get('timestamps', []) + if not timestamps: + return streams + + start_time = None + # Find first non-None timestamp + for t in timestamps: + if t: + start_time = t + break + + if not start_time: + return streams + + # Generate streams + # Points include altitude. They might be None if no GPS. + points = raw_data.get('points', []) + + # Pre-fetch other arrays + hr = raw_data.get('heart_rate', []) + pwr = raw_data.get('power', []) + # speed/cadence/resp + spd = raw_data.get('speed', []) + cad = raw_data.get('cadence', []) + resp = raw_data.get('respiration_rate', []) + + for i, ts in enumerate(timestamps): + if ts is None: + # A record without a timestamp cannot be placed on the time axis. + # Skipping it is safe: every stream is read by index 'i', so alignment is preserved. 
+ continue + + t_rel = (ts - start_time).total_seconds() + + streams['time'].append(t_rel) + streams['heart_rate'].append(hr[i] if i < len(hr) else None) + streams['power'].append(pwr[i] if i < len(pwr) else None) + streams['speed'].append(spd[i] if i < len(spd) else None) + streams['cadence'].append(cad[i] if i < len(cad) else None) + streams['respiration_rate'].append(resp[i] if i < len(resp) else None) + + # Altitude + ele = None + if i < len(points): + p = points[i] + # p is [lon, lat, ele?] or [lon, lat] or None + if p and len(p) > 2: + ele = p[2] + streams['altitude'].append(ele) + + return streams + +def extract_summary(file_content: bytes, file_type: str) -> Dict[str, Any]: + """ + Extract summary stats (session/metadata). + """ + data = extract_activity_data(file_content, file_type, strict_gps=False) + return data.get('session', {})
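The position decoding in _extract_data_from_fit above relies on FIT's encoding of coordinates as signed 32-bit "semicircles", where 2**31 semicircles equal 180 degrees. A worked example of the conversion factor used in those hunks:

def semicircles_to_degrees(semicircles: int) -> float:
    # FIT stores latitude/longitude as signed 32-bit semicircles;
    # 2**31 semicircles span 180 degrees, hence the factor below.
    return semicircles * (180.0 / 2**31)

# Example: 611569920 semicircles is roughly 51.26 degrees of latitude.
print(semicircles_to_degrees(611569920))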
diff --git a/FitnessSync/backend/src/services/segment_matcher.py b/FitnessSync/backend/src/services/segment_matcher.py index 441c365..2509893 100644 --- a/FitnessSync/backend/src/services/segment_matcher.py +++ b/FitnessSync/backend/src/services/segment_matcher.py @@ -119,7 +119,8 @@ class SegmentMatcher: # Map segment type to activity types (inverse of what we do in match_activity) if segment.activity_type == 'running': query = query.filter(or_( - Activity.activity_type.ilike('running'), + Activity.activity_type.ilike('running%'), + Activity.activity_type.ilike('run'), # Exact match for "run" or "Run" Activity.activity_type.ilike('trail_running'), Activity.activity_type.ilike('treadmill_running'), # unlikely to have GPS but safe Activity.activity_type.ilike('walking'), @@ -142,7 +143,54 @@ query = query.options(joinedload(Activity.streams)) candidates = query.all() - logger.info(f"Segment Scan: Found {len(candidates)} candidate activities for Segment {segment.name} ({segment.id})") + candidate_ids = {a.id for a in candidates} + + # Fallback: Find activities without streams (or failed intersects) using rough proximity + # Use start_lat/lng if available + if segment.points: + try: + seg_points = json.loads(segment.points) if isinstance(segment.points, str) else segment.points + if seg_points: + start_lon, start_lat = seg_points[0] # GeoJSON is lon, lat + + # Search window ~5km (approx 0.05 deg) + lat_min, lat_max = start_lat - 0.05, start_lat + 0.05 + lon_min, lon_max = start_lon - 0.05, start_lon + 0.05 + + fallback_query = self.db.query(Activity).filter( + Activity.start_lat.between(lat_min, lat_max), + Activity.start_lng.between(lon_min, lon_max), + Activity.id.notin_(candidate_ids) # Avoid duplicates + ) + + # Apply the same activity-type filter as above + if segment.activity_type == 'running': + fallback_query = fallback_query.filter(or_( + Activity.activity_type.ilike('running%'), + Activity.activity_type.ilike('run'), + Activity.activity_type.ilike('trail_running'), + Activity.activity_type.ilike('treadmill_running'), + Activity.activity_type.ilike('walking'), + Activity.activity_type.ilike('hiking') + )) + elif segment.activity_type == 'cycling': + fallback_query = fallback_query.filter(or_( + Activity.activity_type.ilike('%cycling%'), + Activity.activity_type.ilike('%road_biking%'), + Activity.activity_type.ilike('%mountain%'), + Activity.activity_type.ilike('%mtb%'), + Activity.activity_type.ilike('%cyclocross%') + )) + + fb_candidates = fallback_query.all() + if fb_candidates: + logger.info(f"Segment Scan: Found {len(fb_candidates)} additional candidates via proximity fallback.") + candidates.extend(fb_candidates) + + except Exception as e: + logger.warning(f"Fallback proximity scan failed: {e}") + + logger.info(f"Segment Scan: Found {len(candidates)} total candidate activities for Segment {segment.name} ({segment.id})") matches_found = 0 diff --git a/FitnessSync/backend/src/services/sync/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 6cd416f..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/services/sync/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index 56a78c9..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/__init__.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc deleted file mode 100644 index 127968c..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc b/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc deleted file mode 100644 index 4a26e71..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/activity.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/health.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/health.cpython-311.pyc deleted file mode 100644 index 033b423..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/health.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/health.cpython-313.pyc b/FitnessSync/backend/src/services/sync/__pycache__/health.cpython-313.pyc deleted file mode 100644 index aad0af7..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/health.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/utils.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/utils.cpython-311.pyc deleted file mode 100644 index 3410252..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/utils.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/utils.cpython-313.pyc b/FitnessSync/backend/src/services/sync/__pycache__/utils.cpython-313.pyc deleted file mode 100644 index 14af451..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/utils.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/weight.cpython-311.pyc b/FitnessSync/backend/src/services/sync/__pycache__/weight.cpython-311.pyc deleted file mode 100644 index 90864ae..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/weight.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/__pycache__/weight.cpython-313.pyc b/FitnessSync/backend/src/services/sync/__pycache__/weight.cpython-313.pyc deleted file mode 100644 index f723f6b..0000000 Binary files a/FitnessSync/backend/src/services/sync/__pycache__/weight.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/services/sync/activity.py b/FitnessSync/backend/src/services/sync/activity.py index b779165..454b33d 
100644 --- a/FitnessSync/backend/src/services/sync/activity.py +++ b/FitnessSync/backend/src/services/sync/activity.py @@ -504,6 +504,17 @@ class GarminActivitySync: else: stream = ActivityStream(activity_id=activity.id, **data) self.db_session.add(stream) + + # Backfill Activity start coordinates if missing + if activity.start_lat is None or activity.start_lng is None: + lats = data.get('latitude', []) + lons = data.get('longitude', []) + for lat, lon in zip(lats, lons): + if lat is not None and lon is not None: + activity.start_lat = lat + activity.start_lng = lon + self.logger.info(f"Backfilled start coordinates for activity {activity.id}: ({lat}, {lon})") + break self.logger.info(f"Saved streams for Activity {activity.id}") diff --git a/FitnessSync/backend/src/tasks/__pycache__/definitions.cpython-311.pyc b/FitnessSync/backend/src/tasks/__pycache__/definitions.cpython-311.pyc deleted file mode 100644 index 800575f..0000000 Binary files a/FitnessSync/backend/src/tasks/__pycache__/definitions.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/tasks/definitions.py b/FitnessSync/backend/src/tasks/definitions.py index 29a3557..ad0f47a 100644 --- a/FitnessSync/backend/src/tasks/definitions.py +++ b/FitnessSync/backend/src/tasks/definitions.py @@ -234,7 +234,7 @@ def run_bike_matching_job(job_id: str, db_session_factory): job_manager.update_job(job_id, status="running", progress=0, message="Starting bike matching...") - run_matching_for_all(db) + run_matching_for_all(db, job_id=job_id) job_manager.complete_job(job_id, result={"status": "completed"})
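The job_id threading above (and in run_matching_for_all earlier) follows a simple cooperative pattern: check for cancellation every few items and report coarse-grained progress. A condensed sketch of that loop shape, using job_manager as the hunks do but with a placeholder work function:

from ..services.job_manager import job_manager  # imported as in the diff

def run_batch(items, job_id=None, process=lambda item: None):
    total = len(items)
    for i, item in enumerate(items):
        if job_id and i % 10 == 0:
            # Cheap cooperative cancellation check on every 10th item.
            if job_manager.should_cancel(job_id):
                break
            job_manager.update_job(job_id, progress=int(i / total * 100) if total else 100)
        process(item)  # placeholder for the real per-item work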
diff --git a/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 38ae350..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index 01398ff..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/config.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/config.cpython-311.pyc deleted file mode 100644 index 7e59532..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/config.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/config.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/config.cpython-313.pyc deleted file mode 100644 index e14de8d..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/config.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc deleted file mode 100644 index dab611d..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc deleted file mode 100644 index 795b570..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/geo.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/helpers.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/helpers.cpython-311.pyc deleted file mode 100644 index 37eb9c3..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/helpers.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/helpers.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/helpers.cpython-313.pyc deleted file mode 100644 index b9357a5..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/helpers.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/logging_config.cpython-311.pyc b/FitnessSync/backend/src/utils/__pycache__/logging_config.cpython-311.pyc deleted file mode 100644 index 993b648..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/logging_config.cpython-311.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/__pycache__/logging_config.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/logging_config.cpython-313.pyc deleted file mode 100644 index b91bfc2..0000000 Binary files a/FitnessSync/backend/src/utils/__pycache__/logging_config.cpython-313.pyc and /dev/null differ diff --git a/FitnessSync/backend/src/utils/sampling.py b/FitnessSync/backend/src/utils/sampling.py new file mode 100644 index 0000000..adad009 --- /dev/null +++ b/FitnessSync/backend/src/utils/sampling.py @@ -0,0 +1,48 @@ +from typing import Dict, List, Any, Optional + +def downsample_streams(streams: Dict[str, List[Any]], target_points: int = 1500) -> Dict[str, List[Any]]: + """ + Downsample a dictionary of aligned data streams to a target number of points. + Uses simple decimation (N-th point sampling) to ensure alignment across all streams. + + Args: + streams: Dictionary where keys are stream names and values are lists of data. + All lists must be of the same length (or handled gracefully if not). + We use the 'time' stream (or the first found) to determine length. + target_points: Number of points to reduce to. + + Returns: + New Dictionary with downsampled lists. + """ + if not streams: + return {} + + # Find a reference key to determine length (prefer 'time') + ref_key = 'time' if 'time' in streams else next(iter(streams)) + count = len(streams[ref_key]) + + if count <= target_points: + return streams + + step = count / target_points + indices = [int(i * step) for i in range(target_points)] + + # Ensure last point included for completeness + if indices[-1] != count - 1: + indices[-1] = count - 1 + + sampled_streams = {k: [] for k in streams} + + for idx in indices: + for k in streams: + # Safety check if some streams are shorter/missing + if streams[k] and idx < len(streams[k]): + sampled_streams[k].append(streams[k][idx]) + else: + # Appending None (rather than skipping) keeps every sampled stream the + # same length, so the streams stay aligned even when a source stream + # is shorter than the reference or empty. + if streams[k] is not None: # list exists but is shorter than the reference + sampled_streams[k].append(None) + + return sampled_streams diff --git a/FitnessSync/backend/templates/discovery.html b/FitnessSync/backend/templates/discovery.html index 5d3a662..dbf4cc0 100644 --- a/FitnessSync/backend/templates/discovery.html +++ b/FitnessSync/backend/templates/discovery.html @@ -63,95 +63,99 @@ -