From a23fa1b30d9f52591b6edd8313ba090914b87aa2 Mon Sep 17 00:00:00 2001 From: sstent Date: Tue, 23 Dec 2025 06:09:34 -0800 Subject: [PATCH] before claude fix #1 --- FitnessSync/Dockerfile | 14 + FitnessSync/README.md | 178 +++ FitnessSync/backend/Dockerfile | 12 + FitnessSync/backend/README.md | 178 +++ .../backend/__pycache__/main.cpython-313.pyc | Bin 0 -> 2673 bytes FitnessSync/backend/alembic.ini | 116 ++ FitnessSync/backend/alembic/README | 1 + .../alembic/__pycache__/env.cpython-313.pyc | Bin 0 -> 2791 bytes FitnessSync/backend/alembic/env.py | 80 ++ FitnessSync/backend/alembic/script.py.mako | 26 + .../24df1381ac00_initial_migration.py | 151 +++ ...1381ac00_initial_migration.cpython-313.pyc | Bin 0 -> 11349 bytes ...ssion_fields_to_api_tokens.cpython-313.pyc | Bin 0 -> 1830 bytes ...42_add_mfa_session_fields_to_api_tokens.py | 32 + FitnessSync/backend/docker-compose.yml | 31 + FitnessSync/backend/docs/setup.md | 162 +++ FitnessSync/backend/init_db.py | 21 + .../logs/src.services.garmin_client.log | 0 FitnessSync/backend/main.py | 61 + FitnessSync/backend/pyproject.toml | 34 + FitnessSync/backend/requirements.txt | 17 + FitnessSync/backend/src/__init__.py | 0 .../src/__pycache__/__init__.cpython-313.pyc | Bin 0 -> 164 bytes FitnessSync/backend/src/api/__init__.py | 0 .../api/__pycache__/__init__.cpython-313.pyc | Bin 0 -> 168 bytes .../__pycache__/activities.cpython-313.pyc | Bin 0 -> 2516 bytes .../src/api/__pycache__/logs.cpython-313.pyc | Bin 0 -> 1228 bytes .../api/__pycache__/metrics.cpython-313.pyc | Bin 0 -> 4682 bytes .../src/api/__pycache__/setup.cpython-313.pyc | Bin 0 -> 8331 bytes .../api/__pycache__/status.cpython-313.pyc | Bin 0 -> 1581 bytes .../src/api/__pycache__/sync.cpython-313.pyc | Bin 0 -> 2607 bytes FitnessSync/backend/src/api/activities.py | 44 + FitnessSync/backend/src/api/logs.py | 24 + FitnessSync/backend/src/api/metrics.py | 98 ++ FitnessSync/backend/src/api/setup.py | 138 ++ FitnessSync/backend/src/api/status.py | 36 + 
FitnessSync/backend/src/api/sync.py | 51 + FitnessSync/backend/src/models/__init__.py | 13 + .../__pycache__/__init__.cpython-313.pyc | Bin 0 -> 598 bytes .../__pycache__/activity.cpython-313.pyc | Bin 0 -> 1388 bytes .../__pycache__/api_token.cpython-313.pyc | Bin 0 -> 1410 bytes .../__pycache__/auth_status.cpython-313.pyc | Bin 0 -> 1257 bytes .../models/__pycache__/config.cpython-313.pyc | Bin 0 -> 1222 bytes .../__pycache__/health_metric.cpython-313.pyc | Bin 0 -> 1252 bytes .../__pycache__/sync_log.cpython-313.pyc | Bin 0 -> 1280 bytes .../__pycache__/weight_record.cpython-313.pyc | Bin 0 -> 1265 bytes FitnessSync/backend/src/models/activity.py | 19 + FitnessSync/backend/src/models/api_token.py | 23 + FitnessSync/backend/src/models/auth_status.py | 17 + FitnessSync/backend/src/models/config.py | 16 + .../backend/src/models/health_metric.py | 17 + FitnessSync/backend/src/models/sync_log.py | 18 + .../backend/src/models/weight_record.py | 17 + FitnessSync/backend/src/services/__init__.py | 0 .../__pycache__/__init__.cpython-313.pyc | Bin 0 -> 173 bytes .../__pycache__/garmin_client.cpython-313.pyc | Bin 0 -> 34367 bytes .../postgresql_manager.cpython-313.pyc | Bin 0 -> 2524 bytes .../backend/src/services/fitbit_client.py | 74 + .../backend/src/services/garmin/__init__.py | 0 .../backend/src/services/garmin/auth.py | 217 +++ .../backend/src/services/garmin/client.py | 39 + .../backend/src/services/garmin/data.py | 139 ++ .../src/services/postgresql_manager.py | 47 + FitnessSync/backend/src/services/sync_app.py | 322 +++++ FitnessSync/backend/src/utils/__init__.py | 0 .../__pycache__/__init__.cpython-313.pyc | Bin 0 -> 170 bytes .../utils/__pycache__/config.cpython-313.pyc | Bin 0 -> 2143 bytes .../utils/__pycache__/helpers.cpython-313.pyc | Bin 0 -> 2325 bytes FitnessSync/backend/src/utils/config.py | 37 + FitnessSync/backend/src/utils/helpers.py | 36 + FitnessSync/backend/templates/index.html | 308 +++++ FitnessSync/backend/templates/setup.html | 335 +++++ 
FitnessSync/docker-compose.yml | 30 + FitnessSync/fitbitsync.txt | 1200 +++++++++++++++++ FitnessSync/requirements.txt | 17 + .../checklists/requirements.md | 34 + .../contracts/api-contract.yaml | 461 +++++++ .../001-fitbit-garmin-sync/data-model.md | 117 ++ .../specs/001-fitbit-garmin-sync/plan.md | 103 ++ .../001-fitbit-garmin-sync/quickstart.md | 102 ++ .../specs/001-fitbit-garmin-sync/research.md | 58 + .../specs/001-fitbit-garmin-sync/spec.md | 105 ++ .../specs/001-fitbit-garmin-sync/tasks.md | 256 ++++ 83 files changed, 5682 insertions(+) create mode 100644 FitnessSync/Dockerfile create mode 100644 FitnessSync/README.md create mode 100644 FitnessSync/backend/Dockerfile create mode 100644 FitnessSync/backend/README.md create mode 100644 FitnessSync/backend/__pycache__/main.cpython-313.pyc create mode 100644 FitnessSync/backend/alembic.ini create mode 100644 FitnessSync/backend/alembic/README create mode 100644 FitnessSync/backend/alembic/__pycache__/env.cpython-313.pyc create mode 100644 FitnessSync/backend/alembic/env.py create mode 100644 FitnessSync/backend/alembic/script.py.mako create mode 100644 FitnessSync/backend/alembic/versions/24df1381ac00_initial_migration.py create mode 100644 FitnessSync/backend/alembic/versions/__pycache__/24df1381ac00_initial_migration.cpython-313.pyc create mode 100644 FitnessSync/backend/alembic/versions/__pycache__/ce0f0282a142_add_mfa_session_fields_to_api_tokens.cpython-313.pyc create mode 100644 FitnessSync/backend/alembic/versions/ce0f0282a142_add_mfa_session_fields_to_api_tokens.py create mode 100644 FitnessSync/backend/docker-compose.yml create mode 100644 FitnessSync/backend/docs/setup.md create mode 100644 FitnessSync/backend/init_db.py create mode 100644 FitnessSync/backend/logs/src.services.garmin_client.log create mode 100644 FitnessSync/backend/main.py create mode 100644 FitnessSync/backend/pyproject.toml create mode 100644 FitnessSync/backend/requirements.txt create mode 100644 
FitnessSync/backend/src/__init__.py create mode 100644 FitnessSync/backend/src/__pycache__/__init__.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__init__.py create mode 100644 FitnessSync/backend/src/api/__pycache__/__init__.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__pycache__/activities.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__pycache__/logs.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__pycache__/metrics.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__pycache__/setup.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__pycache__/status.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/__pycache__/sync.cpython-313.pyc create mode 100644 FitnessSync/backend/src/api/activities.py create mode 100644 FitnessSync/backend/src/api/logs.py create mode 100644 FitnessSync/backend/src/api/metrics.py create mode 100644 FitnessSync/backend/src/api/setup.py create mode 100644 FitnessSync/backend/src/api/status.py create mode 100644 FitnessSync/backend/src/api/sync.py create mode 100644 FitnessSync/backend/src/models/__init__.py create mode 100644 FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/activity.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/api_token.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/auth_status.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/config.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/health_metric.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/sync_log.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/__pycache__/weight_record.cpython-313.pyc create mode 100644 FitnessSync/backend/src/models/activity.py create mode 100644 FitnessSync/backend/src/models/api_token.py create mode 
100644 FitnessSync/backend/src/models/auth_status.py create mode 100644 FitnessSync/backend/src/models/config.py create mode 100644 FitnessSync/backend/src/models/health_metric.py create mode 100644 FitnessSync/backend/src/models/sync_log.py create mode 100644 FitnessSync/backend/src/models/weight_record.py create mode 100644 FitnessSync/backend/src/services/__init__.py create mode 100644 FitnessSync/backend/src/services/__pycache__/__init__.cpython-313.pyc create mode 100644 FitnessSync/backend/src/services/__pycache__/garmin_client.cpython-313.pyc create mode 100644 FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-313.pyc create mode 100644 FitnessSync/backend/src/services/fitbit_client.py create mode 100644 FitnessSync/backend/src/services/garmin/__init__.py create mode 100644 FitnessSync/backend/src/services/garmin/auth.py create mode 100644 FitnessSync/backend/src/services/garmin/client.py create mode 100644 FitnessSync/backend/src/services/garmin/data.py create mode 100644 FitnessSync/backend/src/services/postgresql_manager.py create mode 100644 FitnessSync/backend/src/services/sync_app.py create mode 100644 FitnessSync/backend/src/utils/__init__.py create mode 100644 FitnessSync/backend/src/utils/__pycache__/__init__.cpython-313.pyc create mode 100644 FitnessSync/backend/src/utils/__pycache__/config.cpython-313.pyc create mode 100644 FitnessSync/backend/src/utils/__pycache__/helpers.cpython-313.pyc create mode 100644 FitnessSync/backend/src/utils/config.py create mode 100644 FitnessSync/backend/src/utils/helpers.py create mode 100644 FitnessSync/backend/templates/index.html create mode 100644 FitnessSync/backend/templates/setup.html create mode 100644 FitnessSync/docker-compose.yml create mode 100644 FitnessSync/fitbitsync.txt create mode 100644 FitnessSync/requirements.txt create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/checklists/requirements.md create mode 100644 
FitnessSync/specs/001-fitbit-garmin-sync/contracts/api-contract.yaml create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/data-model.md create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/plan.md create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/quickstart.md create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/research.md create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/spec.md create mode 100644 FitnessSync/specs/001-fitbit-garmin-sync/tasks.md diff --git a/FitnessSync/Dockerfile b/FitnessSync/Dockerfile new file mode 100644 index 0000000..94ffee6 --- /dev/null +++ b/FitnessSync/Dockerfile @@ -0,0 +1,14 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +WORKDIR /app/backend + +EXPOSE 8000 + +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/FitnessSync/README.md b/FitnessSync/README.md new file mode 100644 index 0000000..71e9cd1 --- /dev/null +++ b/FitnessSync/README.md @@ -0,0 +1,178 @@ +# Fitbit-Garmin Local Sync + +A standalone Python application designed to synchronize health and fitness data between the Fitbit and Garmin Connect platforms. The primary functions are to transfer weight data from Fitbit to Garmin, archive activity files from Garmin to a local directory, and download a wide range of Garmin health metrics for local storage and analysis. 
+ +## Features + +- **Weight Data Synchronization**: Fetches weight history from Fitbit API and uploads to Garmin Connect +- **Activity File Archiving**: Downloads original activity files (.fit, .gpx, .tcx) from Garmin and stores them in the PostgreSQL database +- **Health Metrics Download**: Retrieves comprehensive health metrics from Garmin Connect +- **Web Interface**: Simple browser-based UI for triggering sync operations +- **Local-Only Storage**: All sensitive data stored locally with no external cloud services + +## Prerequisites + +- Python 3.11+ +- PostgreSQL database +- Docker and Docker Compose (for containerized deployment) +- Fitbit Developer Account (to create an app and get API credentials) +- Garmin Connect Account + +## Setup + +### 1. Clone and Install Dependencies + +```bash +# Clone the repository +git clone +cd fitbit-garmin-sync + +# Create virtual environment +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt +``` + +### 2. Database Setup + +```bash +# Create PostgreSQL database +createdb fitbit_garmin_sync + +# Update database configuration in application +# The application will handle schema creation automatically +``` + +### 3. Environment Configuration + +Create a `.env` file with the following: + +```env +DATABASE_URL=postgresql://username:password@localhost:5432/fitbit_garmin_sync +FITBIT_CLIENT_ID=your_fitbit_client_id +FITBIT_CLIENT_SECRET=your_fitbit_client_secret +FITBIT_REDIRECT_URI=http://localhost:8000/api/setup/fitbit/callback +``` + +### 4. Run the Application + +```bash +# Using uvicorn directly +uvicorn main:app --host 0.0.0.0 --port 8000 + +# Or using Docker +docker-compose up --build +``` + +## Initial Configuration + +1. Open the application in your browser at `http://localhost:8000` +2. Navigate to the Setup page (`/setup`) +3. Enter your Garmin Connect username and password +4. Enter your Fitbit Client ID and Client Secret +5. 
Click the authorization link provided to authenticate with Fitbit +6. Copy the full callback URL from your browser after authorizing and paste it into the input field on the setup page + +## Usage + +### Sync Weight Data + +1. Go to the home page (`/`) +2. Click the "Sync Weight" button +3. Monitor the sync status in the logs table + +### Archive Activities + +1. Go to the home page (`/`) +2. Click the "Sync Activities" button +3. Enter the number of days back to look for activities +4. Monitor the sync status in the logs table + +### View Health Metrics + +1. Use the API endpoints to query health metrics: + - `/api/metrics/list` - List available metric types + - `/api/metrics/query` - Query specific metrics + - `/api/health-data/summary` - Get aggregated health statistics + +## API Endpoints + +See the full API documentation in the `specs/001-fitbit-garmin-sync/contracts/api-contract.yaml` file or access the automatic documentation at `/docs` when running the application. + +## Docker Deployment + +```bash +# Build and run with Docker Compose +docker-compose up --build + +# The application will be available at http://localhost:8000 +# PostgreSQL database will be automatically set up +``` + +## Architecture + +### Project Structure +``` +backend/ +├── main.py +├── src/ +│ ├── models/ +│ │ ├── __init__.py +│ │ ├── config.py +│ │ ├── weight_record.py +│ │ ├── activity.py +│ │ ├── health_metric.py +│ │ ├── sync_log.py +│ │ └── api_token.py +│ ├── services/ +│ │ ├── __init__.py +│ │ ├── fitbit_client.py +│ │ ├── garmin_client.py +│ │ ├── postgresql_manager.py +│ │ └── sync_app.py +│ ├── api/ +│ │ ├── __init__.py +│ │ ├── auth.py +│ │ ├── sync.py +│ │ ├── setup.py +│ │ └── metrics.py +│ └── utils/ +│ ├── __init__.py +│ └── helpers.py +├── templates/ +│ ├── index.html +│ └── setup.html +├── static/ +│ ├── css/ +│ └── js/ +├── requirements.txt +├── Dockerfile +└── docker-compose.yml + +tests/ +├── unit/ +│ ├── test_models/ +│ ├── test_services/ +│ └── test_api/ +├── 
integration/ +│ └── test_sync_flow.py +└── contract/ + └── test_api_contracts.py +``` + +## Development + +For development, run the application with auto-reload: + +```bash +uvicorn main:app --reload --host 0.0.0.0 --port 8000 +``` + +## Security + +- All API credentials and tokens are encrypted in the database +- OAuth2 flows are implemented following security best practices +- No external cloud services are used for data storage +- Implements rate limiting to be respectful to API providers \ No newline at end of file diff --git a/FitnessSync/backend/Dockerfile b/FitnessSync/backend/Dockerfile new file mode 100644 index 0000000..89f7120 --- /dev/null +++ b/FitnessSync/backend/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +EXPOSE 8000 + +CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file diff --git a/FitnessSync/backend/README.md b/FitnessSync/backend/README.md new file mode 100644 index 0000000..c5c896c --- /dev/null +++ b/FitnessSync/backend/README.md @@ -0,0 +1,178 @@ +# Fitbit-Garmin Local Sync + +A standalone Python application designed to synchronize health and fitness data between the Fitbit and Garmin Connect platforms. The primary functions are to transfer weight data from Fitbit to Garmin, archive activity files from Garmin to a local directory, and download a wide range of Garmin health metrics for local storage and analysis. 
+ +## Features + +- **Weight Data Synchronization**: Fetches weight history from Fitbit API and uploads to Garmin Connect +- **Activity File Archiving**: Downloads original activity files (.fit, .gpx, .tcx) from Garmin to local storage +- **Health Metrics Download**: Retrieves comprehensive health metrics from Garmin Connect +- **Web Interface**: Simple browser-based UI for triggering sync operations +- **Local-Only Storage**: All sensitive data stored locally with no external cloud services + +## Prerequisites + +- Python 3.11+ +- PostgreSQL database +- Docker and Docker Compose (for containerized deployment) +- Fitbit Developer Account (to create an app and get API credentials) +- Garmin Connect Account + +## Setup + +### 1. Clone and Install Dependencies + +```bash +# Clone the repository +git clone +cd fitbit-garmin-sync + +# Create virtual environment +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt +``` + +### 2. Database Setup + +```bash +# Create PostgreSQL database +createdb fitbit_garmin_sync + +# Update database configuration in application +# The application will handle schema creation automatically +``` + +### 3. Environment Configuration + +Create a `.env` file with the following: + +```env +DATABASE_URL=postgresql://username:password@localhost:5432/fitbit_garmin_sync +FITBIT_CLIENT_ID=your_fitbit_client_id +FITBIT_CLIENT_SECRET=your_fitbit_client_secret +FITBIT_REDIRECT_URI=http://localhost:8000/api/setup/fitbit/callback +``` + +### 4. Run the Application + +```bash +# Using uvicorn directly +uvicorn main:app --host 0.0.0.0 --port 8000 + +# Or using Docker +docker-compose up --build +``` + +## Initial Configuration + +1. Open the application in your browser at `http://localhost:8000` +2. Navigate to the Setup page (`/setup`) +3. Enter your Garmin Connect username and password +4. Enter your Fitbit Client ID and Client Secret +5. 
Click the authorization link provided to authenticate with Fitbit +6. Copy the full callback URL from your browser after authorizing and paste it into the input field on the setup page + +## Usage + +### Sync Weight Data + +1. Go to the home page (`/`) +2. Click the "Sync Weight" button +3. Monitor the sync status in the logs table + +### Archive Activities + +1. Go to the home page (`/`) +2. Click the "Sync Activities" button +3. Enter the number of days back to look for activities +4. Monitor the sync status in the logs table + +### View Health Metrics + +1. Use the API endpoints to query health metrics: + - `/api/metrics/list` - List available metric types + - `/api/metrics/query` - Query specific metrics + - `/api/health-data/summary` - Get aggregated health statistics + +## API Endpoints + +See the full API documentation in the `specs/001-fitbit-garmin-sync/contracts/api-contract.yaml` file or access the automatic documentation at `/docs` when running the application. + +## Docker Deployment + +```bash +# Build and run with Docker Compose +docker-compose up --build + +# The application will be available at http://localhost:8000 +# PostgreSQL database will be automatically set up +``` + +## Architecture + +### Project Structure +``` +backend/ +├── main.py +├── src/ +│ ├── models/ +│ │ ├── __init__.py +│ │ ├── config.py +│ │ ├── weight_record.py +│ │ ├── activity.py +│ │ ├── health_metric.py +│ │ ├── sync_log.py +│ │ └── api_token.py +│ ├── services/ +│ │ ├── __init__.py +│ │ ├── fitbit_client.py +│ │ ├── garmin_client.py +│ │ ├── postgresql_manager.py +│ │ └── sync_app.py +│ ├── api/ +│ │ ├── __init__.py +│ │ ├── auth.py +│ │ ├── sync.py +│ │ ├── setup.py +│ │ └── metrics.py +│ └── utils/ +│ ├── __init__.py +│ └── helpers.py +├── templates/ +│ ├── index.html +│ └── setup.html +├── static/ +│ ├── css/ +│ └── js/ +├── requirements.txt +├── Dockerfile +└── docker-compose.yml + +tests/ +├── unit/ +│ ├── test_models/ +│ ├── test_services/ +│ └── test_api/ +├── 
integration/ +│ └── test_sync_flow.py +└── contract/ + └── test_api_contracts.py +``` + +## Development + +For development, run the application with auto-reload: + +```bash +uvicorn main:app --reload --host 0.0.0.0 --port 8000 +``` + +## Security + +- All API credentials and tokens are encrypted in the database +- OAuth2 flows are implemented following security best practices +- No external cloud services are used for data storage +- Implements rate limiting to be respectful to API providers \ No newline at end of file diff --git a/FitnessSync/backend/__pycache__/main.cpython-313.pyc b/FitnessSync/backend/__pycache__/main.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..95b35f1f4f64f5503fce58f266ccb054056aa854 GIT binary patch literal 2673 zcmb_cOKcNY6n*o_jQ@^p@(;uf3Dl+7X-EVKkP?1sYzovvT2--XO~zwqU_4{qjMF5m z01H^sN~l?8ne140*>x9HtRMwVNsH97U>9V%VAuO*JaK}Gvgt^3-+A|*_qo4U4+eb* zzF&WvQrbNT{Y573$2UNIf=1{G5|BV;P@2M2n#OdT%;^l1W-)7HCexDUFlS>n!>2vi zW8;>LH|@hd8*>?dI)DQ<<}<-`2#0L!$%Jt@=MlU)YJ?Vi-RL$u+Bh7y=BR#@YZd%s z6prK~*+KVu%~1&CxPBxAh0quUGCan_QH}*oE79N>+%%Foly1ZA_Gyt!2jI>I?gHH1 zz&(I_8~8BbBMsaKI1YP8vX`2tx+vE!M1|NGMC1eZ&I9b-2iSWLupc?V9(U~!heMD- zJ9}`~E3OC4QD_^boSD#`qaha^`*Lyg1FJgUTh+C1)j-LU=zdMMP0$v9RW!`Wn^P8_ zmG4$%!%R?CIIU>6#o=3WxuS}uY=Fuyn4+mjR~6Ns#zbRLlO$a;3_| z4YP!0;j3$3?kc%Ux>i(5VCE&g4CKOfixDdoso|OJ9}AL!vabID;3$eieb4OL1a+2 zQ_(d{;$iXCN(qYv8Iw3!JlxpK7iJSI_LI5mNnR?JFo}cZE6lhf}x-(3WCL4wT4y0y5BWr~exk|FKXn9nnC>s?~!=11n zIo8O5c#W3PTMvqLtsLHr9$(J9h_(GR_T$*fcN-`5%~)mm+H*F%#rDE{*3+jEr+5%gM*T+FLngNjWWf>PWM}ZRPX*5K+Wjf##8gqTgr)ULvAvtH3RUC%} z>oo3#!D@AzI}3r;HA7yfY`a04E{;10$BUtE@>thROdk2u@YfH6AaVcOe`w1;So06A zs!zZDh2I$YV$(mh!A`xa6VL8<^-xP(3d*Uvzs>(o&+Ofsp5cBt7!QEh>x0i`JxQ)pH89DI6?7($Hd1+3V3z}pW%1eqI)b6j zxQaIehz~tpsB!US251!f(0kAODDlJ0+L>D0$j@h%y)|_C4Tplgt9NRF;breXKssfg zGGxnG-L F{{ z=3{3YizrfoVw6ObX1PrvR@+m)CirO=WsT00& z=yl%=y4)}x>;q~%-_f*VR1VE|9@ew*f7n}D&XpH*gG^cGf?l2lwUd}--6VwtYE=rF 
zX9lZKwJZakQLs4Vj^oC!Ox&25oKUV7ZstFGm9W1sR6|=Nm6g#NWsbt6Y7?z>w^2}d z_&_xxs@V=! zF_c|a%@Q^(&`f;Q>)HsoTeb3H(}E$Is%fO^s6~Uwi-Z!a+W0mxj8TjySFn9cuU2)l z>?QJ}V2u)Qhw^QLOO`q8V6se1Y!Xs}%vUBSu3y8Bg^LhsS=9{|8FoIU8V2mw3bs2+ z?hvi!kh59rr6m>J@v@Y47R`33m)2r7+199Db+A=)sx=3KQP$EMu3S~ATCtrKgScT+ zt&mbyaCtalZqiX{nK*?CanzFPsBW8~JA|r6p{ScB+l`lW)gYQvu&Rzdn~k~=zo>4~ zdrQ?#wgPRS>`=MVLCI-?=^aG^Feo@wHEmU6er|%+%tEt!S&p`WrwwgI*qzKRS{0JB zZHJgn?mD%WAh(_SM0ajbRl9Y8NhYyvWyRETMX(UFl!Facin^8~=JIHD#qBwqpwKuP z^dw|B!%}??(<<6Og<>6b8E?pVz5nee@`cr@T_IK%x;KUHop^Hh)bLJEYNsdj=+pJ? zk>^r49NAAGxuY&+Hl@tAG_Wfs>tcFSOmB(!C6DC6x-{^+H1Nu1=YCD>nHc8RhUJON zQ4REiaRA?Y@WB9I_SxQ;Yw#MJ?r>mhK3i6g0q+MvG8)c`;6=!RPaUFtsR}kBghg;gk;;^1m);!t`6J(4vGBReOyZIoSShD{}wRt!km z?-b)Rui9=>w*!_duor$pFJKM%Z4y5KQ_UbB(J`cdRyqFIoR0$;^}fj literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/alembic/env.py b/FitnessSync/backend/alembic/env.py new file mode 100644 index 0000000..11296c2 --- /dev/null +++ b/FitnessSync/backend/alembic/env.py @@ -0,0 +1,80 @@ +from logging.config import fileConfig +import os +from sqlalchemy import engine_from_config +from sqlalchemy import pool +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Set the database URL from environment variable if available +database_url = os.getenv("DATABASE_URL") +if database_url: + config.set_main_option("sqlalchemy.url", database_url) + +# Import our models to ensure they are registered with Base +from src.models import Base +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. 
+ + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/FitnessSync/backend/alembic/script.py.mako b/FitnessSync/backend/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/FitnessSync/backend/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/FitnessSync/backend/alembic/versions/24df1381ac00_initial_migration.py b/FitnessSync/backend/alembic/versions/24df1381ac00_initial_migration.py new file mode 100644 index 0000000..817ca9d --- /dev/null +++ b/FitnessSync/backend/alembic/versions/24df1381ac00_initial_migration.py @@ -0,0 +1,151 @@ +"""Initial migration + +Revision ID: 24df1381ac00 +Revises: +Create Date: 2025-12-22 15:04:54.280508 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '24df1381ac00' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + # Create configurations table + op.create_table('configurations', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(), nullable=False), + sa.Column('value', sa.String(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('key') + ) + op.create_index(op.f('ix_configurations_id'), 'configurations', ['id'], unique=False) + + # Create api_tokens table + op.create_table('api_tokens', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('token_type', sa.String(), nullable=False), + sa.Column('access_token', sa.String(), nullable=False), + sa.Column('refresh_token', sa.String(), nullable=True), + sa.Column('expires_at', sa.DateTime(), nullable=False), + sa.Column('scopes', sa.String(), nullable=True), + sa.Column('garth_oauth1_token', sa.String(), nullable=True), + sa.Column('garth_oauth2_token', sa.String(), nullable=True), + sa.Column('last_used', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_api_tokens_id'), 'api_tokens', ['id'], unique=False) + + # Create auth_status table + op.create_table('auth_status', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('service', sa.String(), nullable=False), + sa.Column('is_authenticated', sa.Boolean(), nullable=False), + sa.Column('last_sync', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('service') + ) + op.create_index(op.f('ix_auth_status_id'), 'auth_status', 
['id'], unique=False) + + # Create weight_records table + op.create_table('weight_records', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('date', sa.Date(), nullable=False), + sa.Column('weight', sa.Float(), nullable=False), + sa.Column('bmi', sa.Float(), nullable=True), + sa.Column('body_fat', sa.Float(), nullable=True), + sa.Column('source', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_weight_records_id'), 'weight_records', ['id'], unique=False) + + # Create activities table + op.create_table('activities', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('activity_name', sa.String(), nullable=False), + sa.Column('start_time', sa.DateTime(), nullable=False), + sa.Column('end_time', sa.DateTime(), nullable=True), + sa.Column('duration', sa.Integer(), nullable=True), + sa.Column('calories', sa.Integer(), nullable=True), + sa.Column('distance', sa.Float(), nullable=True), + sa.Column('source', sa.String(), nullable=False), + sa.Column('activity_data', sa.JSON(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_activities_id'), 'activities', ['id'], unique=False) + + # Create health_metrics table + op.create_table('health_metrics', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('date', sa.Date(), nullable=False), + sa.Column('metric_type', sa.String(), nullable=False), + sa.Column('value', sa.Float(), nullable=False), + sa.Column('unit', sa.String(), nullable=True), + sa.Column('source', sa.String(), nullable=False), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), 
nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_health_metrics_id'), 'health_metrics', ['id'], unique=False) + + # Create sync_logs table + op.create_table('sync_logs', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('sync_type', sa.String(), nullable=False), + sa.Column('source', sa.String(), nullable=False), + sa.Column('destination', sa.String(), nullable=False), + sa.Column('start_time', sa.DateTime(), nullable=False), + sa.Column('end_time', sa.DateTime(), nullable=True), + sa.Column('status', sa.String(), nullable=False), + sa.Column('records_synced', sa.Integer(), nullable=True), + sa.Column('error_message', sa.Text(), nullable=True), + sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True), + sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_sync_logs_id'), 'sync_logs', ['id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_sync_logs_id'), table_name='sync_logs') + op.drop_table('sync_logs') + + op.drop_index(op.f('ix_health_metrics_id'), table_name='health_metrics') + op.drop_table('health_metrics') + + op.drop_index(op.f('ix_activities_id'), table_name='activities') + op.drop_table('activities') + + op.drop_index(op.f('ix_weight_records_id'), table_name='weight_records') + op.drop_table('weight_records') + + op.drop_index(op.f('ix_auth_status_id'), table_name='auth_status') + op.drop_table('auth_status') + + op.drop_index(op.f('ix_api_tokens_id'), table_name='api_tokens') + op.drop_table('api_tokens') + + op.drop_index(op.f('ix_configurations_id'), table_name='configurations') + op.drop_table('configurations') + # ### end Alembic commands ### \ No newline at end of file diff --git a/FitnessSync/backend/alembic/versions/__pycache__/24df1381ac00_initial_migration.cpython-313.pyc b/FitnessSync/backend/alembic/versions/__pycache__/24df1381ac00_initial_migration.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2fadd2154c801e80d1e16f38e5f84ba6fe8336ae GIT binary patch literal 11349 zcmeHNT}&I<6&~ATdu%WeFC-yJgO|U={Nvz&$#%OUS+qd@HbbOMgESrN0S3$rduN=4 zq><8n+10+3U2U~n8;J;zpg&SA^@YbgMpf0?IMqhC`_}D4-&PG()wiB&dxpXCk|CO= zt7u1JxO48g-?{hv&0L@R%;)n`aQ*&IBjSIKQPkhagLcuy#OhVFd3KD^qfX?^x9Kq#aw<>6L^& zVW032I`lh={Ym=~uJuZptd z^J4? 
z=^G69b%h3c!aW0C?>}I+RM3tYR(L-lNKpYhuS)_Qr$-L5eAyEJcX`?W%=_1OA?UeUzBtrbef(+4ZL0Xas}Y)C@;m zbLky=132sk4o2_L8;w-B*k&Cp9dQ`A%i}o>9C>%rq4JFbFTV6|6dt;43q7~kF$YMOdX}nUu zD6hZQ40vU|?10f+YrxxIf9B>_XEEMki}8*a@b=fAx%Iwc*(U5O2JMu8LmtyO z!=8dIEb#6hXqasMfxP>qVYV;FyBiD=;iJFwI(a6`s0pCEco5nMjksB?> zYck+jksV&OH%3t>7Q|Gu9#ba|6jLn*3%#6}YBfl-!Y57{@b=H}=H5W3P2-u1sh|OG z|2?ERyf%yR+6{R7=OlCUvpVKG4D&m=Z_YF~zgI1XSohW0sppWE1Lcq|gN0sB4zc>Z z+-+Ea6*#f-+Nl^a^(tM0h3wTuM}45ESg6=8-zX$$Ubc=`HdZ zX`v#`6Xz9`ODIAN*TCF%#lyUC1sTmLDz7FK>?Ugyp$JtYf+Xk=Bvp)(h2a8-vM5FI zAp){Hil9{87R2cp6$(;O8O0P#6K!Bufr=fI^CD&@<=7%Om7l+yKp=!DYVSA#<>jNQ zcnhj-utHz4y~s)YJZKK4iByi*1T#?IDz;)p(_$vd$7MtYGBFW&NjYs70;a?l6rc;p zPklz<1-(`k z^}>-MpiG~Dkc>d2qVUrKt_DTuU;rIFgh=%gR~CGcQ}b?WS9t6?FUJ$}5_XSBsxU1e z>|$Zo(llmBsd1brhE|0;Fy*UAoafQvn6P+WmJ}87qNL&)s4apE8Yr3wQHlw7ux$#v zhh#Y}@Dip8B6bePWiS~%%3isQ>2ZMm74?inGM?(5k>`bOMNz>?y00R6R*0%f_pqpr zBR)D8A}t9P$ikAlCxHS6>*nLa{G=G|h5#ZZRHfTk-sHqWv6a({tz8R?*qvB_(rZjW z;~=#T{*;g16!n1GaZyh9hgUwhlBsR}it+y3^RZ{CdD*_=O?p$6n~5wF)~GGZ{+0S< zed_og>Z#?>N+cOcolDndnX?*i$GUx^VznYYo(&BCG4$vDKlEqVALqtyW~OGd%v?FH zN8?#$6gs|S8n&5+hjfl<$v++6W{!XHc8+P>sb-w69e;7QpACI};nNG5`tDq9PbM_< zX#CM|hJA0FzM-#Y>7pj4{Bp5AW&ap_P&3NNOzIoMrlp2pNCtZ|%SH{Mz%Iimh0dIln)7seKu( z+)Lg|-A}8Axmlvroq8Xr@kEIl+jF%YoAIsjt>MSvZTj6^C0^20P+p0_HKDjZwI=dF%y1u*E|K+Mc@|R5BRYr15>&_t$tg*IajUBn#&dvK<>ekfb z>)SM|ukpE#-@4qmaw>T$72E_*>C+TK?EFshoz!T$Zr9FBBuI`XkEV`of|<`~ylu;q zD>KQN)NHyh%M57LT~BE-P_41@YhKX6;SvqJc(CjS04(-L+5*Z409ou@axOKW1{Ivs zcnNhjIh%?@PGAO~Pc^G9GPC~G{V-mwpt(8<>)Ku8oZPDoH8QcF8NbNx8VN#Q$~hU zNcw3bKqy|YW3cI8_jcA1vG1t`e9O*X8?lGC?5UFHN~bj})@f3@erCo3=`LNm+oW{2 zHo^kwjE2Q}OiK4?BP@`F!SDTbxt&Ip!uLW6LFy{9+7g~;!sP=wIWk{hX|qE2$_Ah%X>YazGS$t^%`N63wQbt6(a3ets* zi8qGi1^8TtJp&p8IvhA8guD>+F5xCg0kNdZLry9dzTj`b^UvTCE}NGUvv&>5D}s$cj(srV_4T&fk{) z={9{qL+W@!H8)E@scFN-jnRH zI1Z6tvk*C+fXMHb`JEIZ#wfKF^Txa!OT>jY5f8m!BuYuZ?Hh;9X8WhzWuyOHMcJDE zP96R$b>tg{I@Iuh`KT#JoqXEZ`oQ;5Vu}5tdFe*Bx* zTiunT!cV(G54<0}z0|aH=b_`1`wwFoe_OV)Jx7IhTr)P?RolPbbJW?I^Sb#j3XI32 literal 0 
HcmV?d00001 diff --git a/FitnessSync/backend/alembic/versions/__pycache__/ce0f0282a142_add_mfa_session_fields_to_api_tokens.cpython-313.pyc b/FitnessSync/backend/alembic/versions/__pycache__/ce0f0282a142_add_mfa_session_fields_to_api_tokens.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d3c14a6b8d7d1a324c9dc65ea63eac0d6788b55 GIT binary patch literal 1830 zcmc&!&2Jk;6rcU@+TQr11xld;Y7#1NA@O<>qtsL!XsMJ~v5+Uwm{?kEb|=X;>s@Dd zU1B6e)C)Nwb3lSygperzLQ<*!f~}-dG;jkB+$sV>+<0RGJP2nU?^tAMvoK1cFscG9rebqDq5(JL#8LJL)?Z60q!)!S4*&tq^sugn6;c#Z| zYB6IGZB^4}bu&Ms_Yo2lGy06ZnxCG{o0g`jS12(@Cw^2Bhw4xnhw3e#R2fy2Auv=R zLL7;R4S}IDjtT=EOIW@n6$W`S|3&V||Cn1D$=&Ob;gNgFqtCsDve7bAA5e{@M#<>d zi$H{!*L0zjE@5d7QW7-l#IViK>_?NWh67H+46_R5T>%RMt1aJc);%T$CR48Tbp?mi z@zz+36LZn2lkd?n-yv(|*8Muk1wlx>F!vtyZxAaCa@U-2k($YCKM3O*L99+H9;*(xc5vj}M2ub*FN-x>wy_?%?a)%+jG;{Z|NIrLb00gUZvh9OZ|j&x2rb{z&w%`bDr`fEnaK^sl;ntK7cac{zV5>-^3JrhX_- zkC^#;vff(}UMz)@ut~jg_7sa6hV5I1!IZGo0De(^d6eHGiw;OK;AODsny$4@>Mi;- zkBRaaDF`V`8;0q5erSfAN`^uC>qL1o(J?MKJ1F1x!^etUuKRY=C2!LUfWk|Lf&j#Q zNf3nJL`9JQOd(<77xe58=dSwli^nUOF1rwv#)}Hva0BcD0*6e}J^3d|}{v zZFl None: + # Add MFA session columns to api_tokens table + op.add_column('api_tokens', sa.Column('mfa_session_id', sa.String(), nullable=True)) + op.add_column('api_tokens', sa.Column('mfa_resume_data', sa.String(), nullable=True)) + op.add_column('api_tokens', sa.Column('mfa_expires_at', sa.DateTime(), nullable=True)) + + +def downgrade() -> None: + # Remove MFA session columns from api_tokens table + op.drop_column('api_tokens', 'mfa_session_id') + op.drop_column('api_tokens', 'mfa_resume_data') + op.drop_column('api_tokens', 'mfa_expires_at') diff --git a/FitnessSync/backend/docker-compose.yml b/FitnessSync/backend/docker-compose.yml new file mode 100644 index 0000000..55d203c --- /dev/null +++ b/FitnessSync/backend/docker-compose.yml @@ -0,0 +1,31 @@ +version: '3.8' + +services: + app: + build: . 
+ ports: + - "8000:8000" + environment: + - DATABASE_URL=postgresql://postgres:password@db:5432/fitbit_garmin_sync + - FITBIT_CLIENT_ID=${FITBIT_CLIENT_ID:-} + - FITBIT_CLIENT_SECRET=${FITBIT_CLIENT_SECRET:-} + - FITBIT_REDIRECT_URI=${FITBIT_REDIRECT_URI:-http://localhost:8000/api/setup/fitbit/callback} + depends_on: + - db + volumes: + - ./data:/app/data # For activity files + - ./logs:/app/logs # For application logs + + db: + image: postgres:15 + environment: + - POSTGRES_DB=fitbit_garmin_sync + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=password + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: \ No newline at end of file diff --git a/FitnessSync/backend/docs/setup.md b/FitnessSync/backend/docs/setup.md new file mode 100644 index 0000000..cce00c2 --- /dev/null +++ b/FitnessSync/backend/docs/setup.md @@ -0,0 +1,162 @@ +# Fitbit-Garmin Sync Setup Guide + +## Prerequisites + +Before setting up the Fitbit-Garmin Sync application, ensure you have the following: + +- Python 3.11+ installed +- PostgreSQL database server +- Docker and Docker Compose (for containerized deployment) +- Fitbit Developer Account to create an API application +- Garmin Connect Account + +## Quick Setup with Docker + +The easiest way to get started is using Docker Compose: + +```bash +# Navigate to the backend directory +cd backend + +# Start the application and database +docker-compose up --build +``` + +The application will be available at `http://localhost:8000`. + +## Manual Setup + +### 1. Clone the Repository + +```bash +git clone +cd fitbit-garmin-sync +``` + +### 2. Create Virtual Environment + +```bash +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate +``` + +### 3. Install Dependencies + +```bash +pip install -r requirements.txt +``` + +### 4. Database Setup + +Create a PostgreSQL database for the application: + +```bash +createdb fitbit_garmin_sync +``` + +### 5. 
Environment Configuration + +Create a `.env` file in the backend directory with the following content: + +```env +DATABASE_URL=postgresql://username:password@localhost:5432/fitbit_garmin_sync +FITBIT_CLIENT_ID=your_fitbit_client_id +FITBIT_CLIENT_SECRET=your_fitbit_client_secret +FITBIT_REDIRECT_URI=http://localhost:8000/api/setup/fitbit/callback +DEBUG=True +``` + +### 6. Run the Application + +```bash +# Using uvicorn directly +uvicorn main:app --host 0.0.0.0 --port 8000 +``` + +## Configuration Steps + +Once the application is running, you'll need to configure both Fitbit and Garmin access: + +### 1. Garmin Connect Setup + +1. Navigate to the Setup page at `http://localhost:8000/setup` +2. Enter your Garmin Connect username and password +3. Click "Save Garmin Credentials" + +### 2. Fitbit API Setup + +1. Go to the [Fitbit Developer Portal](https://dev.fitbit.com/) +2. Create a new application +3. Set the OAuth 2.0 Application Type to "Personal" +4. Set the Callback URL to `http://localhost:8000/api/setup/fitbit/callback` +5. Note down the "Client ID" and "Client Secret" +6. On the setup page, enter these values in the Fitbit API Credentials section +7. Click "Save Fitbit Credentials" +8. Click the authorization link that appears to connect your Fitbit account +9. After authorizing, copy the complete URL from your browser and paste it in the "Complete Fitbit OAuth Flow" section +10. Click "Complete OAuth Flow" + +## Running the Synchronization + +### 1. Weight Sync + +1. Go to the home page (`/`) +2. Click the "Sync Weight" button +3. Monitor the sync status in the logs table + +### 2. Activity Archiving + +1. Go to the home page (`/`) +2. Click the "Sync Activities" button +3. Enter the number of days back to look for activities +4. The original activity files (.fit, .gpx, .tcx) will be downloaded from Garmin and stored in the PostgreSQL database +5. Monitor the sync status in the logs table +6. 
Use the "List Stored Activities" and "Download Activity File" options to access stored activity files + +### 3. Health Metrics Sync + +1. Go to the home page (`/`) +2. Click the "Sync Health Metrics" button +3. Monitor the sync status in the logs table + +## Security Considerations + +- Store API credentials securely (not in version control) +- Use environment variables for configuration +- Encrypt sensitive data stored in the database +- Regularly rotate API tokens +- Implement proper error handling to avoid information disclosure + +## Troubleshooting + +### Common Issues + +- **Database Connection**: Ensure PostgreSQL is running and accessible +- **API Credentials**: Verify Fitbit and Garmin credentials are correct +- **Rate Limits**: Be mindful of API rate limits from both providers +- **Network Issues**: Ensure the application has internet access + +### Logs + +Check the application logs for errors: + +```bash +# In Docker +docker-compose logs app + +# For direct Python execution, add logging to track operations +``` + +## Updating + +To update to the latest version: + +```bash +# Pull latest changes +git pull origin main + +# Update dependencies +pip install -r requirements.txt + +# Restart the application +``` \ No newline at end of file diff --git a/FitnessSync/backend/init_db.py b/FitnessSync/backend/init_db.py new file mode 100644 index 0000000..dcf31be --- /dev/null +++ b/FitnessSync/backend/init_db.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python3 +""" +Script to initialize the database tables +""" +import os +from src.services.postgresql_manager import PostgreSQLManager + +def init_database(): + print("Initializing database...") + + # Use the same DATABASE_URL as in the docker-compose + database_url = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/fitbit_garmin_sync") + print(f"Using database URL: {database_url}") + + db_manager = PostgreSQLManager(database_url=database_url) + db_manager.init_db() + + print("Database initialized 
successfully!") + +if __name__ == "__main__": + init_database() \ No newline at end of file diff --git a/FitnessSync/backend/logs/src.services.garmin_client.log b/FitnessSync/backend/logs/src.services.garmin_client.log new file mode 100644 index 0000000..e69de29 diff --git a/FitnessSync/backend/main.py b/FitnessSync/backend/main.py new file mode 100644 index 0000000..c263a10 --- /dev/null +++ b/FitnessSync/backend/main.py @@ -0,0 +1,61 @@ +from fastapi import FastAPI, Request +from fastapi.templating import Jinja2Templates +from fastapi.staticfiles import StaticFiles +from contextlib import asynccontextmanager +from src.services.postgresql_manager import PostgreSQLManager +from alembic.config import Config +from alembic import command +import os + +# Create application lifespan to handle startup/shutdown +@asynccontextmanager +async def lifespan(app: FastAPI): + # Startup + # Run database migrations + alembic_cfg = Config("alembic.ini") + database_url = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/fitbit_garmin_sync") + alembic_cfg.set_main_option("sqlalchemy.url", database_url) + command.upgrade(alembic_cfg, "head") + + # Initialize database tables + db_manager = PostgreSQLManager(database_url=database_url) + db_manager.init_db() + + yield + + # Shutdown + # Add any cleanup code here if needed + +# Create FastAPI app with lifespan +app = FastAPI(lifespan=lifespan) + +# Mount static files +app.mount("/static", StaticFiles(directory="static"), name="static") + +# Initialize templates +templates = Jinja2Templates(directory="templates") + +# Include API routes +from src.api.status import router as status_router +from src.api.sync import router as sync_router +from src.api.setup import router as setup_router +from src.api.logs import router as logs_router +from src.api.metrics import router as metrics_router +from src.api.activities import router as activities_router + +app.include_router(status_router, prefix="/api") 
+app.include_router(sync_router, prefix="/api") +app.include_router(setup_router, prefix="/api") +app.include_router(logs_router, prefix="/api") +app.include_router(metrics_router, prefix="/api") +app.include_router(activities_router, prefix="/api") + +from fastapi import Request + +@app.get("/") +async def read_root(request: Request): + return templates.TemplateResponse("index.html", {"request": request}) + +@app.get("/setup") +async def setup_page(request: Request): + return templates.TemplateResponse("setup.html", {"request": request}) \ No newline at end of file diff --git a/FitnessSync/backend/pyproject.toml b/FitnessSync/backend/pyproject.toml new file mode 100644 index 0000000..f92bf51 --- /dev/null +++ b/FitnessSync/backend/pyproject.toml @@ -0,0 +1,34 @@ +[tool.black] +line-length = 88 +target-version = ['py311'] +include = '\.pyi?$' +extend-exclude = ''' +/( + # directories + \.eggs + | \.git + | \.venv + | venv + | build + | dist + | __pycache__ +)/ +''' + +[tool.pytest.ini_options] +testpaths = ["backend/tests"] +python_files = ["test_*.py"] +python_classes = ["Test*"] +python_functions = ["test_*"] +asyncio_mode = "auto" + +[tool.flake8] +max-line-length = 88 +extend-ignore = ['E203', 'W503'] +exclude = [ + ".git", + "__pycache__", + "build", + "dist", + ".venv" +] \ No newline at end of file diff --git a/FitnessSync/backend/requirements.txt b/FitnessSync/backend/requirements.txt new file mode 100644 index 0000000..7392245 --- /dev/null +++ b/FitnessSync/backend/requirements.txt @@ -0,0 +1,17 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +garminconnect==0.2.30 +garth==0.5.17 +fitbit==0.3.1 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +psycopg2-binary==2.9.9 +jinja2==3.1.2 +python-dotenv==1.0.0 +pydantic==2.1.1 +requests==2.31.0 +httpx==0.25.2 +aiofiles==23.2.1 +pytest==7.4.3 +pytest-asyncio==0.21.1 +alembic==1.13.1 \ No newline at end of file diff --git a/FitnessSync/backend/src/__init__.py b/FitnessSync/backend/src/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/FitnessSync/backend/src/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e358063f84c14cc99e65f2047c61734fcb24fcf9 GIT binary patch literal 164 zcmey&%ge<81iu?RGePuY5CH>>P{wB#AY&>+I)f&o-%5reCLr%KNa~ieenx(7s(x{C zNorn+en3%vR%&udvA$boNk~y*a<&nOm6uvv99)@~te*rFNzF^qFD^>fkB`sH%PfhH k*DI*J#bJ}1pHiBWYFESxG!tZJF^KVznURsPh#ANN07m^OB>(^b literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/api/__init__.py b/FitnessSync/backend/src/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/FitnessSync/backend/src/api/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..61105acf97ecc1981b4cce3bcddeb63172d38387 GIT binary patch literal 168 zcmey&%ge<81Zs_*nIQTxh=2h`DC08=kTI1Zok5e)ZzV$!6Oi{ABz4POKO;XkRlm5n zBsH%@KcFZ-D>b>KSl=zPB%~-YIok-t%1bRS4zA2g)=vV8q~@jQ7Z)Y#Cl+Mt$H!;p oWtPOp>lIYq;;_lhPbtkwwJTx;nhUbG7{vI<%*e=C#0+Es0D+*T&fd|) zRFRzYA&J@ts1%8+FGzVIZ~Zf>K7o)bSgA^)KB2xC!AsscbG~z}RH?d>XJ&spJ3Bi& zGw1!bwh)2w=Z{myUw+^?@kZDc_J80Ac}8R+bC*euqg;-sd`_T38ZrK|nDbGeBMX=P zxd091Br4^CG?)v~P@3eqJ~Hl_qTw#mM`W>^$iBR9Txh^Fk`G@$>>c|Lm;L!bKH>vF z4ovWQ9(GD{P!7rA3Bj4+`H=~JjF;Q;Q8}8Aj)`)to6L*zT=T@(J8Z}QWjiqr_r&4Y z{$DdVFPWF}amUA0Q)+KYhdgOak~`k=Eb$J{Iz2z+u6!Ks_8#tQ-X}Sm$aL?bn;C%x z&s>|jUS70y%6wlg>a@zF>$+7bo0gv8Sn#ZB=~v1Hy_gZ1_=REFOuAaJjk2j0nK)@^ zHWSX6RSk`K!+jF6{~I9BNS;tmCX|;sDh$A5Q!($$`|-N@fGi9G2TTGi=whOag<3G5 zi-lV-zl%j$uz-uT0p^_{xmdJCFX&>i7A)jq@fIxXU=f;_5>u2JgIadc zFmTb5ikwd@^0 zbhD7PsFqbLM%Im>p^a?XI5M*(cI+>S^1S*t0z}ydie1N^7Z~ zU^=|rKUll6{K@LMmD5||;9Ilw(OPaPUZW>dF08 z;pu}dv449YQ@gr+YgJqMdP~f-SovKXq-oEmrFe_C*&@^m6i}49#e|Qe7k5v9_AWMw z-Cu!Zc*f7_j4wj1(GLJ%zM@ewZ00Y|%vidu@ql>)Ffw3666oD)f%q$D-q+v9?k2|Ng zx_=zjdr-0IezM^{9RYNyuhn(0XtLin%h4!2VBwaThA-j`EDR^k7^6mo0#!SU23ILG&HBWvSrQI?GY#~T`eu8 
z&NWm-6J~Xr;`=*!OnXtnlyI)%>1j~GP*@#6>jKAdFUja88U2Iwz9hq2WcYP3`TgOV|3D0JL(A0z0<7Le zcjLms zk=Oyw_XWaVNe`?Qo`-jO``0FaUf3D< zaDD9g?Y*D`MpK8ei`4~?U&XtyYt?-q*Zm~ciCwE6z?nokda+L(;2u2q5*&;R?ahOa ZZE?rzyvQx^>#2Q$9Q97@Igk^#zX27)X?*|y literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/api/__pycache__/logs.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/logs.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b5ffe2489cccfe281fde548a793b7fcecb60d791 GIT binary patch literal 1228 zcmZ8g&2Jk;6rb5I@A|V&lO}TcFd-HAKrNvb6hKH&M3oQHlHs^$wOx=ADKDO8n^83~n&lW(b<3pd`Z9jjucee;{&@9oTQ-n^YvDn)|N{{5Yp z8HD_!Cl^m)0cO7;ctjXsbdT&(pt}YbO+7dE%v}qt9GiRgt^>|)0SZmx(@k=7@j18~ zWRozfL744ZH_bT>p6?#6EmB?@nBzOXXCuQ3+lFu8moD>Ik(IX1e5Us;ZyPrZR`E-$ z>X&X#j}`-u1iC?jAn3g$K9TfWVVw@^XPHZzyV%2C00^f^$mmx}X0 zrI63{I_?sA8nGg{y;7^JYHXB7`&sXhi$RtOJ}scCjJTt`Y``JZrj;XPD2GB7`do;x z$5jytkb#W*T)8~$%uyY9ltD)X1IQxG0Cn8M;ARLbE~wsYQy4D40-$Mq^)fN!7!$I6m zvYxm;7=ec?i^3)n%1+{b zER~aWyMoII|F7kv@fhD(6~mjPiM`b(TjYJSOI!3HFG|aREfnDz2BGttO?VXt<@UG? 
z^m#!yN?prk3{!GU&Z?wbJ>L0OSV-JAZFdZ+o0n&9=c9Rtd<^fPs>>k~3}+^#^cmUui@f%X zyz$&y`RVGhb8fkG<0L*OIGn8#!+p^Bq4DGG$DjT3`jf5g3r}Q-Z`T7gnq18#LdS>GIce!@1=QEQSAHFaU-03@y%#78hu-`v4w4H;SX$qLEw+=JfjkLjHw=`r~(yy}$E>d`n~^bJxi{N4a^P@^KvV*In}h6&UWi z?w%K^IPamJd5KE%Uh0jL6gNPooNcHtMh1v1bQ9T~5~f@>nfe)31gfWr8emijsNN=O zkWqa=^*2#Nj2ZxHu!$N@`R<;qt8(y`L#dz~zR1gM-3O9I-ohJ6i7CGu7UPN(57I}c zID4;mj@so8`Iy{!(KRW^T`5m0%Jzvhu;UG^JJmiZ$vxfVf$)H9SgYeLGWDiH?~-Pc zms^|za-2AF#<91Y(Fb~m59!?x+Qd;CrZ@WjqwQTCef||30X+3RPuFHkhv>gdH6r91{?*RFhqzL6?LV1~^u3?fA zQf`?aff-QVExQ=z0Zd?+loHX{Fm^Y1#0KVRV3Nb_gZAc{ZV#tIL`da0ZXm+=^xiD+S%Oyr!Pl3{%Z7 zTS7)PHA^&#D>SWH!Hj0AdQQtIKvrLn1vpGnl!BVq6vgr>O1_v`$srz4l!q&7&Y1}- z%A!sUGp84{LJ_82dcm}Ssul`GQ$@=fNHt9G!eXwdniQp_5&|#EeN0h6AX85(stH=% z2ZzB5dJN|F;7_cX^!Pwt;bOd6i<)V*mv(J_YbKsY?FG*Q$|@ZK0zY?b;T)s=jD>;^|q$!qLYI zRbTM&Ts07We7)Mgq-$-x7zFXL8Kl4&JL;LGH9*P$y{gJ(&z;vs4VV)J8sDb!# z#w>PZfqJs;glL6rA5Vjm?y7~XwiAKfEFYMPn&>0Vl0Z*}F zyBDz@1bqn5wzlwb#0KgBk1%i(Ficyl(!L(t7EV+{on_-`{-w}kFG23s`ksfGck1g& ziS{i4O(mvjyRE5#x3Lk@C}JamF`Ny{hgvHXjp1nQZK<&cOX-2eOuJTUk02icJj;jz z0JWo_?0RBb=&82#zZCjym655n%8M{_RArf`;8f@0dk&NZnwPoSVz0G7q%9j+Ah5>@sgJVCl7=xu zF=iqTS5DKGl_j|Ph7~}Tw3;hYT{9fR23s2T9PDYwQ#WEPo|1^2K!Dz-7Z9{E_83ke zK#7fy0D!Si4wSB!W0k@66WhW-^;Dv?P@b*)eErt8kfW_4DwQ*-K9WBrx zKr5P6X7o|aR@dBn7f;XtXepk1?z()WmX_sAaz^bMW_kNvT80lDB}%hioY#D zF3+&cy<$N1a0pO8Aj9K@O0>6(A(ZZD3{4@8U0BSR^aBJRBDjd)rwA}$=*I{?LC`9~ zCU61)n!)%J0Enz`~3BS%K@ zq0)R=vQywtwQry_`(z2zAO>pxU}^5jmlfC3-){?pM@GYm(yj96mGt_rF^CUNXhbba zxE8F}rDP709`q$4>eOx^&p8uXeXgzzr)GA?2?6ZyzD;550MXC zi(EonfWF4fl7QuFy7z7aZ@eYd@A;Z<{QUUD+4C3R=inE{&rV!;!P6l)g$@Hq@HBz= zX$0s}8?d4mhoRMp!Yk?{=o{4g5GGEorB`9{4Ezn`+94(KA0Ih>m3!TNsu~;Gb-TKJ zyKXTc)`;j9e>70Nulzx7Ez~f|L+sWTb~3men{xf%F4ocUSp-dEYQ; z5=*dG55a17F<2rx5-4qUpd1*7eh%!v#3c&|P_9JG@+_**!7b|)0-8_^8cSMU&8P+F ze<=orB|?VP3t7s#7!PtCw8|o7pEGtRJAiXo5m;Gena^^8m)Z5R(n-d11YY|R&&4YV#N+#tz>QYC`vhM50UtM7Ufn0~s>KK|eck=X?kBf4$387}Z*rG* zc`qmKxd?xmt07wJbXYb6z0dD$j?ZkJy@rglZ%&@tFrW3jiVtsOp7~$(4Q@>T`HNQ( 
z=-4U`H|}jkPVagpE>w*i$Bv@r0{pez#n#+_*F+NT!W3Tf;7B5Y@ILb6$#ZZr5<40v z4{T0Mc0=abKVWg7#&TA;qY+ZdgFShE@indNrTAgs~)FU<)4;Q#;t literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/api/__pycache__/setup.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/setup.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d4a9dbf95791dc0b8c5dfa274e30dd2bbe010f3e GIT binary patch literal 8331 zcmcIpU2I!NcAiUK{)*HOsei@t)t@c;M~ZF9wj{?E{Uh6wEla$TWk+0dDe_XHO_B0k z%8uDZ<0Qbw?mi?y8^Gx9LyHYi2gySU^ug%vgOdP`f$YO&rI_;F7){Z30l!g|fVSB@ z^qjeu6vdE5TC|to+%sop&YU?jbIvz&H=Is8f#=Qd&PRtE2>EBsyGml!Rh0*+&b<{>}j5dwgsa-USma&Ra2Xz3? zELz8$qm{Ih(YCRw(P~;P5I@&W`t@#Ujc9Ks)YVAZiCEE0M2FwhZ`2B@n{l1Mtt{Zy zGHw-cs|&bwj9UX-R{^)4aoxbJE#NlzYkUWC2H1Lwb^Zpie!$>2v=hIppVLbEII+QR z>obUr&Ez_Noy%+91X6ODEg?i~%C-D~oMyk9$>PP9B3UhEWW7>W)((HYE~~Xj)+=RX z?JO&+*n zS#_S9nwmKCMo3ypMiX%lr`k>iW$9ufEX5#Wy~Nl-q%EQ(%TQFdm$jC@{^TL_89m2g-3aNEf5=Q?aNNPlgI6i{SF_fd9r{GD3hYFsn~IA+so* zJw851ZM}0Qh?8mJvuyEmRBmk;NSvGIZjlc-woPC=_3Ls4GEdsbKGLl>2}92jgFto? 
z0eXqdr7^8QBCaz1Nq})3aO-|8Z=*B^$;vh-96A$&y0S6|h zSEixf;8L_#mXga$Jxg~~v(_hdgHQ+j{CNbFr1Tjw)d=l60Gw0nDxw}Sh`G-&)=8xVhQ_gXEV@fIMr z&6N>q;z_ouJ_n)f$xpp@nEX_z@ZR*i>sEfHeXE)gA~e zCc?`xq#c34&E;TBFR2OyB2g+QW6`)2Pe8FzPEr-P!FW884C3OJ0|ANySNXX_BBs^_ z0x}ka0>NaGM(1E(V2x&|u>pVZkC@cu64X|%hvZ$dR$24TMJ=gozI*Oo_!-|!aY3`% zrOmeoihr#6V9%P>8dwo?(d4NDgJJKgEyOldAR1PkIw?ybDkYib;~!aQ3u^KTCOa@` z#bhTY0w(R4;2<@P*&w?xiwX81J0XEVR=M68Uvt+#DWB}a(S1N3nDD{W!Wdm zORyQFmu=8^9DE9M)2cbb)*Bm#$6j&~9fk(f0*T6pqoJg>o{k{1)S~Lc0!*+U8CN09 z*p9ZlBlpg|KXre6g>Ng!J>%OnJ79y9OLKX>K~Q=>Zqj8L0!k&-HAf#H#Z*IhF2q6g z+=Gka9r$mcj1F+-g<(bHL<3+dKvkl#$0YK4&eX$;<^e+|VGiFio^nfX1Q(ZL(y;#S z2_+UsJSGYdj;Kyl2;CPf0x7IkCllbO1JWByQBV!sh-!}ol3FLPz zgcO_^dJL1-Fu@7-7$~zYTGPWZv%zZ+axxB4V9rRbLjR9CFweCt(_tvXeJKw?f`MSu zP8_Y@Htt{B(Y_{ht~Ix=HSSoiFjw2N)uefgyKXa8+t+I<9p)^lG@1WX#!&lqeIu80 z>ZaFMMFn`4oWl<35frKtg3Evz`wzyPvC+f`W8(&aOaq*o&ByP2)HY5VOGRzb2+bJ~@Ev2=9aT&JSidNAk+IwJmwvk!%)aA+gpm2A{ncUo6G1^HEs5x=O}u%jxt5ks!NR=_eOXh zAd{L>r9N~|MNbYzvX-!JL|3VnSQ>;4TV{scTk|->`#OrcfNbd%kZTpUh_zzfv_-7% zG0KLW;JV@_uc;GzD>dg0r8F$EPNUdVt`@Gc!d|rAB3z^p4>ylf8|eKJV0#8h(a{0w zPwnzaAt`!G3JY4~6bhV%5J^x0G2Wn%a_kdwzz%Xp0op(;jnC_?w1a{Wjf2CFha};4 zG`S!w(ZsE27$g^nOzqLSDQ*B&&@Bt{awr5)I1d zVia-^hZl^)zc2)B>+2(fYSx04YFPvY;6#+_JVPlMAiPYs23jZrVCkUnU3LBdCM54H zNx*S$al%qE7>&t;0!9D(UG)fzOGv`0K|&NYD5yq2DynfY5==RO5)iaNJPN0FVUFrK zHV^v13Wfmvs}5)y$kj`AUR;hPqx&PlP!bg1UNtYxW29$68F~&Aa7855?%lEr2o)lr zGgypGqJ%a8MNN!I?bv3M?L7<%9+&D|PQrkD7NppcL}hv!J3&vP^3gafCvNF4(M!lg*sJnrQ;(^!>M)>hauU6abi0JAqu2t#C?z0`*yh z|2qir7`*?L-}l_v^sDX_XUE-#&O936ieQ)VNX*+hCa zp}fANRNq|XX_l`y`MEV~<$I3%4yEqWs&(R<#?HT*&2$Z-G5 zo|IM^Bln#7+TK;`$*h^!s@|)(U!l|=fAlAr6Jq*=s2rbCYNuDNm)Dv*lvktas_TmN z`hR>=QM(Cq2Hc*%>8EM(NU%D^s4ntPHw~QPxyzEA6qirN7LO$mE8kM-QcQq zC||tilfF;6kB>a`D0O|S*8bmH8}miGAKEhA$Nr)F*h=?tW%miCZg|z|{kEd|FHSw+ z{(R&YCmu*2$`23x`qtn4?5}_J>pxv-?SItzyF*WJesy?dU~hONs zv@6qeDBX1E)3Hq7xpd#Tm8SFeM%P;RDFXo|aVPCgeeFnn3oS0(zo4|d_S?|ow%uDsCyGPwlkDC+H#yUS$EGQ>-g9b_3d1hd5@hxf1~uyo(@Cy#{gU zDd;Sm@Rpe{C- 
zKP(x^&G|_F+M^qp<5Ot}t*=ch(=*DI+0|=7Wo|Ju_hx$T%?&~VoY$~U$W?e{3Aw_Z z#8=HJ!zLl7(}pZxYntNzrLpsum0Joo`@)y=9R{URY!EKb+^*@leGJq;<%O% zk;9K$GTxcAcSae8D0y|&@j6DygJWs;1;udz!NCSGgNFG~KJ;iTbIg}M=2M15MGUV> z3rh6*igZH>$CSFoRckz7^ZytwtTneOub%&1$J5|fyPh^GJ-&36__bC1W7diKeJ|t6 zdfRIs>EJ$ddizi9B3~Fd$b4aJN4ksAyX+%^@rzeac8#5r$NDD@w z^Y7@%OuI-eJQE4*fU-}3r3s&Jgj-QLnq=S|uT{|zp&TYbhwiEs zIqcGvf%s<75iX(fh*$q`Q3zo*wE_hvF&cqZci=B$bA+&2J8{3VR_k4JPp(^dyk6q- z*Gm=~m#Wt>jwY)&clirf*|}@ra4;F%W=123W@fAfqR@;OblJ5XuN$#jx636j3{8jt zrUyS=%9}EesNHlL=sbwkZrac_3AwAb+-P;ZXjE52MgYQwY9$SVvhQ-bVn*R-^eZ&8 zmGkUWEfKf|gfBM~b*H!vD1amQ*s*jc9K`EGil4w#GaO7HPH0!EmGaG4Fcw;n7Vq@H z!Gz*<7;{2u3!I*BMd3)>v!s1+gPOSyP3-fO>d+2M+NU?|V^w{=QSRiyJ{vHnshTPK z3dp`AXL!8LrrE&NzJCY#wVPH0*Z!ah>2^3X8LaO%ztyZb1m(b~ zyUhxB`gwQnr{<5V)`^QVKbYUd*M^Z8&Tv@+klAi*(ehTyy}$~$x(zn!AMuQhpk!mel&wdyTY{G My>38~cEA+;7p=uVqW}N^ literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/api/__pycache__/status.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/status.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..66b33673fea0dc223aae3e19a11bd33c1ebc087c GIT binary patch literal 1581 zcmaJ>&ube;6rTN|-PKCAY{#-}ITrG-1kn;f!0n~9w9ppIsS~^{G@!6-cE`%L(ylr) zYH&|!bVx%lIy8SkkNtCsAS?r+l%8~RkT1RUyk$^mI9OsOMR57d^%w3KTHdMn3rErS_#(pI*}-t1Rq zu97X{=vCt6+S(qhfDf}$s{vN?k~QBpkILDM%jj5pN;Y=#)i;p~9lKGOprfJ4{HOQ! 
zkK%!#3UyeLWhfEn*UywFoibI&{C`{WWZM~gy z)SKXG8^E*+rl-to8(1IkM^X=g&>Y zUf89P1lu z*+5zxtz0l>V9I6J#VkYL^@L#ILy!U{uqE(K;U)6(=A*bro16<8iRL{Pe@A`6n_qx3{L$JWUnw)(b}4oOMas3yuZ4=Nq=1cT!0&WM72k^u_gOgdTSP!SqjHCG1F zyB!Nz6wepokD&-Dm*pUS9Cc$afGC&D1y}*JaJzAbr$Rw-rPpOt+NP rQFun+bGB?KH%Ir+2z*W#h-y8n{#^ZKf2^#$R&B+YYDBH1ELG<}bIN{2 literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/api/__pycache__/sync.cpython-313.pyc b/FitnessSync/backend/src/api/__pycache__/sync.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a0b8dfda261785886a6754a235669e9f5a6c0b78 GIT binary patch literal 2607 zcmbVNO>7%g5PtjPUE8sp#Q7_6+cPF3lx7xS!=FQBT z`QFTIEg19@7$YBFHNKJv`4bn70c&T}avUKKi9!@^lB77wrFhCmam`N(DUpf}Elhe+ z5|w}!70;xc@=~wEmnMBFKlM9&d9o!Hpn)h!b1^bfH&286NQ@}nUZVKY;)vj~X~^OF zf!or=ZB+ujgtn!Fv+=rm(?>G^S6NvU&j)-fLycD4AMelSduSZogh)R-Wq0J5$q|spWMFOj*$x12G__Ov%g{ z`3xS>7bDnlW^G~~Z61Wl_GPsSQ|GkI2T#aM!fSg~)zpf*s@i^4Etay21*8M2dV5hT)OT7{ zHD^#}6%121OR7p+(UM@8mJM5)X_hPvgJq!ErK${98CA0^YRmyhz@}}WQG=J*ZzktU zMLo%wrJGjrIxQ{e8H*(+4C@93uTCLl0xB3r68!{plZc4U=EclGys;I2?Jxe zx=*TIJGN zXLVMFxb@8-h}aTFzQ}C32>!zj&)CvJX-+k=PWTBQZ3ihD#;F6RPE-<@MQJxK`*6a# zrad?zWNsv{Ar)y%coge}2|^CE-kYoj+wM(O_eYl}9$fic`DFU37&Yqo7^4pkaPMA^r4Zu-6+YE zesYYwURTM&$vHks`bZR>Jh>qBksLQB zA6rgUeI4t**qSf)t8ZXMu7Nv*0R(8{oWd)DA`S@y z#DT&y10h)&we;*#yT=~K&EqFeogN-}BG3b1m}1$xxrG_^ zgrj%^3>~S%(}Fsxt51&uAJfmtFvMufPdfHjJ5E%?gU@9#AZ-!RBmGlD=kLlsZmG?U zT5~&&p+ovpx9enfG74mOf;|C?=3pV8fU|pq$Z7s180ZlYSvt^YY_A3(RQICd!a$FL zPJ(mVRf4BUSh>MBCr7Ij{R{W$Nf7IYmmy9~2%M^R#=aa|j}NcKhgV}GtKrcmJeY$= zXE(PLbnz(a(ET#(e_Xg5Y5a<=6|DJa-Oj7y4cB*@ZrzF5?H$K3h^BiEIFZ&>DAoYt zv-_gosD=+U1xANJW+u^P%Q+2lt!z*M7!<>`z2!<)Goja0?2xu(Rmz5$cUqVoVLH8I zK#C5P-TxlDU1(IPQ`>e3I9eeyuNNyrC0e9Dv=Laei~<{S|AlV(OVs((x;@|Nf>_74 zh<2THb#BawXIIa(z31IO+C_RER1gZb2Gf?naoq3Z&;~iUK_VOE+y;qlkp4e>;m;z= z(zeKP`yM5>34FFCf$RVL%r=3~Uw(n>hHBuw_ah%g9=yLYwH#UH&O949x-#+j+H)dv z(xb!M_^AnmKfA>PwH2<5uLk?R4WRsmXA{#u=YA?V@~iRDEmZja?H#>~FY7%}SJJ3< Y_SxXEm3JSj?lEIK;^&^@F;2h#54B`VM*si- literal 0 
HcmV?d00001 diff --git a/FitnessSync/backend/src/api/activities.py b/FitnessSync/backend/src/api/activities.py new file mode 100644 index 0000000..c097659 --- /dev/null +++ b/FitnessSync/backend/src/api/activities.py @@ -0,0 +1,44 @@ +from fastapi import APIRouter, Query, Response +from pydantic import BaseModel +from typing import List, Optional, Dict, Any + +router = APIRouter() + +class ActivityResponse(BaseModel): + id: Optional[int] = None + garmin_activity_id: Optional[str] = None + activity_name: Optional[str] = None + activity_type: Optional[str] = None + start_time: Optional[str] = None + duration: Optional[int] = None + # file_path removed since we store in DB + file_type: Optional[str] = None + download_status: Optional[str] = None + downloaded_at: Optional[str] = None + +@router.get("/activities/list", response_model=List[ActivityResponse]) +async def list_activities( + limit: int = Query(50, ge=1, le=200), + offset: int = Query(0, ge=0) +): + # This would return metadata for all downloaded/available activities + # Implementation will connect with the services layer + return [] + +@router.get("/activities/query", response_model=List[ActivityResponse]) +async def query_activities( + activity_type: Optional[str] = Query(None), + start_date: Optional[str] = Query(None), + end_date: Optional[str] = Query(None), + download_status: Optional[str] = Query(None) +): + # This would allow advanced filtering of activities + # Implementation will connect with the services layer + return [] + +@router.get("/activities/download/{activity_id}") +async def download_activity(activity_id: str): + # This would serve the stored activity file from the database + # Implementation will connect with the services layer + # It should return the file content with appropriate content-type + return Response(content=b"sample_content", media_type="application/octet-stream", headers={"Content-Disposition": f"attachment; filename=activity_{activity_id}.tcx"}) \ No newline at end of file 
diff --git a/FitnessSync/backend/src/api/logs.py b/FitnessSync/backend/src/api/logs.py new file mode 100644 index 0000000..9fb8189 --- /dev/null +++ b/FitnessSync/backend/src/api/logs.py @@ -0,0 +1,24 @@ +from fastapi import APIRouter, Query +from pydantic import BaseModel +from typing import List, Optional + +router = APIRouter() + +class SyncLogResponse(BaseModel): + id: int + operation: str + status: str + message: Optional[str] + start_time: str + end_time: Optional[str] + records_processed: int + records_failed: int + +@router.get("/logs", response_model=List[SyncLogResponse]) +async def get_logs( + limit: int = Query(20, ge=1, le=100), + offset: int = Query(0, ge=0) +): + # This would return sync logs + # Implementation will connect with the services layer + return [] \ No newline at end of file diff --git a/FitnessSync/backend/src/api/metrics.py b/FitnessSync/backend/src/api/metrics.py new file mode 100644 index 0000000..6e43609 --- /dev/null +++ b/FitnessSync/backend/src/api/metrics.py @@ -0,0 +1,98 @@ +from fastapi import APIRouter, Query +from pydantic import BaseModel +from typing import List, Optional, Dict, Any + +router = APIRouter() + +class HealthMetricResponse(BaseModel): + id: int + metric_type: str + metric_value: float + unit: Optional[str] + timestamp: str + date: str + source: str + detailed_data: Optional[Dict[str, Any]] + +class MetricDateRange(BaseModel): + start_date: Optional[str] + end_date: Optional[str] + +class MetricsListResponse(BaseModel): + metric_types: List[str] + date_range: MetricDateRange + +class HealthDataSummary(BaseModel): + total_steps: Optional[int] = 0 + avg_heart_rate: Optional[float] = 0.0 + total_sleep_hours: Optional[float] = 0.0 + avg_calories: Optional[float] = 0.0 + +class ActivityResponse(BaseModel): + id: Optional[int] = None + garmin_activity_id: Optional[str] = None + activity_name: Optional[str] = None + activity_type: Optional[str] = None + start_time: Optional[str] = None + duration: Optional[int] = None 
+ file_path: Optional[str] = None + file_type: Optional[str] = None + download_status: Optional[str] = None + downloaded_at: Optional[str] = None + +@router.get("/metrics/list", response_model=MetricsListResponse) +async def list_available_metrics(): + # This would return available metric types and date ranges + # Implementation will connect with the services layer + return { + "metric_types": ["steps", "heart_rate", "sleep", "calories"], + "date_range": { + "start_date": "2023-01-01", + "end_date": "2023-12-31" + } + } + +@router.get("/metrics/query", response_model=List[HealthMetricResponse]) +async def query_metrics( + metric_type: Optional[str] = Query(None), + start_date: Optional[str] = Query(None), + end_date: Optional[str] = Query(None), + limit: int = Query(100, ge=1, le=1000) +): + # This would query health metrics with filters + # Implementation will connect with the services layer + return [] + +@router.get("/health-data/summary", response_model=HealthDataSummary) +async def get_health_summary( + start_date: Optional[str] = Query(None), + end_date: Optional[str] = Query(None) +): + # This would return aggregated health statistics + # Implementation will connect with the services layer + return { + "total_steps": 123456, + "avg_heart_rate": 72.5, + "total_sleep_hours": 210.5, + "avg_calories": 2345.6 + } + +@router.get("/activities/list", response_model=List[ActivityResponse]) +async def list_activities( + limit: int = Query(50, ge=1, le=200), + offset: int = Query(0, ge=0) +): + # This would return metadata for all downloaded/available activities + # Implementation will connect with the services layer + return [] + +@router.get("/activities/query", response_model=List[ActivityResponse]) +async def query_activities( + activity_type: Optional[str] = Query(None), + start_date: Optional[str] = Query(None), + end_date: Optional[str] = Query(None), + download_status: Optional[str] = Query(None) +): + # This would allow advanced filtering of activities + # 
Implementation will connect with the services layer + return [] \ No newline at end of file diff --git a/FitnessSync/backend/src/api/setup.py b/FitnessSync/backend/src/api/setup.py new file mode 100644 index 0000000..b4e55d9 --- /dev/null +++ b/FitnessSync/backend/src/api/setup.py @@ -0,0 +1,138 @@ +from fastapi import APIRouter, Depends, HTTPException +from pydantic import BaseModel +from typing import Optional +from sqlalchemy.orm import Session +import traceback +from ..services.postgresql_manager import PostgreSQLManager +from ..utils.config import config +from ..services.garmin.client import GarminClient + +router = APIRouter() + +def get_db(): + db_manager = PostgreSQLManager(config.DATABASE_URL) + with db_manager.get_db_session() as session: + yield session + +class GarminCredentials(BaseModel): + username: str + password: str + is_china: bool = False + +class FitbitCredentials(BaseModel): + client_id: str + client_secret: str + +class FitbitCallback(BaseModel): + callback_url: str + +class GarminMFARequest(BaseModel): + verification_code: str + session_id: str + +class AuthStatusResponse(BaseModel): + garmin: Optional[dict] = None + fitbit: Optional[dict] = None + +@router.get("/setup/auth-status", response_model=AuthStatusResponse) +async def get_auth_status(db: Session = Depends(get_db)): + # This would return the current authentication status from the database + # Implementation will connect with the services layer + # For now, return placeholder until we have full implementation + return AuthStatusResponse( + garmin={ + "username": "example@example.com", + "authenticated": False, + "token_expires_at": None, + "last_login": None, + "is_china": False + }, + fitbit={ + "client_id": "example_client_id", + "authenticated": False, + "token_expires_at": None, + "last_login": None + } + ) + +@router.post("/setup/garmin") +async def save_garmin_credentials(credentials: GarminCredentials, db: Session = Depends(get_db)): + from ..utils.helpers import setup_logger 
+ logger = setup_logger(__name__) + + # This would save the Garmin credentials and attempt login + # Implementation will connect with the services layer + logger.info(f"Received Garmin credentials for user: {credentials.username}, is_china: {credentials.is_china}") + + # Create the client with credentials but don't trigger login in __init__ if we handle it separately + garmin_client = GarminClient(credentials.username, credentials.password, credentials.is_china) + logger.debug("GarminClient instance created successfully") + + try: + logger.debug("Attempting to log in to Garmin") + garmin_client.login() + + # If login is successful, we're done + logger.info(f"Successfully authenticated Garmin user: {credentials.username}") + return {"status": "success", "message": "Garmin credentials saved and authenticated successfully"} + except Exception as e: + logger.error(f"Error during Garmin authentication: {str(e)}") + logger.error(f"Exception type: {type(e).__name__}") + logger.error(f"Exception details: {repr(e)}") + import traceback + logger.error(f"Full traceback: {traceback.format_exc()}") + + if "MFA" in str(e) or "mfa" in str(e).lower() or "MFA Required" in str(e): + logger.info("MFA required for Garmin authentication") + # Initiate MFA process and get session ID + session_id = garmin_client.initiate_mfa(credentials.username) + return {"status": "mfa_required", "message": "Multi-factor authentication required", "session_id": session_id} + else: + logger.error(f"Authentication failed with error: {str(e)}") + return {"status": "error", "message": f"Error during authentication: {str(e)}"} + +@router.post("/setup/garmin/mfa") +async def complete_garmin_mfa(mfa_request: GarminMFARequest, db: Session = Depends(get_db)): + from ..utils.helpers import setup_logger + logger = setup_logger(__name__) + + # Complete the MFA process for Garmin using session ID + logger.info(f"Received MFA verification code for session {mfa_request.session_id}: {'*' * 
len(mfa_request.verification_code)}") + + try: + # Create a basic Garmin client without credentials - we'll use the session data + garmin_client = GarminClient() + logger.debug(f"Attempting to handle MFA for session: {mfa_request.session_id}") + + # Call the handle_mfa method which will use database-stored session data + success = garmin_client.handle_mfa(mfa_request.verification_code, session_id=mfa_request.session_id) + + if success: + logger.info(f"MFA verification completed successfully for session: {mfa_request.session_id}") + return {"status": "success", "message": "MFA verification completed successfully"} + else: + logger.error(f"MFA verification failed for session: {mfa_request.session_id}") + return {"status": "error", "message": "MFA verification failed"} + except Exception as e: + logger.error(f"MFA verification failed for session {mfa_request.session_id} with exception: {str(e)}") + logger.error(f"Exception type: {type(e).__name__}") + logger.error(f"Exception details: {repr(e)}") + import traceback + logger.error(f"Full traceback: {traceback.format_exc()}") + return {"status": "error", "message": f"MFA verification failed: {str(e)}"} + +@router.post("/setup/fitbit") +async def save_fitbit_credentials(credentials: FitbitCredentials, db: Session = Depends(get_db)): + # This would save the Fitbit credentials and return auth URL + # Implementation will connect with the services layer + return { + "status": "success", + "auth_url": "https://www.fitbit.com/oauth2/authorize?...", + "message": "Fitbit credentials saved, please visit auth_url to authorize" + } + +@router.post("/setup/fitbit/callback") +async def fitbit_callback(callback_data: FitbitCallback, db: Session = Depends(get_db)): + # This would handle the Fitbit OAuth callback + # Implementation will connect with the services layer + return {"status": "success", "message": "Fitbit OAuth flow completed successfully"} \ No newline at end of file diff --git a/FitnessSync/backend/src/api/status.py 
b/FitnessSync/backend/src/api/status.py new file mode 100644 index 0000000..2cf35e5 --- /dev/null +++ b/FitnessSync/backend/src/api/status.py @@ -0,0 +1,36 @@ +from fastapi import APIRouter +from pydantic import BaseModel +from typing import List, Optional + +router = APIRouter() + +class SyncLogResponse(BaseModel): + id: int + operation: str + status: str + message: Optional[str] + start_time: str + end_time: Optional[str] + records_processed: int + records_failed: int + +class StatusResponse(BaseModel): + total_weight_records: int + synced_weight_records: int + unsynced_weight_records: int + total_activities: int + downloaded_activities: int + recent_logs: List[SyncLogResponse] + +@router.get("/status") +async def get_status(): + # This would return the current sync status + # Implementation will connect with the services layer + return { + "total_weight_records": 100, + "synced_weight_records": 85, + "unsynced_weight_records": 15, + "total_activities": 50, + "downloaded_activities": 30, + "recent_logs": [] + } \ No newline at end of file diff --git a/FitnessSync/backend/src/api/sync.py b/FitnessSync/backend/src/api/sync.py new file mode 100644 index 0000000..164a381 --- /dev/null +++ b/FitnessSync/backend/src/api/sync.py @@ -0,0 +1,51 @@ +from fastapi import APIRouter, Depends +from pydantic import BaseModel +from typing import Optional +from ..services.postgresql_manager import PostgreSQLManager +from sqlalchemy.orm import Session +from ..utils.config import config + +router = APIRouter() + +class SyncActivityRequest(BaseModel): + days_back: int = 30 + +class SyncResponse(BaseModel): + status: str + message: str + job_id: Optional[str] = None + +def get_db(): + db_manager = PostgreSQLManager(config.DATABASE_URL) + with db_manager.get_db_session() as session: + yield session + +@router.post("/sync/weight", response_model=SyncResponse) +async def sync_weight(db: Session = Depends(get_db)): + # This would trigger the weight sync process + # Implementation will 
connect with the services layer + return { + "status": "started", + "message": "Weight sync process started", + "job_id": "weight-sync-12345" + } + +@router.post("/sync/activities", response_model=SyncResponse) +async def sync_activities(request: SyncActivityRequest, db: Session = Depends(get_db)): + # This would trigger the activity sync process + # Implementation will connect with the services layer + return { + "status": "started", + "message": "Activity sync process started", + "job_id": f"activity-sync-{request.days_back}" + } + +@router.post("/sync/metrics", response_model=SyncResponse) +async def sync_metrics(db: Session = Depends(get_db)): + # This would trigger the health metrics sync process + # Implementation will connect with the services layer + return { + "status": "started", + "message": "Health metrics sync process started", + "job_id": "metrics-sync-12345" + } \ No newline at end of file diff --git a/FitnessSync/backend/src/models/__init__.py b/FitnessSync/backend/src/models/__init__.py new file mode 100644 index 0000000..4128249 --- /dev/null +++ b/FitnessSync/backend/src/models/__init__.py @@ -0,0 +1,13 @@ +from sqlalchemy.ext.declarative import declarative_base + +# Create a base class for all models to inherit from +Base = declarative_base() + +# Import all models here to ensure they're registered with the Base +from .config import Configuration +from .api_token import APIToken +from .auth_status import AuthStatus +from .weight_record import WeightRecord +from .activity import Activity +from .health_metric import HealthMetric +from .sync_log import SyncLog \ No newline at end of file diff --git a/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a6f7179bbf3a4bce59ea5d33a945e34c5a33165 GIT binary patch literal 598 
zcmYk3L5tHs6vyARX_BVRS}(E+YFQ7`O9O&<5n&a?O;i@M;6)f_)3F&hP1ZM4wKu4d0kI65|HjX2p^Zm<_`pN))+UDQs?O=-ppCE^v zWsovtQ+UM=;O=gWy}XzAvwrHKHw9uZ8>Bw^jt5zgh8Q~bvtb%xesnGI*RP7*s)YPxla&1RVO+k2ge00*B zNz2jU+LXBwX053kzm#fOnisNYaN#x!x=xvmojsAFGUYRAP!-e%`KB(OHp}O;%k4G& zwh~oQ%GG8rKbZM{D%dBB)_X`otzS439N1_m&J{OS1AW_%1etK{R4}kf{NCAcbUGN? zEEGE<9J-a#N%57lLi?NAP?H$1nk9~?{R~^G(56Y*tYo6Kk+n&Vv3Vm4qm#$VoS-Pq z?pUg&*7mKEQ%YpLNHiA7s#(ZNC!DKV8P4bDn>M^p;kuF!aG$QWpIOhS{OU8tI=J`S hhj9N}aP_12#|L)M!OaeCb#S|b8&rS5b-Y5`e*uVZsZsy{ literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/models/__pycache__/activity.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/activity.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e7efeebaa63e288fb0556939da810dd65365b62 GIT binary patch literal 1388 zcmb7EPiWg#7=N;5S(f8CZrmg;gM`pS-H~|fnr?wHSeAuFZo5&L!65KZq!(AKEG3_w z(t0PuVC*!)V36H<^loLRvK=-e5^Mg{xPPq-T-TOVsPLtyVe*S*npP%0M{XYA& zQZWcVH^1KYzETPKTY||`7#pYe(D;T}#8TSCR6sFvkTdg;H&sy00u)RQv?UqOwTq?> zI@)f*K=T8K;;xoVXll-QY4uqnod!5jAky> z)y7D-N|<>mMH80U4B@Quma(qv)bCwC-%MsNleMwaDkH<(|E!t9nt7`N3*G5vRp{?J z+&lDm|3oS+3_F7!i#pAmDDQ*U!#UFjtS<^);Ic>ritl@!65^BE*b{$zKa@vNA5NoLZ>vadyG? 
z$C0Q^9L%LQhmx$$2jN3uIKZ%R=@mvcgw(!F(^AjD0R9MeW6mNCR96NcjAk3huRc+e*|ov#(ahY@A^K-l2HT^#SB^g(ev-_+ zIrvcWyft)_`WwhuoBviHUQHHSKdZm#zv#ajNv(~PXYQw0f4cU=wI8o1wf8?aVP(?9 z-R2aEcHrB-bD#D4FIOR5gH%)~%H&kT>6e+SXlBxA*^r<-;YlPL-Zq`z7(}*x7r} z&~S=`gg9{M1*zP63^$Gl4j?!nZdhE%nJ6#vup8gAv#mK|$K_r34`mz8|&siqPK@JbQ9e;rt#5Um*)wN((g=sx&h+)6CLrQ>E%v$!A)*rbac8vzFS* zH+8Bfe6CfXg%z}hENvB8`F(vWoBE_iH&@rsB&e%GdpGo>ejxI@0ViETg=TT;1zn-v zwmE5geNtD1+KB?^EF&!d+_D+jb5dvNsZGK8x+1gPM^v#8%~%S}S{bVD=ho1^2KhmT zJ6W1fux{lnZ6`wuGsu{tY@#bBXl{D5G^3lJK?^fzaR!xV(4_>;K`t`-(hRDk2zr*a z@;l0Y_1lW*CLVxG(ZaX|A!I|u#FzkVcxx}np z&xqoHdVQM?@gW(CoENy{r~wnugUI*o17D6t=P;sA!$8XOjL?q>#V+aCk!DDjBuCU+W_#FLNxjMS@xUg`XKW%?o zi3{7KTa)tT6K!0L%jT##S*o49GTw@pHbx&zDyt{kaXVpVd|`3WGiH zZQtpU{_w@}B#V>o!~6^V4G_~gX=%r30UQbSJ`01iUvd_y_gAGxe=FVMx9RHu;m-%L mH8}psDvI(Ky7>pX`3PNkglZ2_@n5y9G?Xv+6C}ThkN*HntBFeh literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/auth_status.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..51f1e396943b364c8e3c3d8be612e17b7cfc250f GIT binary patch literal 1257 zcmb7EO>7%Q6rSC+z3a6d*VL)oN~k5Os<_Y^DpC{)Atlg=Zb*dK94(DD>xsQ&J?p-i zg_@HkB+j{1GPfMVjSCWj1E?Ih;sX*MsX~ZTZiv90H*1F^$BwkK-}nCZ&G$xoQmMEE z*Q?(=ik?}7{He+5%Fn^cHiDD)SnPFGt48~!k8Yo~ zdiGu-D|6x(v3mUlb6m_>^h(;%LEs+8_UAY>eI#uIc`D3$uVGe77%0y*8d8Fe5#L0Nw{ z3K^Bd0aH$jR0o5tqbmA%jLRs*Haeg$fVw1;ea0z!GKhc)8c5~Ffsiy#x)E1SBxu-+ zcrdqyJr?eRqZ2Kje5%|Kn68!O&5oDhMVhG?h3LQ=x|>R@P2mbAp5Q~&R;!ovSxX3s z{j@%Wy}=O5STozS2x5%e z7v#74E2FJHiq!-AaOdm#r1)U8KD~TxeEVo+a`}zX2h-}}!QP>qR9_ptKdoOMzjgF* zQoldinl@e@_b|Tk;Fnx|T5F7*qcRn7YZ?84JbdGLKuC{*6{p>t* zesU+ZO$`6v{OzqDmcLv6{?4TK@N*BAPg~t?mXs?V$3cv$_J?QF%qE#lXEwHlv!Htj zXYv7%Q6rTODyQbeRGhqhF7t)_^LI3P=-&F-p@DBNA<0x(rk~kdW}M za;}GxZ6`gZY(KcY)ijjd$$5mk6C)#$IW%<)B(8>nMLOVgl_%in6@sUvO~42Un1KOS zV1nH)t&(;*u<&PZSWqc!2d&bE3Dp_Pov~^&)?8uLC+Fr1YXKX!g7SvZKJn@5w4CF8 
zm0X?}K?QTY_CL=!c;;kKg+|wH)>Qpk#yd$jhfpRNKh#aD`RKnsvE4LPZ5NVW2z`2& z^;Ie1F?*<0x6kKHJkB#S8M(a*=n9r4y8QoF-nz7 zX)lZORO2~H@8w}SVHPOuBp_s(aKy%2JY|Mim1+aC!W)g!QI(4Fx$el#eC`}lb zG>KJX)`M3JOsa)$2)%^U9P8B4{&nnzLOjSIhKdeX7kwU4!KB1t5}>sKM-vv$9w-+> zVM8$urEfZ4qr^e`TK0wNEEVyW=Pw{vN?4|NPzymV>e5xyRiUmI(KTfbKqo>ZP~ zKW_}H9}KRJme1~W_T_MS{e|5aoq7A&`RD7yGatUNS4K;3?0vQ$50~B_+@8UW(|hat zYr}>&xHVc_{@Ovj;{9m<;{4?N><$-O1OM3l?b;6)zPs@KyTiqgzVhMxw5F}*oN~pz zG)$vAtk*xjui~!8Y8EojAAfWSVIeV8F^npYJBHdcW})IZ#XI@~=gN3^HsK=2TC_uFO|1?HilURZB5wD-BV`yFdjRjg_r81Y9^d!Q8V!@+ zvYzkw->QWCC5`h{o`JJR2woD0I7*k;3Mh67O128Bt$}8jp=|4*UzdETTd@r=5UY;X zt=cA-*U2hbBTo4$arC}<7vGQzY6q3JaVlY37y1WrkcN?{>_nU$Fc7-K!H*7v@xbM* z=Z8#`J3;Jnq4n4iZ!1FEPb2TNB*W3T?66qAX3RXL`#vN*@FNz*NH-|u^09&kIU%6Q zf=m7Y^2%#P$&`JBcFp5T~uYc33tPx&}3agPvR6j28LuC1IArH zU;~Pr3svb;qxf@Sdcg4ZfZ~jq#UJJP5~Y}o`yLY5$-R`bgi=_+z%%@a_}Ds(LuMrj zXA!qPf%r4#@xB7qR2>lBi*^czLPClK~ZlS-iH0~g7+{!xB<*Ub=uhu8aR<=8BUjE8JylVZV z{c8Md{9;a;-ORplf4BZ)>-*LZHz&>Q7dFTd&MSS0^JXSb0yppuSva~-Fjp_vC0DJ` zVD94P9fXAldeIK3o#)J3Ts+Z6pD^wfwVnSRy^S_G@a+v2&m8 zy9-kg5@O7PRJINbVY?B*0D^%P4@l%mCxn=~Ar%G&B)(@mDmU_^JpX?0`{VC@zt6d> z)v5&7KOY@^Tk|~31DxjEUC|?)0(xGZ(P`OUpWSdy!=ZK{aD!Z6pET|5(j(#Cvr_9yYLO&S=TssJ2He|q6D+VtZa{Z1S zv%WWCTOtcB_OZ{P=(6Ur6Fc!m_w^XH_%I9@;(Em30R#FUYZxSB z7Zv-G9Cg}Ur!=rfj8a~ubQHRYFYqd*_Y>PMEDcIeJcwf73s?}MxkhO$UY^@Qj0-5D z_LAp9eu;3*z>d8z;A#}xaT0+h9JCROA{(bi%mA;V31TYh$gf{rk(d6ucwt)rneVs8?y)Va=O-?9xQ6tX1(+G(%OsD z-lD!f>!06A>z(P{MRW5^pYNv4m#5~U(VCsk*V9IK`o^OD!r8;eQrf=tlhRtWoDW33>KUr6%>HPu;(WcxDOy1!@>vTmS$7 literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-313.pyc b/FitnessSync/backend/src/models/__pycache__/weight_record.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c38d431dbd9e25244aef63dee3833de370b6550 GIT binary patch literal 1265 zcmb7EO=#Rk6dtX#T4{gmUDt8kToQ~Mybg&~6nfrEpn)Ft=-zTH?V+VT^sGan9tfrMUn9$8g}CJ_#5Mc&2G&;#s)I_~I#sY;mga|Xln%M9Y;(Z|45aA_2>C!-_dLON 
z!y%L9P855htx01)<^E|&B_g@&CCpb<&X+j@XD<;vC4B;#OQ7Uxpt~h7`sEeUH?ceH zZ&Qa#&MjnR*FZfgC7 z``6E#y*O9M#*DZ%tg!f+Id#mrQ^nc!%r-uQG_$}FKUzQf7+#51VWtGxl9Hf!LbxMy?FPeGgDcuht z5mCq)kI~(rRH#M@Kd2W3hU%yk29Qr@?}uV96cl+yIm}{OY2YjZ)RT~gYC_to*+h85 zLs?V9P{bllq&@Ip7;;s>Qqek(AIqu_43h%t30W)rhEsO_F0=|Pc!lJ}^MXOtf zBdBB5d{fgV&v5&S{Jyw4{^(D8eo~q4K3~l2cgJ^6noE;M)8P4otohz}`=s&4WNW&W zHP*+SlUi$XeY%m=+T(i)uT39hwHt^rG(>vaC_X#D;*nI^CsF` zm!Db3tJ$*ilkuzdv-L|gYxTz63-foYKdybh_QQJC+WN8!>*u}hv}>}O97SH_AF$!* zLbqJAT$Mttxr?A%2nz+w;x0h_nTq+<{T9`4Ad=Sm#rydH9Hbh4$yMC`(KSu`gWUOz Y+<8H+z94VDH0oMMo811Fs539{AABKMO8@`> literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/models/activity.py b/FitnessSync/backend/src/models/activity.py new file mode 100644 index 0000000..3bd260a --- /dev/null +++ b/FitnessSync/backend/src/models/activity.py @@ -0,0 +1,19 @@ +from sqlalchemy import Column, Integer, String, DateTime, Text, LargeBinary +from sqlalchemy.sql import func +from ..models import Base + +class Activity(Base): + __tablename__ = "activities" + + id = Column(Integer, primary_key=True, index=True) + garmin_activity_id = Column(String, unique=True, nullable=False) # Original Garmin ID + activity_name = Column(String, nullable=True) # Name of the activity + activity_type = Column(String, nullable=True) # Type of activity (e.g., 'running', 'cycling') + start_time = Column(DateTime, nullable=True) # Start time of the activity + duration = Column(Integer, nullable=True) # Duration in seconds + file_content = Column(LargeBinary, nullable=True) # Activity file content stored in database (base64 encoded) + file_type = Column(String, nullable=True) # File type (.fit, .gpx, .tcx, etc.) 
+ download_status = Column(String, default='pending') # 'pending', 'downloaded', 'failed' + downloaded_at = Column(DateTime, nullable=True) # When downloaded + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git a/FitnessSync/backend/src/models/api_token.py b/FitnessSync/backend/src/models/api_token.py new file mode 100644 index 0000000..13eebfb --- /dev/null +++ b/FitnessSync/backend/src/models/api_token.py @@ -0,0 +1,23 @@ +from sqlalchemy import Column, Integer, String, DateTime +from sqlalchemy.sql import func +from ..models import Base +import json + +class APIToken(Base): + __tablename__ = "api_tokens" + + id = Column(Integer, primary_key=True, index=True) + token_type = Column(String, nullable=False) # 'fitbit' or 'garmin' + access_token = Column(String, nullable=False) # This should be encrypted in production + refresh_token = Column(String, nullable=True) # This should be encrypted in production + expires_at = Column(DateTime, nullable=False) + scopes = Column(String, nullable=True) + garth_oauth1_token = Column(String, nullable=True) # OAuth1 token for garmin (JSON) + garth_oauth2_token = Column(String, nullable=True) # OAuth2 token for garmin (JSON) + # MFA session fields for garmin + mfa_session_id = Column(String, nullable=True) + mfa_resume_data = Column(String, nullable=True) # JSON blob + mfa_expires_at = Column(DateTime, nullable=True) + last_used = Column(DateTime, nullable=True) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git a/FitnessSync/backend/src/models/auth_status.py b/FitnessSync/backend/src/models/auth_status.py new file mode 100644 index 0000000..3630a6c --- /dev/null +++ b/FitnessSync/backend/src/models/auth_status.py @@ -0,0 +1,17 @@ +from sqlalchemy import Column, Integer, 
String, DateTime, Boolean +from sqlalchemy.sql import func +from ..models import Base + +class AuthStatus(Base): + __tablename__ = "auth_status" + + id = Column(Integer, primary_key=True, index=True) + service_type = Column(String, nullable=False) # 'fitbit' or 'garmin' + username = Column(String, nullable=True) # Masked username for security display + authenticated = Column(Boolean, default=False) # Whether currently authenticated + token_expires_at = Column(DateTime, nullable=True) # When current token expires + last_login = Column(DateTime, nullable=True) # Last successful login + is_china = Column(Boolean, default=False) # For Garmin - whether using garmin.cn domain + last_check = Column(DateTime, nullable=True) # When status was last checked + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git a/FitnessSync/backend/src/models/config.py b/FitnessSync/backend/src/models/config.py new file mode 100644 index 0000000..9a17a9b --- /dev/null +++ b/FitnessSync/backend/src/models/config.py @@ -0,0 +1,16 @@ +from sqlalchemy import Column, Integer, String, DateTime, JSON +from sqlalchemy.sql import func +from datetime import datetime +from ..models import Base + +class Configuration(Base): + __tablename__ = "configurations" + + id = Column(Integer, primary_key=True, index=True) + fitbit_client_id = Column(String, nullable=True) + fitbit_client_secret = Column(String, nullable=True) # This should be encrypted in production + garmin_username = Column(String, nullable=True) + garmin_password = Column(String, nullable=True) # This should be encrypted in production + sync_settings = Column(JSON, nullable=True) # JSON field for sync preferences + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git 
a/FitnessSync/backend/src/models/health_metric.py b/FitnessSync/backend/src/models/health_metric.py new file mode 100644 index 0000000..453e903 --- /dev/null +++ b/FitnessSync/backend/src/models/health_metric.py @@ -0,0 +1,17 @@ +from sqlalchemy import Column, Integer, String, DateTime, Float, Text +from sqlalchemy.sql import func +from ..models import Base + +class HealthMetric(Base): + __tablename__ = "health_metrics" + + id = Column(Integer, primary_key=True, index=True) + metric_type = Column(String, nullable=False) # Type of metric (e.g., 'steps', 'heart_rate') + metric_value = Column(Float, nullable=False) # Value of the metric + unit = Column(String, nullable=True) # Unit of measurement + timestamp = Column(DateTime, nullable=False) # When the metric was recorded + date = Column(DateTime, nullable=False) # Date of the metric + source = Column(String, nullable=False) # Source of the metric ('garmin') + detailed_data = Column(Text, nullable=True) # Additional details (stored as JSON string) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git a/FitnessSync/backend/src/models/sync_log.py b/FitnessSync/backend/src/models/sync_log.py new file mode 100644 index 0000000..c1c3c36 --- /dev/null +++ b/FitnessSync/backend/src/models/sync_log.py @@ -0,0 +1,18 @@ +from sqlalchemy import Column, Integer, String, DateTime, Text +from sqlalchemy.sql import func +from ..models import Base + +class SyncLog(Base): + __tablename__ = "sync_logs" + + id = Column(Integer, primary_key=True, index=True) + operation = Column(String, nullable=False) # 'weight_sync', 'activity_archive', 'metrics_download' + status = Column(String, nullable=False) # 'started', 'in_progress', 'completed', 'failed' + message = Column(Text, nullable=True) # Status message or error details + start_time = Column(DateTime, nullable=False) + end_time = Column(DateTime, nullable=True) 
# When operation completed + records_processed = Column(Integer, default=0) # Number of records processed + records_failed = Column(Integer, default=0) # Number of records failed + user_id = Column(Integer, nullable=True) # Reference to user (if applicable) + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git a/FitnessSync/backend/src/models/weight_record.py b/FitnessSync/backend/src/models/weight_record.py new file mode 100644 index 0000000..9f43d79 --- /dev/null +++ b/FitnessSync/backend/src/models/weight_record.py @@ -0,0 +1,17 @@ +from sqlalchemy import Column, Integer, String, DateTime, Float +from sqlalchemy.sql import func +from ..models import Base + +class WeightRecord(Base): + __tablename__ = "weight_records" + + id = Column(Integer, primary_key=True, index=True) + fitbit_id = Column(String, unique=True, nullable=False) # Original Fitbit ID to prevent duplicates + weight = Column(Float, nullable=False) # Weight value + unit = Column(String, nullable=False) # Unit (e.g., 'kg', 'lbs') + date = Column(DateTime, nullable=False) # Date of measurement + timestamp = Column(DateTime, nullable=False) # Exact timestamp + sync_status = Column(String, default='unsynced') # 'unsynced', 'synced', 'failed' + garmin_id = Column(String, nullable=True) # ID of record if synced to Garmin + created_at = Column(DateTime(timezone=True), server_default=func.now()) + updated_at = Column(DateTime(timezone=True), onupdate=func.now()) \ No newline at end of file diff --git a/FitnessSync/backend/src/services/__init__.py b/FitnessSync/backend/src/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/FitnessSync/backend/src/services/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..8a716a942b9f1d9236f5ec4b2ea8cd1a0adfcfb6 GIT binary patch literal 173 zcmey&%ge<81S*Z5nIQTxh=2h`DC08=kTI1Zok5e)ZzV$!6Oi{ABz4P2KO;XkRlm5n zBsH%@KcFZ-D>b>KSl=zPB%~-YIok-t%1bRS4zA2g)=vV8q~@jQ7Z)Y#7pE4LWhSQ< t>&M4u=4F<|$LkeT-r}&y%}*)KNwq6t1sV>rycopz$jr#dSi}ru0RV_?E9(FN literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/services/__pycache__/garmin_client.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/garmin_client.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fee4c6277228957b981814b8ddc0b87a07852375 GIT binary patch literal 34367 zcmeHwZEzdMmEho$APAD62$29kho2%Seo2&MiJ~Zxl&BAW1A;7xlxTO z?5u0=ZcEOlF7noSEycN5uFan1O|HUJQfH<%b)lssOYb?!K!6tvEbnS-eW}X*xS}FC z_Q_RO_g>HR%m5sbl9RpLz4DOgndy($-LJpid;R*&z3gl=16Rp|qk(UJg<<{)U+AS5 zAMRa(hgTRk%=J%}I)=Nv*RsX*ulWW&*+P>3}~N{tdRuK+-fH@^e8iG_7gQ8wx!;%lVR~Kxk}y zDiHLJ(_V`WXoQ~*e)oP$TJfZH8hy!ilARWN8pE7ynHGB(1HIS7Ok)~yxi#=6K^!4t z1p{2!Lb%d(YdxAA=<6O`4%4cq{g`Dux;inM*U2y&z)++c$)V6)R{A90VQj$I*`opY z8VUX!!+5mPTOcL3d^s(Ba%W(PTeXU^GOo=uRKD*NA1P#C?S_y|DKHCVSoDd)WSL1N zPSi@7DZ@&MtJVf)Wn5?0Ku@IlXfO~Cc&7uAKyZ>(b|-dXmSeFmwXqRP150}lyj6D3 z1}6fO^Bg9b^m5@T61N*W80(vz@dknqGQl5uX481^EltuWct+|v!(smn%tDYZJj;6L z!&A_21LH6kiCNPO8wY0DXP{I*htm}{?HT)lT#oV+2fucANPks z6Z6y47s1c#^ZQu;b1(r4Ss{o9Hgq8{H-{d|q^*z>9Wl*xz_4@N>@xub;uC5apjh(a z6za5axzJJ^c04W*oMg68-LY{p5|b9h)@(579}oL|Nuw}>CJjFSx%tVYfuJmz2@wbu z{YiZwI5C^d!g7xZ^-UT`#c&!A`KKq+vJ2suW8q2;H&4yZ_?ttaFd(vdh?_kRS%#W> z0^#GFcl^S3dgrU7J_3}ksOS_lq zd3%4{GH@rqFp*ys&96%2*F^JcZd(gp&3ZYDFX@e2`&O*BSIsY*U$J~deV=^adh#Qh z$H4oR!Fy0_W?Y)t;3WfoWDe|r+3>9K6=npZkjo{_bJPdsKbY}c(u^njr_K{)N+_5^ z+68mrBS0B3(PS=ZW~Dw1tp4gSz?ZJha%7ly-TFN`cB66`)M2L9PJtn{x*6p;W}`V; zJva3lP$~yjoJ?9V1%_Tzh>mpl==xg_K&e4It!Js-}MO{js3t zj%=YRA6hPzlDmM)Nxx`22eoQ-oiR2uZtJins2kZzQ+kX$H10g02TdMbK10(}>;&~5 zJ*7sxT|kY{WQ?m(dXygAn;xk>rTD6&C7-8PSQFTn6Vv0-^O{{hNVAeYBo z=(hJ31`P@~K(?EaBX_jIx@|I_mU4x-6;iZm$$+Ss9Kzc=KkWFPyCp9@SN 
zU5YpvpY{18)rU9^=wZwc@uHSt8~ea|=(%w};j-EA#W_Ft8h z8758b)3f8==}B70s58b&5#R;Udn1Fy4?6V6o4MVKYFlV=p2#VP{~xdnf%7 ztMCf`7|kCFN9tU{gBVd{N77mn`*oyD$_A0Z1;!~1At{GE{J;Zi58#gt19RXYxFWdK@UobfCxrm<~PXpW`^3h?6=mNUbuS^>RTNav-Rpg-)8G!ZY6R znE$zPX9kCpc~S$CEU8x{bD%o|7bA9>P%>}I8^Xy@_TvsgDsBfjAtcR^nPS-kq#KcH zCpWF_K)9rajshM(eUU-lChMgvzMx|<{NopX_`>2WZySi423NH^G9|vIxP9^XkI($@ z%;F&L=!;wWKLQq~_ba_u^si@M&At|X^MyBFSUPyia+LBx#eK`ie|hHJGs_TvG;TR_ z-^gU=3mnp(xTWoOMN^_;=dFsJ3%<8!zBjXU{BO>@b0${dzH~5dDZ9O;;mzmYc>dDC zgrzKMDT`aGDOAUyxTWU~TaD35DN|+N6}LR0fZrRp^sQK|2}?=TQi9+p^VPEy{+k!x zd0}amx1V_5^5mztb8U%SHk!-cW3-tA8Y$kkh3dB(zt^}>&f7cVmaf~Iiusbh*rtBo z(tkVGexE5*pk@D0bW5E#jf>Cmjw5kJ^|yZK;6mQF4<{PhqYdr+)_uIABW~GG>rveF zo$x~Ex1YTp;2pc;mOW5Herq(8e|IvUW=6Q=yLTh8{chQF6YZn#jU5Mg!|&w{r_YskqW%^`q4zg zfoQ{lSVI@TwVQW93mjV6lz-{)XRG@FArC@0%>MF_`A`e<)0WPxz7pNf_8uzfE7ZML zXn>cLi5R;n&UW`kfg9YIPnGGPGLS@rj2UFnK@G^1wQhYc$fJW=@@(ie9?J-(a)v0; zNW0udk4}x7N~4Ojz?G`TqoqO0bUhiGmPw)0)Jme@%Yb52k)6rZxEfF8(YRDNAGfLP z2*c1)x+Dr#Mgv{cf<}*#($+qprWI7BT4|L$o(80nCbxNy24EOmDm17^!8;Fb-T-nI zEiF*oQZZd>WsvwUh$(R-Bb79f8YkoiN&8OhZEyuM9ywL6B9&=_QdvD2?wr0f3F`)F z@;s)sX*?OUJ-qifY>!Pc)?i%ZyK{Q;gPAQ%&^#jXWl$~~l{QF_0b3iCZLyuQjdpHN&r<`y3k`z8xVfqThVj((J=Y$ zc6hQ=M<$tj%7Zx@q$~2|xH@Gx^oiCZ6=pf(&Ix9BW*^oJTo#^W=|ozY`yQdp?B^_V z#pfyW!2euZ;s%C}ai4}^CV*loXIS`~%n>9mX?6N$TAdD4Y{_?bwPJ*2(>6}UqM}9A zAk>T;rSLow1lKX@C*L0nYV8R0BH@yurb1BZF>2-8@;KP{WP==w^clIQyj2&ea-$#y zL<%5c5;x#LI&;pCJ7qq08pJ(ZQ>0XqTahRzyg|{3D8Axw&jR-TOr1kVkK-O5Jm(F~ zhQgDa-#y&d?+pUO%Ymm+*av%H^Mo)tC?H|?!`$7610v?%&V+3(i0}Bi? 
zcM+2y`HI2#5LCUg=x`8+8O@bp+J#FQB+yX9>J)(CYdwo50ClIjfwM1(m>{wq;VoJ4bvQV z5>pr^0vsq)>AXH)G6Uj+8UiPX$dC?5>d%K}gUJl&9wY!45NSP|_TLp%Dsu zgX4ZK1N}1tp;$?(LJEB`VVvhZnj@`*CPUO`` z^XlVy4GS4mKvOyrw~pQ^uYLW6t1nzX87tqFDBl+?-?uoG=opT64D+5d(T+2*^0SF@ zU$op8E1$S*`E_B{ZClZ+eJ}U%74Eq0_^+zAE<6{lI&isf#a8@k@5{Z{EbrSIKcZ1O z;jt+hXNyE8CxVyZL*~vF+X0%zu%4*T8Ju zw`yYc?*DQ959^ms$J&OjSmO34Zi|3xdgJ!K-&xkwKwBWxVC7#&4enc{rtXeg53X1X zQL-4f5*Z^=Oz8lT;3p$mW>X`d*YN56lNzR^<{qPiS|TcPuIBL0-sR5aR({K1+&=Vy zy-xBTUu-KPgqe2)`L&_v54>IHGFpe ztd_Al?lDG%ymr?QLW|qJ|IES^@7N!=9FXOkk0r1D;1MOG*^u>vdqB)HuXYxI63#n~ z2X_u{*Zr)j%Q3KD`!3U|f!p6|O2F@L%eovx4()q;J2i0od3zi9{bH9168&Pod8okf z%gj#gP@aL$GC}~KX8WtO_(QzmU!Xc2Xn_Ur1-A{-P@B`9eAi z_@T0$FQl^ynfw>h+5g)z`3gEz74k5(`RfbmEJc)brz1F3e)olRHbqLVCcFGXI-7dl zL`6Ewp`4f-MQ0A2_1SLjS%~s3#(fT*i{PYCt(o9~yC3)Tpxyk1P61MDW&e-p)(vLa zOpnoRPTwt8Xd-|%fdYcsj+S(G5bNkRvdalquoAnHwm@8=2QG5YUm8rI$uMUBx+Hn84P_g$f?aHUFT_dd&AtR_W zM=sYc`Xt}ocG%Y!dtVCYqt+uGw1iNvbz2rr3P4^P;8C#?|H!y6RnPY^alcLv4_Y=T zb(sS9lAg}z#C^HP^2oSnW!!I2!U{!9Y1iA;F1I;-A}5ki2Ay_+HvfS%FLmieJt4`xa0 zLD{?BNNWi-x_XPEcS(AKscoZZOEm~}ig`+c;5xFl)gKG0bEKC-@JLWoA*l3vwer<} z9{f+{q+{Q?HB#FL$8gu^KEP)7{EVOV!nQI|LuiVWrKvyQ>0R9UR_Z@+15iC~2pv3X zz#RuCa_HeI4X}NvI3G&7E~T!e-`xqm`IUSa-wRlJVwwHsRP(b zkhD-l!vc>wNL$5Dp;*FRvQW6Dl)Zc*S2!sXDQpk|=q@!;;=lmZ=pftu0#a!CAsaYT zu2zmnW$IBQXizv$;!nvpQt0wNo8m(X-NydLht=0LH8n+SLtYM*U_@O5tQ4RJcS|GN zQ#2XG45XMsbz#eVlBm@PnjsOVtks3?4qN175E2M{jyt#0u|ov2wu4%VfE zcqmrW3o2oi20RR_)xtQVV_X_ysTvL?f`$W)B{Vc?cj&TdDh|$CS;c_^3V{*!=+ENQ zFgj!CNJuf~4THeGGj<*^o}5O2D#$GC?VVOI0So8i^;4wrM~j^2e#l1UurUovYdt znoP%kx3vD@R0(`>tAEvK%BxtZs(*9e8~YaQv8uL2RcExSbEz)T<%xEA_*33!mp4{* zE>SfVt(uBeoxj{iH|(omdv}!gP4Poh{9u5uI3KrN_??xpS7QWFE8y8HVJe?Sri4M%fjt2Q7v#X%MtvRd(J?JCaJ}tcUYfouE!P~UU13Zfz?gJa? 
zZm8T+wQn(y=x{|lT(OSh{5}s~aUyPe@&jmnhXC@k)f|BK+5L7=>0nA$U~uv78s>$k~P}47c;`pRIxdgX2Ny4$M@bz~Hul0z-jY zr@Nu=+&oaDz3C{0mz%p_LyNh&zjF_~{B$?I{7h2|&p)f{Dj3SuE_ZB!mv?Iq77RPJ zKi`Mpzc3<{UzCvN8uO6F@Qdcotf36UFWdAG@XHPi;4=&mz*}_S!{?fZYYn^ubQ~%S z(PATbMJo*86|KeKsMG9b4AIt3t!uC0)=mt*wbuY%F$RNUI#9Pp{GsvewzValE!EKfypT_Gd2$LHexOqcXU-m@?k?2kOS{~X95N+8W2aHd_s=1J zc$YbFp*-??TDihdt_%f!p*7Plw`ru1`aM)KlBpJFFC_B;z2G*hg{qwmr1->?UKVpZ*;6Gco?qk10PT7`%2#9zII|;XZNJVhxm{BI zdf(UkUK_ZaxoXPGE?9AtCLCL%j;#sDfvDraO4*i`;)+!RQ`D(hWpqWGE*pXRgOlLI z8rJT`f<@DkW~q9qU@5$Oba^jte==@4xsva=X5;f~cuUQv_sbbu={?4hU9hS*=M}7! zRwPQBqovJ>(u2{`gDaI=6O}uol{;gVPpp*JKt5%MFrTu5S9(4La!Vi_3ht<++7YeX zv2Z$8c_202t35CGe70&ZR3Av7s;2>1PHe5vBECl*igj-I&XFldun zH-AHi$G89VkFGWSpnFmC{hqh`zSsBtfmmf1)p`|FVDTvSaIC<^TU~dA!_#>H`9a7J zP4&%^?!C-k7j-_tytBCi9^ZMw+`U_OLtEF?ti4fh0G}Jp=I$25jXlopt%jSG`tDZU z&06%oxzzyvH(PY*(`tZJH+SpMf3Nw_9^Fq2oy~`K=zhAx053^1ENUERiNnE{qzO!@ z2~URU0H|H3EOFRt$r2ZVUhsP@agw1zU-hFeaW34@O}pHZ0mGmcmH<4KlXj^sEhjfo z0X-~$IW&Q)(w1&SS{6N$fks#i^(h-y`ZbFntn_f$TZXMR#Tpb-w}A@XQw4+`15C0; zT=o=`Y(}K6#~&V_V!^CR;F&-ei~@o&Krs9_JCi=maxX#cxUYZ{F_APdnMDd|!T=m| zP^5}%5vI%u1dcPN2tO?v$lAb0VCrPXeHHWh_vjF_8RXo7oLru~lDcVsP++r>qjwsG z$?HF0%2&|&b8wPoMbg9I5klHWn7GKa#a+Qz#VL~&o^+6ty^XWgYY^+9vsG56?eFu8 zZx>g+-v0IW*E$l#+oQ$X7usUQ9hXfofQ!oLtko8Ev@N#997C6mp_)F-Q&;M~)p{-R z7yJ3jR=)KZZzuL8@=L)216Yu-qwNUUN=DaDT9=-_xofG8caUhp7&kza2O&F@@0*>@ zE)#RZWbSIy-PlyswNrOvrvaXmSz=`_js?KJ2pre|%Oh}HYg{55A+e<5(DM6^NL7=4 zYR0f+0|a2QOCmSbh)gg9C>TXW;!)o!^>Ek_l}KDK5RM*QH5fEqGlI6l2+}KZK$%I} z1%?|ihl~-4pi_>~lFxOqs&Ox!P#engj}J>W)acKK_OYW0+Uv-s-y8K zHV_UW>4Ih{o5%=@*eDcXq6=3A#is{_d}79mVRn&?!OtiWr4==^YG6*B<8;~jYAHhjlNFNf*Ezko7@F&AcAO`Yispvqum@G>f zV{5|S#bJhn_o2fqGt&m6sf0}wH%5ya6UAN8;;yB=vEso?ed1{1i<+)~>H5s#j-|}S zFD-B3?XI}R{r6SPm)m*E?gckEYe&_tg)c44EOo?zF+cHS%yp6|D-HqtJz{fGld zkNjQAaDgG2#?9b@rd%wgWl;v{HjGHiT*^R{7HJszI1r6#12H2~FDz(NexO8H4o&8w zoK!g)=r|MxoUo(`%ODK7_0r)1+6Q_h;Uf)c>)SfXaPVa*1|N2g^hGWg$0C6-hD8po zHw>i(E}5lFK^C{vQONxzWR)yQ8Fy(J{R&4N4(x}HI`jIATXA`!s3BU^kSJ=87PT*) 
zjup8s^`h8FG)ddG)UfR1-6Onfl((OWTh6ZJm+_Xewd3&cd%ES$|7KhQ$G;co{x13seeEN~Q4u$(-B>f8;Nn!t7=rKo`IC{~TE(vu!;cw{0?h9;3?G-nVP8x>Lqg+wFeQxHg`Wuy^lrbvshR(Qe2 zruP|;j}l#)89&&>7*CNSsE}$-9rvH04BWTTSpX+eP1g|wrv^ms{&i_&+e1=d|6fNg zfn*hjlRk(2_0iUl46$IU+!7)Gpe$ozBo&%X5o!ibH}qd1=ZCH)x$CSZaN3-$PL#Js z;i&wvSozVFvdTo+wrJV5h5A_8;ncUg8T!JUONZh3HkGP&EJhZmm#debTRzX*PsJ^# zSMrN44XmZ+x_{EWthw2A2dur84g+M5aueXa3rncm>2y%24s>fO`+t|gIVkr-2mW4W>pVct9RNDB-#hy z|FSPpP!}zzOBC#i7VL@_?B&8UPks3Rsj;geB8J+>Q|?94c33L40>0X`RH4jO?`G=R#r(S?nWc1dtD zz!ae)Ef8L5eqe6X1d5ZzYh%dFshTlNFybkxpvr08#uOu(6>Xj6I3YG$zTN0UO%5 zYYL{iiQb+`0wiqihQ>gMI}4sbX5b1y>X6LEDnb^xc{~yDbKHN$ph0wo(Q%{mG&-`% zV3sl)EJ4Tu4y(907ChinOXSl`VL#gUPC8On5@k?5v`gp+P|zW+WMny{UCjf zww;O>t_rH#(PxOM1PrwO_p5F0FZe^y{OqolTVPjF#?mqsrOzElWkww+Wg(N zx7rpi#I|-_0`uc_AJuRFZo^+Se7osKdzM|%y+hIZA@tvJ$CCfGftLp2mJ&(a+_t!% zZ|hy|;7?ESrvkkFeB5$jCBNd6Jxq=>kzEJ>4~T~5J39{+_Y~=VR%Gs}%Sh%azncIH zyw49ir$V2G)=y!>vEz{Cb{d{GK6#qHh?3-)5&*_qzmK1UB0vnY+&QCO}m8U5yGSsOL7E{mMon( zcfh5#I+M*Bjrsz_@pcoFCJ!V&{B0ziuyRu>%0D+($ zLprw`v^*q+)tyHd;$XHXn-;?hm|Fj|CXiB_-1&PnfE}r5YAgkOyC>}=mjdo?DXH`T z<4)4oOM$J1;mvDcOK?=bR;09%-A1FTpGQ3TBZ?Pbe5BE#DEk4LUcLi4h{{80mwd`l zfho1x6w>x2C&^V|xw_MU!z=BlDfWPIeoVXId~HrDI{~Ncm}!^?giBF6wbTkb*ov5H z^-!Fq1&kb}@Z~!#F^r2p+!&R>tM4! z9t$cvg0vU8JQCDY2xP`{2J<#3Uxg>n<)N_TJMAH9F#lk^#bX5#oH=Oiv?|-@p-hN) zdeT`L88#omk8iDILC#&Ihv$gSL8`hVIcbDUi;~EJ_&z`I72Bl4fXG#d$AeIQLb)4~ zT1nqOl);Ep!O=EyHcj|okg&@Oe1bA{qz@v^fhHPdWE6^*m@V=ZVdR^{L{0PySCk~C z?9wEBthFgpjisjWMKw(VgZd#yG7#C1(biNS>;Q-0#2(S@+#sI=sb}!$F{&6eY4J6~Aa^6(o50_1g`TrhU_m zo*_*uB0*lHLy$JpZjNejsg)MWi7lvLth)tG=Cvo`wkjmeXRs*=XLuH$2zxk&PlQK& z3ZJ~_oI__E9m3iYhSG;fzDUa`7t`knhGp1VQRk&uSp%qF9l=1#rSFw z8Bfx6IHYcbvv4S8dr)AQ z3sV?oIbc)^K*B&nyvTq`aAvOXv8%(&=B4L(+wr)`^Ly~bD^`2LS{1ccU3)HSZ6UXpMgiaHO) z%6cwm-6^hpegD<{*FjHm*Jaa6>RVOQoBQ9`ztFZgAFDdVTUoT?3x}{AuMYm%;Pt(? 
zZ1_zs@l;n?@A8r5HooLU-1_8g>*j>D@|Lxdui6o}?v(5W5204q&}p7nHWrei9pTb5 zaqHRJHQN(4dv4W$3?f$3e_0>5R^Hy)^4<2g+Ar$?Skzh>x7JBjItm}XS~cqPp45=7 zl9p&$OQNhJTGkOOJ8=0hJrh`YaOugVqdYqhw+(_oqQ;3}D-Z}dhlZcgQk3Fk+=3?y;=}G@Y!{r-k;JC6?F|16^k|S zGMp6bS$cjc$X9t1wi8j?iO*Jb82Z`$TA~M^Ew^0xnfO^Re{MW>*2kam^Q9AU>m)@% zdGinS3&Y>fNVFV?wjAJ_JNZ&PS0@E;|G^GWQh#E>$(Odrt@}O@to#zQ&CLs$iI#)W zmV>dD9=`c7UkaZPJNhWKy;D$lIped{VkqK+PX@7y@1tl1=d^dSyIY28bU*1h*x0{W z`)iFgf7V4sg#y00=R4;qm9V z7JM!xvs=7?=Rw_eENK}VdwSkGEq=W(cWjK9N{26-`h&A$W5T@n0%m~xx9~+l zGGU^O8tkfrW@%WMr2h;(bOAUFgPaSeN5_E96?BZ?Bn{^-q7K;`=<%26$a|GPz}Fw5 z12j3qy$4Qm^B8<05>CjEgHRH7V9%i-c5Lh|hG?0RsgTRY$mpPOF~n&Y=1-Wr+CsyS z=34$LgZG7ya9=+DF?s*(t|tt(-*WKXSGWzq_bSQll*Zt=`;69Uuw6T}%D`;_lEZD; zL~i`(Qy=5&-4hzC!FCL8&z|GQC-|ug{P>0F+3D!1nU68v-Rc}j z<6dRpwkX_|Px2>5-W~fGz3#HP5L~s&z-`552tRw4ulLkN&7 z+~DX)H$QxQ`N%t87NI;s6q}F{+%O}!rDO!}s|_ZD<68B725xt4Er!Z#?vEL`-Sug_ z8iW0M_bP+;MfWGAit;gbeXJtV!)`Na#H#NC&47 z=RuB*B62viLu~&~`KRHNyCE(M!IG8Pj!&f76Zlkx4hv4w)GqYt4(>yEg`U8Kc7X#^ ziAM7QWBGuw{R7hwV;VkSaz0=(|ADFdkSX|(sr$#w&6lTQndL7UKhZyJ*4Tf`piAKX E9|Vm=Gynhq literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-313.pyc b/FitnessSync/backend/src/services/__pycache__/postgresql_manager.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e09c2bab438a7b9b7d74550f2b261da84678411 GIT binary patch literal 2524 zcmai0&2Jk;6rcUDy>{%hjY5++X}bNQtN>f2jjEJVHHDVcPNO87z#*Zl^?Gb?vffQ+ z)=867!~vxU)C-jm7s>_H8y8Mo_!Al`)M%+CB)H_3)UDvafj8sbCXfRo>&1s?%ZnK3Z1R0B9)5ivO ztG0`Qakz{v$0Vk2FJq%*QpQVDOsVa;b#oCDSSP0%xPd1<&zXQ#r{%dmzT?+T*Q{b< zg=h0g3Ky+Q0PdrCL$EE1m;<{*6Sp-Z}~$Mm?K7>Um!Cki8)V;1oN*HdQEv=XiNW<5C~ z!8b2;Rgsp0OT0HaGkSGYzh=x%kFQ->YTuRKb}r`g(c;hfS|SiV8pNyE z$L=y=ekbgbLA;$@gn2SZ>+M_`XE)HqK&IHK5au?@qM@q62l<*;$9YOYjh~++-U7CK zn!j%QGsLtOhZu4}i4NDymw$YQrNts((4HUz$UGS0-WDjIg zF4{{oL#BYNqpgmf&5q+w`;UD+^3}+LTaSiU9{1nb82{twsjYPH)1$|?j`nW#^>1Yd zxB3QlGl`zmhWuH_^TX(v_WiY=W*<$>KAD=|gx|nN8}ctoUkj8X3qtX7u&m+P`7Hn_ 
ze#kpk{H_3nE$9*}eplpJX2tJH9LHGkyE4b|qEbk@tSJb3qL?alxN(k^V!DuV6Dpb; zh)$axjL_aZePRzs2y*shg!g91SwThBl-9A9zX49PO$U-k^=nv-icc-As=S#%;;5$M zs8Ev*rnzkvR%jEMcRjaaR~xLgcrGxM(a9S#9z+jtYP8|kbl>zF6jtdEv0bhC)7XMs z=aLq*BimnPdt=yi{Mt?I6WfAyf~CQ@SJeapwT&jUt{_wQ%GjYf=u2=QIR)(CaL6;4 zY{O@VlGBXc6|v!XgYxsq8Me_KZme)i3@8CGg}lnP4n^ODMv=4oKd5o@j5@cEyte-X zR+on3Rf)#9@>S?qfdYaib77DQJ2AM9WI;ev2n!xJpBu{!<0kMExo)nCvL>U3gB)YZ z6Fk$)rJ(O%s)t+b40;~OI(iXD$)To_Zz|`S%7vzqiBdVjhDpN;5ka)m)?$jj)ypu#-O3gJH^l7z$) zPW-P-1-&C3M-}0MR1q|Btsff4vR%fixtF2gGnSjsMA8q31u@I<;Dr;prvtHE3VTr7 ziJj2NjB?2cKMT}@zWlcYi#B}=$Tm22CyqM1o@53$GlP#a>V~)@O39wBuI^9AeoJ@W z8@oI9dEu{(t8MT%Wu5xtBbS9QWDR`@^TV#Ur5~lsa1(B6J0pY%EydcHYt}Kev6Nvz zDmEO3(}r=oVLIW?A;YNHg!&G=>8=Oc67>mT9e}WilLR9pK!R5cp7yq7m_Afgso`U4 z7!MH-Fn0rCk0>X+NweJj1nr9H*e&6K{sOW2*+4pWZf5|+yLZKStfwUbYBAK-!W3wk zAO(+^=~y*fU*(TOklkbC;QvEz-#-r`Gvhk<33#O5en1ZGW3uOozcHkj$*019B92{Z zUy;S4ZT~1kDkHo;luK>TT_*1Xk7X%+9mtL(2tpH`ZlViKbhe4k{ECu5cjHLueVpi9 Pm;aPc3q8*e5U%VWREB!# literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/services/fitbit_client.py b/FitnessSync/backend/src/services/fitbit_client.py new file mode 100644 index 0000000..1028ae2 --- /dev/null +++ b/FitnessSync/backend/src/services/fitbit_client.py @@ -0,0 +1,74 @@ +import fitbit +from datetime import datetime, timedelta +from typing import List, Dict, Any, Optional +import logging +from ..utils.helpers import setup_logger + +logger = setup_logger(__name__) + +class FitbitClient: + def __init__(self, client_id: str, client_secret: str, access_token: str = None, refresh_token: str = None): + self.client_id = client_id + self.client_secret = client_secret + self.access_token = access_token + self.refresh_token = refresh_token + self.fitbit_client = None + + if access_token and refresh_token: + self.fitbit_client = fitbit.Fitbit( + client_id=client_id, + client_secret=client_secret, + access_token=access_token, + refresh_token=refresh_token, + # Callback for token refresh if needed + ) + + def 
get_authorization_url(self, redirect_uri: str) -> str: + """Generate authorization URL for Fitbit OAuth flow.""" + # This would generate the Fitbit authorization URL + auth_url = f"https://www.fitbit.com/oauth2/authorize?response_type=code&client_id={self.client_id}&redirect_uri={redirect_uri}&scope=weight" + logger.info(f"Generated Fitbit authorization URL: {auth_url}") + return auth_url + + def exchange_code_for_token(self, code: str, redirect_uri: str) -> Dict[str, str]: + """Exchange authorization code for access and refresh tokens.""" + # This would exchange the authorization code for tokens + # Implementation would use the Fitbit library to exchange the code + logger.info(f"Exchanging authorization code for tokens") + # Return mock response for now + return { + "access_token": "mock_access_token", + "refresh_token": "mock_refresh_token", + "expires_at": (datetime.now() + timedelta(hours=1)).isoformat() + } + + def get_weight_logs(self, start_date: str, end_date: str = None) -> List[Dict[str, Any]]: + """Fetch weight logs from Fitbit API.""" + if not self.fitbit_client: + raise Exception("Fitbit client not authenticated") + + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + try: + # Get weight logs from Fitbit + weight_logs = self.fitbit_client.get_bodyweight( + base_date=start_date, + end_date=end_date + ) + + logger.info(f"Fetched {len(weight_logs.get('weight', []))} weight entries from Fitbit") + return weight_logs.get('weight', []) + except Exception as e: + logger.error(f"Error fetching weight logs from Fitbit: {str(e)}") + raise e + + def refresh_access_token(self) -> Dict[str, str]: + """Refresh the Fitbit access token.""" + # Implementation for token refresh + logger.info("Refreshing Fitbit access token") + # Return mock response for now + return { + "access_token": "new_mock_access_token", + "expires_at": (datetime.now() + timedelta(hours=1)).isoformat() + } \ No newline at end of file diff --git 
a/FitnessSync/backend/src/services/garmin/__init__.py b/FitnessSync/backend/src/services/garmin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/FitnessSync/backend/src/services/garmin/auth.py b/FitnessSync/backend/src/services/garmin/auth.py new file mode 100644 index 0000000..f165048 --- /dev/null +++ b/FitnessSync/backend/src/services/garmin/auth.py @@ -0,0 +1,217 @@ +import garth +import garminconnect +from datetime import datetime, timedelta +from uuid import uuid4 +import json +import traceback + +from src.utils.helpers import setup_logger +from src.models.api_token import APIToken +from src.services.postgresql_manager import PostgreSQLManager +from src.utils.config import config + +logger = setup_logger(__name__) + +class AuthMixin: + def login(self): + """Login to Garmin Connect with proper token handling.""" + logger.info(f"Starting login process for Garmin user: {self.username}") + try: + logger.debug(f"Attempting garth login for user: {self.username}") + garth.login(self.username, self.password, return_on_mfa=True) + logger.debug(f"Successfully completed garth authentication for: {self.username}") + + logger.debug(f"Creating Garmin Connect client for user: {self.username}") + self.garmin_client = garminconnect.Garmin(self.username, self.password) + self.garmin_client.garth = garth.client + logger.debug(f"Successfully created Garmin Connect client for user: {self.username}") + + self.is_connected = True + logger.info(f"Setting is_connected to True for user: {self.username}") + + self.save_tokens() + logger.info(f"Successfully logged in to Garmin Connect as {self.username}") + except Exception as e: + logger.error(f"Error logging in to Garmin Connect: {str(e)}") + logger.error(f"Exception type: {type(e).__name__}") + + error_str = str(e).lower() + if "mfa" in error_str or "2fa" in error_str or "unauthorized" in error_str: + logger.warning(f"Multi-factor authentication likely required for {self.username}") + logger.debug(f"Detected MFA 
indicator in error message: {error_str}") + raise Exception("MFA Required: Please provide verification code") + + logger.error(f"Full traceback: {traceback.format_exc()}") + raise e + + def save_tokens(self): + """Save garth tokens to be used later.""" + logger.info(f"Starting token saving process for user: {self.username}") + try: + db_manager = PostgreSQLManager(config.DATABASE_URL) + + with db_manager.get_db_session() as session: + token_record = session.query(APIToken).filter(APIToken.token_type == 'garmin').first() + + if not token_record: + token_record = APIToken(token_type='garmin') + session.add(token_record) + + oauth1_token = getattr(garth.client, 'oauth1_token', None) + oauth2_token = getattr(garth.client, 'oauth2_token', None) + + if oauth1_token: + try: + token_dict = oauth1_token.__dict__ if hasattr(oauth1_token, '__dict__') else str(oauth1_token) + token_record.garth_oauth1_token = json.dumps(token_dict, default=str) + except Exception as e: + logger.warning(f"Could not serialize OAuth1 token for user {self.username}: {e}") + + if oauth2_token: + try: + token_dict = oauth2_token.__dict__ if hasattr(oauth2_token, '__dict__') else str(oauth2_token) + token_record.garth_oauth2_token = json.dumps(token_dict, default=str) + except Exception as e: + logger.warning(f"Could not serialize OAuth2 token for user {self.username}: {e}") + + session.commit() + logger.info(f"Garmin tokens saved successfully for user: {self.username}") + + except Exception as e: + logger.error(f"Error saving garth tokens for user {self.username}: {str(e)}") + raise e + + def load_tokens(self): + """Load garth tokens to resume a session.""" + logger.info(f"Starting token loading process for user: {self.username}") + try: + db_manager = PostgreSQLManager(config.DATABASE_URL) + + with db_manager.get_db_session() as session: + try: + token_record = session.query(APIToken).filter(APIToken.token_type == 'garmin').first() + except Exception as db_error: + logger.info(f"No existing Garmin 
tokens found for user {self.username} or table doesn't exist: {db_error}") + return False + + if not token_record or (not token_record.garth_oauth1_token and not token_record.garth_oauth2_token): + logger.info(f"No Garmin token record found in database for user: {self.username}") + return False + + if token_record.garth_oauth1_token: + try: + oauth1_data = json.loads(token_record.garth_oauth1_token) + setattr(garth.client, 'oauth1_token', oauth1_data) + logger.info(f"Successfully restored OAuth1 token for user: {self.username}") + except Exception as e: + logger.warning(f"Could not restore OAuth1 token for user {self.username}: {e}") + + if token_record.garth_oauth2_token: + try: + oauth2_data = json.loads(token_record.garth_oauth2_token) + setattr(garth.client, 'oauth2_token', oauth2_data) + logger.info(f"Successfully restored OAuth2 token for user: {self.username}") + + self.garmin_client = garminconnect.Garmin(self.username, self.password) + self.garmin_client.garth = garth.client + self.is_connected = True + logger.debug(f"Successfully created Garmin Connect client for user {self.username} with restored session") + return True + except Exception as e: + logger.warning(f"Could not restore OAuth2 token for user {self.username}: {e}") + + return True + except Exception as e: + logger.error(f"Error loading garth tokens for user {self.username}: {str(e)}") + return False + + def initiate_mfa(self, username: str = None): + """Initiate the MFA process and return session data.""" + user_identifier = username if username else self.username + logger.info(f"Initiating MFA process for Garmin user: {user_identifier}") + mfa_session_id = str(uuid4()) + + db_manager = PostgreSQLManager(config.DATABASE_URL) + + with db_manager.get_db_session() as session: + token_record = session.query(APIToken).filter(APIToken.token_type == 'garmin').first() + + if not token_record: + token_record = APIToken(token_type='garmin') + session.add(token_record) + + token_record.mfa_session_id = 
mfa_session_id + resume_data = { + 'username': user_identifier, + 'password': self.password, + 'is_china': self.is_china + } + token_record.mfa_resume_data = json.dumps(resume_data) + token_record.mfa_expires_at = datetime.now() + timedelta(minutes=10) + + session.commit() + + logger.info(f"MFA session initiated for user: {user_identifier}, session ID: {mfa_session_id}") + return mfa_session_id + + def handle_mfa(self, verification_code: str, session_id: str = None): + """Handle the MFA process by completing authentication with the verification code.""" + logger.info(f"Starting MFA completion process with session ID: {session_id}") + + db_manager = PostgreSQLManager(config.DATABASE_URL) + + with db_manager.get_db_session() as session: + token_record = session.query(APIToken).filter( + APIToken.token_type == 'garmin', + APIToken.mfa_session_id == session_id + ).first() + + if not token_record: + raise Exception("No pending MFA authentication for this session.") + + if token_record.mfa_expires_at and datetime.now() > token_record.mfa_expires_at: + self.cleanup_mfa_session(token_record, session) + raise Exception("MFA verification code has expired.") + + try: + resume_data = json.loads(token_record.mfa_resume_data) + self.username = resume_data.get('username') + self.password = resume_data.get('password') + + if resume_data.get('is_china', False): + garth.configure(domain="garmin.cn") + + try: + garth.client.mfa_submit(verification_code) + except AttributeError: + garth.login(self.username, self.password, verification_code) + + self.garmin_client = garminconnect.Garmin(self.username, self.password) + self.garmin_client.garth = garth.client + + try: + profile = self.garmin_client.get_full_name() + logger.info(f"Verified authentication for user: {profile}") + except Exception as verify_error: + logger.warning(f"Could not verify authentication for user {self.username}: {verify_error}") + + self.is_connected = True + self.save_tokens() + + 
self.cleanup_mfa_session(token_record, session) + + logger.info(f"Successfully completed MFA authentication for {self.username}") + return True + + except Exception as e: + logger.error(f"Error during MFA completion for user {self.username}: {e}") + self.cleanup_mfa_session(token_record, session) + raise e + + def cleanup_mfa_session(self, token_record, session): + """Clear out MFA session data from the token record.""" + token_record.mfa_session_id = None + token_record.mfa_resume_data = None + token_record.mfa_expires_at = None + session.commit() + logger.debug("MFA session data cleaned up.") diff --git a/FitnessSync/backend/src/services/garmin/client.py b/FitnessSync/backend/src/services/garmin/client.py new file mode 100644 index 0000000..16101dc --- /dev/null +++ b/FitnessSync/backend/src/services/garmin/client.py @@ -0,0 +1,39 @@ +import garth +from src.utils.helpers import setup_logger +from .auth import AuthMixin +from .data import DataMixin + +logger = setup_logger(__name__) + +class GarminClient(AuthMixin, DataMixin): + def __init__(self, username: str = None, password: str = None, is_china: bool = False): + self.username = username + self.password = password + self.is_china = is_china + self.garmin_client = None + self.is_connected = False + + logger.debug(f"Initializing GarminClient for user: {username}, is_china: {is_china}") + + if is_china: + logger.debug("Configuring garth for China domain") + garth.configure(domain="garmin.cn") + + if username and password: + logger.info(f"Attempting to authenticate Garmin user: {username}") + if not self.load_tokens(): + logger.info("No valid tokens found, attempting fresh login") + self.login() + else: + logger.info("Successfully loaded existing tokens, skipping fresh login") + else: + logger.debug("No username/password provided during initialization") + + def check_connection(self) -> bool: + """Check if the connection to Garmin is still valid.""" + try: + profile = self.garmin_client.get_full_name() if 
self.garmin_client else None + return profile is not None + except: + self.is_connected = False + return False diff --git a/FitnessSync/backend/src/services/garmin/data.py b/FitnessSync/backend/src/services/garmin/data.py new file mode 100644 index 0000000..2904a9f --- /dev/null +++ b/FitnessSync/backend/src/services/garmin/data.py @@ -0,0 +1,139 @@ +from datetime import datetime +from typing import List, Dict, Any, Optional +from src.utils.helpers import setup_logger + +logger = setup_logger(__name__) + +class DataMixin: + def upload_weight(self, weight: float, unit: str = 'kg', timestamp: datetime = None) -> bool: + """Upload weight entry to Garmin Connect.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + if not timestamp: + timestamp = datetime.now() + + try: + result = self.garmin_client.add_body_composition( + timestamp=timestamp, + weight=weight + ) + except Exception: + try: + result = self.garmin_client.add_body_composition( + timestamp=timestamp.isoformat(), + weight=weight + ) + except Exception: + result = self.garmin_client.add_body_composition( + timestamp=timestamp.strftime('%Y-%m-%d'), + weight=weight + ) + + logger.info(f"Successfully uploaded weight: {weight} {unit} at {timestamp}") + return result is not None + except Exception as e: + logger.error(f"Error uploading weight to Garmin: {str(e)}") + if "401" in str(e) or "unauthorized" in str(e).lower(): + logger.error("Authentication failed - need to re-authenticate") + raise Exception("Authentication expired, needs re-authentication") + raise e + + def get_activities(self, start_date: str, end_date: str = None, limit: int = 100) -> List[Dict[str, Any]]: + """Fetch activity list from Garmin Connect.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + activities = self.garmin_client.get_activities(start_date, end_date) + logger.info(f"Fetched 
{len(activities)} activities from Garmin") + return activities + except Exception as e: + logger.error(f"Error fetching activities from Garmin: {str(e)}") + raise e + + def download_activity(self, activity_id: str, file_type: str = 'tcx') -> Optional[bytes]: + """Download activity file from Garmin Connect and return its content.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + file_content = self.garmin_client.get_activity_details(activity_id) + logger.info(f"Downloaded activity {activity_id} as {file_type} format") + return file_content if file_content else b"" + except Exception as e: + logger.error(f"Error downloading activity {activity_id} from Garmin: {str(e)}") + raise e + + def get_heart_rates(self, start_date: str, end_date: str = None) -> Dict[str, Any]: + """Fetch heart rate data from Garmin Connect.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + heart_rates = self.garmin_client.get_heart_rates(start_date, end_date) + logger.info(f"Fetched heart rate data from Garmin for {start_date} to {end_date}") + return heart_rates + except Exception as e: + logger.error(f"Error fetching heart rate data from Garmin: {str(e)}") + raise e + + def get_sleep_data(self, start_date: str, end_date: str = None) -> Dict[str, Any]: + """Fetch sleep data from Garmin Connect.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + sleep_data = self.garmin_client.get_sleep_data(start_date, end_date) + logger.info(f"Fetched sleep data from Garmin for {start_date} to {end_date}") + return sleep_data + except Exception as e: + logger.error(f"Error fetching sleep data from Garmin: {str(e)}") + raise e + + def get_steps_data(self, start_date: str, end_date: str = None) -> Dict[str, Any]: + """Fetch steps data from 
Garmin Connect.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + steps_data = self.garmin_client.get_steps_data(start_date, end_date) + logger.info(f"Fetched steps data from Garmin for {start_date} to {end_date}") + return steps_data + except Exception as e: + logger.error(f"Error fetching steps data from Garmin: {str(e)}") + raise e + + def get_all_metrics(self, start_date: str, end_date: str = None) -> Dict[str, Any]: + """Fetch all available metrics from Garmin Connect.""" + if not self.is_connected: + raise Exception("Not connected to Garmin Connect") + + try: + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + metrics = { + 'heart_rates': self.get_heart_rates(start_date, end_date), + 'sleep_data': self.get_sleep_data(start_date, end_date), + 'steps_data': self.get_steps_data(start_date, end_date), + } + + logger.info(f"Fetched all metrics from Garmin for {start_date} to {end_date}") + return metrics + except Exception as e: + logger.error(f"Error fetching all metrics from Garmin: {str(e)}") + raise e diff --git a/FitnessSync/backend/src/services/postgresql_manager.py b/FitnessSync/backend/src/services/postgresql_manager.py new file mode 100644 index 0000000..6184252 --- /dev/null +++ b/FitnessSync/backend/src/services/postgresql_manager.py @@ -0,0 +1,47 @@ +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker +from sqlalchemy.pool import QueuePool +import os +from contextlib import contextmanager + +# Create a base class for declarative models +Base = declarative_base() + +class PostgreSQLManager: + def __init__(self, database_url: str = None): + if database_url is None: + database_url = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/fitbit_garmin_sync") + + self.engine = create_engine( + database_url, + 
poolclass=QueuePool, + pool_size=10, + max_overflow=20, + pool_pre_ping=True, + pool_recycle=300, + ) + self.SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=self.engine) + + def init_db(self): + """Initialize the database by creating all tables.""" + # Import all models to ensure they're registered with the Base + from ..models.config import Configuration + from ..models.api_token import APIToken + from ..models.auth_status import AuthStatus + from ..models.weight_record import WeightRecord + from ..models.activity import Activity + from ..models.health_metric import HealthMetric + from ..models.sync_log import SyncLog + + # Create all tables + Base.metadata.create_all(bind=self.engine) + + @contextmanager + def get_db_session(self): + """Provide a database session.""" + db = self.SessionLocal() + try: + yield db + finally: + db.close() \ No newline at end of file diff --git a/FitnessSync/backend/src/services/sync_app.py b/FitnessSync/backend/src/services/sync_app.py new file mode 100644 index 0000000..25274cb --- /dev/null +++ b/FitnessSync/backend/src/services/sync_app.py @@ -0,0 +1,322 @@ +from ..models.weight_record import WeightRecord +from ..models.sync_log import SyncLog +from ..services.fitbit_client import FitbitClient +from ..services.garmin.client import GarminClient +from sqlalchemy.orm import Session +from datetime import datetime, timedelta +from typing import Dict +import logging +from ..utils.helpers import setup_logger + +logger = setup_logger(__name__) + +class SyncApp: + def __init__(self, db_session: Session, fitbit_client: FitbitClient, garmin_client: GarminClient): + self.db_session = db_session + self.fitbit_client = fitbit_client + self.garmin_client = garmin_client + + def sync_weight_data(self, start_date: str = None, end_date: str = None) -> Dict[str, int]: + """Sync weight data from Fitbit to Garmin.""" + if not start_date: + # Default to 1 year back + start_date = (datetime.now() - 
timedelta(days=365)).strftime('%Y-%m-%d') + + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + # Create a sync log entry + sync_log = SyncLog( + operation="weight_sync", + status="started", + start_time=datetime.now(), + records_processed=0, + records_failed=0 + ) + self.db_session.add(sync_log) + self.db_session.commit() + + try: + # Fetch unsynced weight records from Fitbit + fitbit_weights = self.fitbit_client.get_weight_logs(start_date, end_date) + + # Track processing results + processed_count = 0 + failed_count = 0 + + for weight_entry in fitbit_weights: + try: + # Check if this weight entry already exists in our DB (prevents duplicates) + fitbit_id = weight_entry.get('logId', str(weight_entry.get('date', '') + str(weight_entry.get('weight', 0)))) + + existing_record = self.db_session.query(WeightRecord).filter( + WeightRecord.fitbit_id == fitbit_id + ).first() + + if existing_record and existing_record.sync_status == 'synced': + # Skip if already synced + continue + + # Create or update weight record + if not existing_record: + weight_record = WeightRecord( + fitbit_id=fitbit_id, + weight=weight_entry.get('weight'), + unit=weight_entry.get('unit', 'kg'), + date=datetime.fromisoformat(weight_entry.get('date')) if isinstance(weight_entry.get('date'), str) else weight_entry.get('date'), + timestamp=datetime.fromisoformat(weight_entry.get('date')) if isinstance(weight_entry.get('date'), str) else weight_entry.get('date'), + sync_status='unsynced' + ) + self.db_session.add(weight_record) + self.db_session.flush() # Get the ID + else: + weight_record = existing_record + + # Upload to Garmin if not already synced + if weight_record.sync_status != 'synced': + # Upload weight to Garmin + success = self.garmin_client.upload_weight( + weight=weight_record.weight, + unit=weight_record.unit, + timestamp=weight_record.timestamp + ) + + if success: + weight_record.sync_status = 'synced' + weight_record.garmin_id = "garmin_" + fitbit_id # Placeholder 
for Garmin ID + else: + weight_record.sync_status = 'failed' + failed_count += 1 + + processed_count += 1 + + except Exception as e: + logger.error(f"Error processing weight entry: {str(e)}") + failed_count += 1 + + # Update sync log with results + sync_log.status = "completed" if failed_count == 0 else "completed_with_errors" + sync_log.end_time = datetime.now() + sync_log.records_processed = processed_count + sync_log.records_failed = failed_count + + self.db_session.commit() + + logger.info(f"Weight sync completed: {processed_count} processed, {failed_count} failed") + return { + "processed": processed_count, + "failed": failed_count + } + + except Exception as e: + logger.error(f"Error during weight sync: {str(e)}") + + # Update sync log with error status + sync_log.status = "failed" + sync_log.end_time = datetime.now() + sync_log.message = str(e) + + self.db_session.commit() + raise e + + def sync_activities(self, days_back: int = 30) -> Dict[str, int]: + """Sync activity data from Garmin to local storage.""" + start_date = (datetime.now() - timedelta(days=days_back)).strftime('%Y-%m-%d') + end_date = datetime.now().strftime('%Y-%m-%d') + + # Create a sync log entry + sync_log = SyncLog( + operation="activity_archive", + status="started", + start_time=datetime.now(), + records_processed=0, + records_failed=0 + ) + self.db_session.add(sync_log) + self.db_session.commit() + + try: + # Fetch activities from Garmin + garmin_activities = self.garmin_client.get_activities(start_date, end_date) + + processed_count = 0 + failed_count = 0 + + from ..models.activity import Activity + for activity in garmin_activities: + try: + activity_id = str(activity.get('activityId', '')) + existing_activity = self.db_session.query(Activity).filter( + Activity.garmin_activity_id == activity_id + ).first() + + if existing_activity and existing_activity.download_status == 'downloaded': + # Skip if already downloaded + continue + + # Create or update activity record + if not 
existing_activity: + activity_record = Activity( + garmin_activity_id=activity_id, + activity_name=activity.get('activityName', ''), + activity_type=activity.get('activityType', ''), + start_time=datetime.fromisoformat(activity.get('startTimeLocal', '')) if activity.get('startTimeLocal') else None, + duration=activity.get('duration', 0), + download_status='pending' + ) + self.db_session.add(activity_record) + self.db_session.flush() + else: + activity_record = existing_activity + + # Download activity file if not already downloaded + if activity_record.download_status != 'downloaded': + # Download in various formats + file_formats = ['tcx', 'gpx', 'fit'] + downloaded_successfully = False + + for fmt in file_formats: + try: + # Get file content from Garmin client + file_content = self.garmin_client.download_activity(activity_id, file_type=fmt) + if file_content: + # Store file content directly in the database + activity_record.file_content = file_content + activity_record.file_type = fmt + activity_record.download_status = 'downloaded' + activity_record.downloaded_at = datetime.now() + downloaded_successfully = True + break + except Exception as e: + logger.warning(f"Could not download activity {activity_id} in {fmt} format: {str(e)}") + continue + + if not downloaded_successfully: + activity_record.download_status = 'failed' + failed_count += 1 + + processed_count += 1 + + except Exception as e: + logger.error(f"Error processing activity {activity.get('activityId', '')}: {str(e)}") + failed_count += 1 + + # Update sync log with results + sync_log.status = "completed" if failed_count == 0 else "completed_with_errors" + sync_log.end_time = datetime.now() + sync_log.records_processed = processed_count + sync_log.records_failed = failed_count + + self.db_session.commit() + + logger.info(f"Activity sync completed: {processed_count} processed, {failed_count} failed") + return { + "processed": processed_count, + "failed": failed_count + } + + except Exception as e: + 
logger.error(f"Error during activity sync: {str(e)}") + + # Update sync log with error status + sync_log.status = "failed" + sync_log.end_time = datetime.now() + sync_log.message = str(e) + + self.db_session.commit() + raise e + + def sync_health_metrics(self, start_date: str = None, end_date: str = None) -> Dict[str, int]: + """Sync health metrics from Garmin to local database.""" + if not start_date: + # Default to 1 year back + start_date = (datetime.now() - timedelta(days=365)).strftime('%Y-%m-%d') + + if not end_date: + end_date = datetime.now().strftime('%Y-%m-%d') + + # Create a sync log entry + sync_log = SyncLog( + operation="metrics_download", + status="started", + start_time=datetime.now(), + records_processed=0, + records_failed=0 + ) + self.db_session.add(sync_log) + self.db_session.commit() + + try: + # Fetch all metrics from Garmin + all_metrics = self.garmin_client.get_all_metrics(start_date, end_date) + + processed_count = 0 + failed_count = 0 + + from ..models.health_metric import HealthMetric + # Process heart rate data + heart_rates = all_metrics.get('heart_rates', {}) + if 'heartRateValues' in heart_rates: + for hr_data in heart_rates['heartRateValues']: + try: + timestamp = datetime.fromisoformat(hr_data[0]) if isinstance(hr_data[0], str) else datetime.fromtimestamp(hr_data[0]/1000) + metric = HealthMetric( + metric_type='heart_rate', + metric_value=hr_data[1], + unit='bpm', + timestamp=timestamp, + date=timestamp.date(), + source='garmin', + detailed_data=None + ) + self.db_session.add(metric) + processed_count += 1 + except Exception as e: + logger.error(f"Error processing heart rate data: {str(e)}") + failed_count += 1 + + # Process other metrics similarly... 
+ # For brevity, I'll show just one more example + sleep_data = all_metrics.get('sleep_data', {}) + sleep_levels = sleep_data.get('sleep', []) + for sleep_entry in sleep_levels: + try: + metric = HealthMetric( + metric_type='sleep', + metric_value=sleep_entry.get('duration', 0), + unit='minutes', + timestamp=datetime.now(), # Actual timestamp would come from data + date=datetime.now().date(), # Actual date would come from data + source='garmin', + detailed_data=sleep_entry + ) + self.db_session.add(metric) + processed_count += 1 + except Exception as e: + logger.error(f"Error processing sleep data: {str(e)}") + failed_count += 1 + + # Update sync log with results + sync_log.status = "completed" if failed_count == 0 else "completed_with_errors" + sync_log.end_time = datetime.now() + sync_log.records_processed = processed_count + sync_log.records_failed = failed_count + + self.db_session.commit() + + logger.info(f"Health metrics sync completed: {processed_count} processed, {failed_count} failed") + return { + "processed": processed_count, + "failed": failed_count + } + + except Exception as e: + logger.error(f"Error during health metrics sync: {str(e)}") + + # Update sync log with error status + sync_log.status = "failed" + sync_log.end_time = datetime.now() + sync_log.message = str(e) + + self.db_session.commit() + raise e \ No newline at end of file diff --git a/FitnessSync/backend/src/utils/__init__.py b/FitnessSync/backend/src/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/__init__.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90baf4107bef60094bda2aa28bf33444e6076950 GIT binary patch literal 170 zcmey&%ge<81X_)rnIQTxh=2h`DC08=kTI1Zok5e)ZzV$!6Oi{ABz4PEKO;XkRlm5n zBsH%@KcFZ-D>b>KSl=zPB%~-YIok-t%1bRS4zA2g)=vV8q~@jQ7Z)Y#mzHGa6zj*w qXXa&=#K-FuRNmsS$<0qG%}KQ@Vg;HEvbh+<_{hx2$XLV-WB~y2gDSfK 
literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/utils/__pycache__/config.cpython-313.pyc b/FitnessSync/backend/src/utils/__pycache__/config.cpython-313.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb44aa072213199cb6d5dc5e4f5623b1e052b1c2 GIT binary patch literal 2143 zcmaJ?&2JM&6rWwMKNBZT?1b>4vVn%g2)q$V)GBHrj$?uo@`2YWAY83B&c<1~*>z^t zpyZTFl^Us+fP`?l5OG26rPuxm9F6t>9YCV$iCa((oGSIr`lBf-9?Ngu%zMB0-oE!{ z<{%snAsE9yWOTn5p}*LoF<|YEgGHDgAQ{QrEW&K&XSo?3^CIItvz{3N3nI!3edrXD zg=0wej`20z(UE^EmJjs53Z=TS};^X3U#PE`Ra>< zUNHOsdj>)HNqz4aSHt2^zAYbM zIRK2eCC1mpv>(FwT4MZipcz-#jmzIsKiKrqamYuoB_`CwbRNQlT4LJd_GVlWH?B5U zzw6Yea~7668knZX?tB;IE!@&qryObEvP|)#(P$UBYy1jAIF|1Li=9kS?#}m&@^bVT zx-H;dIfjoEd*Z#+m$J-Nz4(q5ZEK{qC}Wj1m+1DCXj>w&Ev$)}c}K^VxvrVEct^#$ zx@>49LEER2x#VP0PAf~UXL3ti6kX{@$mDM|b#L&E;tsh$gj6`Zb3F*%Vt9AR{1V@RM)*eCXj~f- zQPC3@w#L3u4IOR+O|;ikTg2K%S;yMS|5r{_p{9_O1P*}Rt-(2+5Zx@cMvaTPI8XgW z%~ow2(-1f;YiW!v9QROxXvQiHH{2@FB!qexWxvj%9H7+pD)CiHG5hPY0&0LWPE=XltH;7bldo@woq=7s2FQuKeo=r41PJk=&1(0p@ z_UK@>Z)hj{Dk|=zUPq5rMqfk+oan&oUa@+5=Gn-zfolJv(|fHNx%S?RV*UF*6p0-G z6#W91Y#$9Mim9$^ib6w*vTm)E4aT=C%0^i=>XJ@HS=BMI4c*jC3&eg!S+NSh@D`yy zNE93!H^bCJAY-7fnx+NmgA+y+1qZ<$X8FPFRl#kHuxlBI8R-B*J-TV*PR4l+>#l}z z7vqIx%Q7(gT-u{3gjp;otSb6)+18lplOV&NF>;&{_8_=~wIlf($dBkx;cNKnqsJcy z9|iXk)eE-MSKhw5d)g7ous;}osz1@I!;{XLR8@%Wu2smrbw^0OJbUi9_^oY{*zk#pNqI}p^lKO)iH$%mQynab+ksuMf!L=rVg z0szsThw=OI%JklKr~CYi?vdx+BTo0&@4e5vFKwrHMjheOzTor5DxZHqFx4`{k`njh z0hp-I-YmfhaH~Pvny;!xumEB=b<|hu!H;rF17Fqp<02jgRd$|90?59H7fK6rS~t?RD0F;5Z~9giQ#k-88XUpaMjTgh~&~o0 z;{$R}Mf4DmipVj&!L3I)m7aP{;;4=mP$M;$-hxtk?whqYP8+1CBkjI7^M2<2eEW7M z5a>X#eqX(!^|=xHgKeDUX)!xBU>+kG$=nnojJl?{s~q7Z#`9CIt32Uto}0o1%iJtV z*bbl4|io{Nir zGCyC&IZMbXrfO;>mGQ~4sTsObw6Ly}R7)rt`OIytsAl=rwT!Uq*nVf7W0~A!XZA|D z9Ij0VAs3nw?J8DiN)WT)OELdLPZODq_5ZGih7RwKj zx9tM5NmQle2z`0gyk=))37~h4MQ3#=xm-&j?j;QAgu&8s+reAD+q+SCTM1jCRs}N7 zfrrHmmGFY0)LhnNqF|KN1f`~`n~Cehm{YSRO^j>i3{kRoK4F{=6_FS8Y~mK2sCq6z 
zNj6b2wIWRv)MA;{w!C2HR4HegcaWoS2MdtCfL9G|g^x7C18d=dmBCa!EZ44VdPA$; zBOB4)rPE8p%XlSn^1kp|=vWjVh(BC3p6`7~-YMb8lF0}1J6;wS(q)AE(s^+g6j#@a)>;RUS`Ba0UwT&#)Sls#=kqx3uAccy zDQfJOBbf!ol$^vRRiD?0p_f=cpa`0BtEf^*Au5|bW7XKSMk&mJ0}RefRo6$-_)OeQ z*q_1jKx@FW3%}BI%?NzAnKL0v&XmR1e!ct`M-uL%5{M`-z@m2rvW&St+&|5bO zANyNCLg#BCuo6D|yz}Rt7d>K|9(p3Uk literal 0 HcmV?d00001 diff --git a/FitnessSync/backend/src/utils/config.py b/FitnessSync/backend/src/utils/config.py new file mode 100644 index 0000000..d65bbd6 --- /dev/null +++ b/FitnessSync/backend/src/utils/config.py @@ -0,0 +1,37 @@ +import os +from typing import Optional +from dataclasses import dataclass + +@dataclass +class Config: + """Configuration class to store environment variables.""" + + # Database + DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/fitbit_garmin_sync") + + # Fitbit + FITBIT_CLIENT_ID: Optional[str] = os.getenv("FITBIT_CLIENT_ID") + FITBIT_CLIENT_SECRET: Optional[str] = os.getenv("FITBIT_CLIENT_SECRET") + FITBIT_REDIRECT_URI: str = os.getenv("FITBIT_REDIRECT_URI", "http://localhost:8000/api/setup/fitbit/callback") + + # Garmin + GARMIN_USERNAME: Optional[str] = os.getenv("GARMIN_USERNAME") + GARMIN_PASSWORD: Optional[str] = os.getenv("GARMIN_PASSWORD") + + # Server + HOST: str = os.getenv("HOST", "0.0.0.0") + PORT: int = int(os.getenv("PORT", "8000")) + + # App + DEBUG: bool = os.getenv("DEBUG", "False").lower() == "true" + + def validate(self) -> bool: + """Validate that required environment variables are set.""" + required_vars = ['DATABASE_URL'] + for var in required_vars: + if not getattr(self, var): + raise ValueError(f"Missing required environment variable: {var}") + return True + +# Create a global config instance +config = Config() \ No newline at end of file diff --git a/FitnessSync/backend/src/utils/helpers.py b/FitnessSync/backend/src/utils/helpers.py new file mode 100644 index 0000000..53aff98 --- /dev/null +++ b/FitnessSync/backend/src/utils/helpers.py @@ 
-0,0 +1,36 @@ +import logging +from datetime import datetime +from typing import Optional +import os + +def setup_logger(name: str, level=logging.DEBUG): + """Function to setup a logger that writes to the console.""" + formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s') + + console_handler = logging.StreamHandler() + console_handler.setFormatter(formatter) + + logger = logging.getLogger(name) + logger.setLevel(level) + + if not logger.handlers: + logger.addHandler(console_handler) + + return logger + +def get_current_timestamp() -> str: + """Get current timestamp in ISO format.""" + return datetime.utcnow().isoformat() + +def validate_environment_vars(required_vars: list) -> bool: + """Validate that required environment variables are set.""" + missing_vars = [] + for var in required_vars: + if not os.getenv(var): + missing_vars.append(var) + + if missing_vars: + print(f"Missing required environment variables: {', '.join(missing_vars)}") + return False + + return True \ No newline at end of file diff --git a/FitnessSync/backend/templates/index.html b/FitnessSync/backend/templates/index.html new file mode 100644 index 0000000..937406f --- /dev/null +++ b/FitnessSync/backend/templates/index.html @@ -0,0 +1,308 @@ + + + + Fitbit-Garmin Sync Dashboard + + + + + +
+

Fitbit-Garmin Sync Dashboard

+ +
+
+
+
+
Weight Records
+

Total: 0

+

Synced: 0

+

Unsynced: 0

+
+
+
+
+
+
+
Activities
+

Total: 0

+

Downloaded: 0

+
+
+
+
+
+
+
Sync Status
+
+ + +
+
+
+
+
+ +
+
+

Recent Sync Logs

+
+ + + + + + + + + + + + + + + +
OperationStatusStart TimeRecords ProcessedRecords Failed
Loading logs...
+
+
+
+ +
+
+

Health Metrics

+
+
+
+ + + +
+
+
+
+
+ +
+
+

Activity Files

+
+
+
+ + +
+
+
+
+
+ + +
+ +
+
+

Health Metrics

+
+
+
+ + + +
+
+
+
+
+ +
+
+

Activity Files

+
+
+
+ + +
+
+
+
+
+ + + + + + \ No newline at end of file diff --git a/FitnessSync/backend/templates/setup.html b/FitnessSync/backend/templates/setup.html new file mode 100644 index 0000000..a9ba32d --- /dev/null +++ b/FitnessSync/backend/templates/setup.html @@ -0,0 +1,335 @@ + + + + Fitbit-Garmin Sync - Setup + + + + + +
+

Fitbit-Garmin Sync - Setup

+ + +
+
+
+
+
Current Status
+
+

Loading status...

+
+
+
+
+
+ +
+
+
+
+
Garmin Connect Credentials
+
+
+ + +
+
+ + +
+
+ + +
+ +
+ + +
+

Loading Garmin authentication status...

+
+ + + +
+
+
+ +
+
+
+
Fitbit API Credentials
+
+
+ + +
+
+ + +
+ +
+
+ +
+ + +
+

Loading Fitbit authentication status...

+
+
+
+
+
+ +
+
+
+
+
Complete Fitbit OAuth Flow
+
+
+ + +
+ +
+
+
+
+
+
+ + + + + \ No newline at end of file diff --git a/FitnessSync/docker-compose.yml b/FitnessSync/docker-compose.yml new file mode 100644 index 0000000..9f1c011 --- /dev/null +++ b/FitnessSync/docker-compose.yml @@ -0,0 +1,30 @@ +version: '3.8' + +services: + app: + build: . + ports: + - "8000:8000" + environment: + - DATABASE_URL=postgresql://postgres:password@db:5432/fitbit_garmin_sync + - FITBIT_CLIENT_ID=${FITBIT_CLIENT_ID:-} + - FITBIT_CLIENT_SECRET=${FITBIT_CLIENT_SECRET:-} + - FITBIT_REDIRECT_URI=${FITBIT_REDIRECT_URI:-http://localhost:8000/api/setup/fitbit/callback} + depends_on: + - db + volumes: + - ./logs:/app/logs # For application logs + + db: + image: postgres:15 + environment: + - POSTGRES_DB=fitbit_garmin_sync + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=password + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + +volumes: + postgres_data: \ No newline at end of file diff --git a/FitnessSync/fitbitsync.txt b/FitnessSync/fitbitsync.txt new file mode 100644 index 0000000..5a7b98c --- /dev/null +++ b/FitnessSync/fitbitsync.txt @@ -0,0 +1,1200 @@ +================================================================================ +FILE PATH: fitbit_garmin_sync/requirements.txt +================================================================================ + +fitbit==0.3.1 +garminconnect==0.2.30 +garth==0.5.17 +schedule==1.2.2 +python-consul + + +================================================================================ +FILE PATH: fitbit_garmin_sync/Dockerfile +================================================================================ + +FROM python:3.13-slim +WORKDIR /app +ARG COMMIT_SHA +ENV GIT_SHA=${COMMIT_SHA} +COPY requirements.txt . +RUN pip install --upgrade pip; pip install --no-cache-dir --upgrade -r requirements.txt +COPY . . 
+VOLUME /app/data +ENTRYPOINT ["python", "fitbitsync.py"] +CMD ["schedule"] + +================================================================================ +FILE PATH: fitbit_garmin_sync/fitbit-garmin-sync.nomad +================================================================================ + +variable "container_version" { + default = "latest" +} +job "fitbit-garmin-sync" { + datacenters = ["dc1"] + type = "service" + group "sync" { + count = 1 + task "fitbit-garmin-sync" { + driver = "docker" + config { + image = "gitea.service.dc1.fbleagh.duckdns.org/sstent/fitbit_garmin_sync:${var.container_version}" + volumes = [ + "/mnt/Public/configs/fitbit-garmin-sync:/app/data" + ] + memory_hard_limit = 2048 + } + env { + CONFIG_SOURCE = "consul" + } + resources { + cpu = 100 # MHz + memory = 128 # MB + } + } + } +} + + +================================================================================ +FILE PATH: fitbit_garmin_sync/fitbitsync.py +================================================================================ + +import base64 +import sys +import asyncio +import json +import logging +from datetime import datetime, timedelta, timezone +from typing import List, Dict, Optional, Tuple +from dataclasses import dataclass, asdict +import hashlib +import time +import webbrowser +from urllib.parse import urlparse, parse_qs +try: + import fitbit + FITBIT_LIBRARY = True +except ImportError: + FITBIT_LIBRARY = False +try: + import garth + GARTH_LIBRARY = True +except ImportError: + GARTH_LIBRARY = False +try: + import garminconnect + GARMINCONNECT_LIBRARY = True +except ImportError: + GARMINCONNECT_LIBRARY = False +try: + import consul + CONSUL_LIBRARY = True +except ImportError: + CONSUL_LIBRARY = False +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + handlers=[logging.StreamHandler()] +) +logger = logging.getLogger(__name__) +@dataclass +class WeightRecord: + """Represents a weight measurement""" + 
timestamp: datetime + weight_kg: float + source: str = "fitbit" + sync_id: Optional[str] = None + + def __post_init__(self): + if self.sync_id is None: + unique_string = f"{self.timestamp.isoformat()}_{self.weight_kg}" + self.sync_id = hashlib.md5(unique_string.encode()).hexdigest() +class ConsulManager: + """Manages all configuration and state in Consul K/V store""" + + def __init__(self, host: str = "localhost", port: int = 8500, prefix: str = "fitbit-garmin-sync"): + if not CONSUL_LIBRARY: + raise ImportError("python-consul library not installed. Please install it with: pip install python-consul") + + self.client = consul.Consul(host=host, port=port) + self.prefix = prefix.strip('/') + self.config_key = f"{self.prefix}/config" + self.records_prefix = f"{self.prefix}/records/" + self.logs_prefix = f"{self.prefix}/logs/" + + logger.info(f"Using Consul at {host}:{port} with prefix '{self.prefix}'") + + self._ensure_config_exists() + + def _ensure_config_exists(self): + """Ensure configuration exists in Consul with defaults""" + index, data = self.client.kv.get(self.config_key) + + if not data: + logger.info("No configuration found in Consul, creating defaults...") + default_config = { + "fitbit": { + "client_id": "", + "client_secret": "", + "access_token": "", + "refresh_token": "", + "redirect_uri": "http://localhost:8080/fitbit-callback" + }, + "garmin": { + "username": "", + "password": "", + "is_china": False, + "garth_oauth1_token": "", + "garth_oauth2_token": "" + }, + "sync": { + "sync_interval_minutes": 60, + "lookback_days": 7, + "max_retries": 3, + "read_only_mode": False + } + } + self._save_config(default_config) + + def _save_config(self, config: Dict): + """Save configuration to Consul""" + self.client.kv.put(self.config_key, json.dumps(config)) + logger.info("Configuration saved to Consul") + + def get_config(self) -> Dict: + """Get configuration from Consul""" + index, data = self.client.kv.get(self.config_key) + + if not data or not 
data.get('Value'): + logger.error("No configuration found in Consul") + return {} + + try: + decoded_json_str = data['Value'].decode('utf-8') + except UnicodeDecodeError: + encoded_value = data['Value'] + padding_needed = len(encoded_value) % 4 + if padding_needed != 0: + encoded_value += b'=' * (4 - padding_needed) + decoded_json_str = base64.b64decode(encoded_value).decode('utf-8') + + return json.loads(decoded_json_str) + + def update_config(self, section: str, updates: Dict): + """Update a section of the configuration""" + config = self.get_config() + + if section not in config: + config[section] = {} + + config[section].update(updates) + self._save_config(config) + + def get_config_value(self, path: str, default=None): + """Get a configuration value using dot notation""" + config = self.get_config() + keys = path.split('.') + value = config + + for key in keys: + if isinstance(value, dict): + value = value.get(key, {}) + else: + return default + + return value if value != {} else default + + def save_weight_record(self, record: WeightRecord) -> bool: + """Save weight record to Consul if it doesn't exist""" + key = f"{self.records_prefix}{record.sync_id}" + + try: + index, data = self.client.kv.get(key) + if data is not None: + return False + + record_data = asdict(record) + record_data['timestamp'] = record.timestamp.isoformat() + record_data['synced_to_garmin'] = False + + self.client.kv.put(key, json.dumps(record_data)) + return True + except Exception as e: + logger.error(f"Error saving weight record to Consul: {e}") + return False + + def get_unsynced_records(self) -> List[WeightRecord]: + """Get records from Consul that haven't been synced to Garmin""" + records = [] + + try: + index, keys = self.client.kv.get(self.records_prefix, keys=True) + if not keys: + return [] + + logger.info(f"Scanning {len(keys)} records from Consul to find unsynced items") + + for key in keys: + index, data = self.client.kv.get(key) + if data and data.get('Value'): + try: + 
record_data = json.loads(data['Value']) + if not record_data.get('synced_to_garmin'): + record = WeightRecord( + sync_id=record_data['sync_id'], + timestamp=datetime.fromisoformat(record_data['timestamp']), + weight_kg=record_data['weight_kg'], + source=record_data['source'] + ) + records.append(record) + except (json.JSONDecodeError, KeyError) as e: + logger.warning(f"Could not parse record from key {key}: {e}") + except Exception as e: + logger.error(f"Error getting unsynced records: {e}") + + records.sort(key=lambda r: r.timestamp, reverse=True) + return records + + def mark_synced(self, sync_id: str) -> bool: + """Mark a record as synced to Garmin""" + key = f"{self.records_prefix}{sync_id}" + + try: + for _ in range(5): + index, data = self.client.kv.get(key) + if data is None: + logger.warning(f"Cannot mark sync_id {sync_id} as synced: record not found") + return False + + record_data = json.loads(data['Value']) + record_data['synced_to_garmin'] = True + + success = self.client.kv.put(key, json.dumps(record_data), cas=data['ModifyIndex']) + if success: + return True + time.sleep(0.1) + + logger.error(f"Failed to mark record {sync_id} as synced after retries") + return False + except Exception as e: + logger.error(f"Error marking record as synced: {e}") + return False + + def log_sync(self, sync_type: str, status: str, message: str = "", records_processed: int = 0): + """Log sync operation to Consul""" + log_entry = { + "sync_type": sync_type, + "status": status, + "message": message, + "records_processed": records_processed, + "timestamp": datetime.now(timezone.utc).isoformat() + } + key = f"{self.logs_prefix}{log_entry['timestamp']}" + + try: + self.client.kv.put(key, json.dumps(log_entry)) + except Exception as e: + logger.error(f"Error logging sync: {e}") + + def reset_sync_status(self) -> int: + """Reset all records to unsynced status""" + affected_rows = 0 + + try: + index, keys = self.client.kv.get(self.records_prefix, keys=True) + if not keys: + return 
0 + + logger.info(f"Resetting sync status for {len(keys)} records...") + + for key in keys: + try: + for _ in range(3): + index, data = self.client.kv.get(key) + if data and data.get('Value'): + record_data = json.loads(data['Value']) + if record_data.get('synced_to_garmin'): + record_data['synced_to_garmin'] = False + success = self.client.kv.put(key, json.dumps(record_data), cas=data['ModifyIndex']) + if success: + affected_rows += 1 + break + else: + break + except Exception as e: + logger.warning(f"Failed to reset sync status for key {key}: {e}") + + return affected_rows + except Exception as e: + logger.error(f"Error resetting sync status: {e}") + return 0 + + def get_status_info(self) -> Dict: + """Get status info from Consul""" + status_info = { + "total_records": 0, + "synced_records": 0, + "unsynced_records": 0, + "recent_syncs": [], + "recent_records": [] + } + + try: + index, keys = self.client.kv.get(self.records_prefix, keys=True) + if keys: + status_info['total_records'] = len(keys) + synced_count = 0 + all_records = [] + + for key in keys: + index, data = self.client.kv.get(key) + if data and data.get('Value'): + record_data = json.loads(data['Value']) + all_records.append(record_data) + if record_data.get('synced_to_garmin'): + synced_count += 1 + + status_info['synced_records'] = synced_count + status_info['unsynced_records'] = status_info['total_records'] - synced_count + + all_records.sort(key=lambda r: r.get('timestamp', ''), reverse=True) + for record in all_records[:5]: + status_info['recent_records'].append(( + record['timestamp'], + record['weight_kg'], + record['source'], + record['synced_to_garmin'] + )) + + index, log_keys = self.client.kv.get(self.logs_prefix, keys=True) + if log_keys: + log_keys.sort(reverse=True) + for key in log_keys[:5]: + index, data = self.client.kv.get(key) + if data and data.get('Value'): + log_data = json.loads(data['Value']) + status_info['recent_syncs'].append(( + log_data['timestamp'], + log_data['status'], + 
log_data['message'], + log_data['records_processed'] + )) + except Exception as e: + logger.error(f"Error getting status info: {e}") + + return status_info +class FitbitClient: + """Client for Fitbit API using python-fitbit""" + + def __init__(self, consul: ConsulManager): + self.consul = consul + self.client = None + + if not FITBIT_LIBRARY: + raise ImportError("python-fitbit library not installed. Install with: pip install fitbit") + + async def authenticate(self) -> bool: + """Authenticate with Fitbit API""" + try: + config = self.consul.get_config() + fitbit_config = config.get('fitbit', {}) + + client_id = fitbit_config.get('client_id') + client_secret = fitbit_config.get('client_secret') + + if not client_id or not client_secret: + logger.info("No Fitbit credentials found in Consul") + if not self._setup_credentials(): + return False + + config = self.consul.get_config() + fitbit_config = config.get('fitbit', {}) + client_id = fitbit_config.get('client_id') + client_secret = fitbit_config.get('client_secret') + + access_token = fitbit_config.get('access_token') + refresh_token = fitbit_config.get('refresh_token') + + if access_token and refresh_token: + try: + self.client = fitbit.Fitbit( + client_id, + client_secret, + access_token=access_token, + refresh_token=refresh_token, + refresh_cb=self._token_refresh_callback + ) + + profile = self.client.user_profile_get() + logger.info(f"Authenticated with existing tokens for: {profile['user']['displayName']}") + return True + except Exception as e: + logger.warning(f"Existing tokens invalid: {e}") + self.consul.update_config('fitbit', {'access_token': '', 'refresh_token': ''}) + + return await self._oauth_flow(client_id, client_secret) + + except Exception as e: + logger.error(f"Fitbit authentication error: {e}") + return False + + def _setup_credentials(self) -> bool: + """Setup Fitbit credentials interactively""" + if not sys.stdout.isatty(): + logger.error("Cannot prompt for credentials in non-interactive 
environment") + return False + + print("\n🔑 Fitbit API Credentials Setup") + print("=" * 40) + print("To get your Fitbit API credentials:") + print("1. Go to https://dev.fitbit.com/apps") + print("2. Create a new app or use an existing one") + print("3. Copy the Client ID and Client Secret") + print("4. Set OAuth 2.0 Application Type to 'Personal'") + print("5. Set Callback URL to: http://localhost:8080/fitbit-callback") + print() + + client_id = input("Enter your Fitbit Client ID: ").strip() + if not client_id: + print("❌ Client ID cannot be empty") + return False + + import getpass + client_secret = getpass.getpass("Enter your Fitbit Client Secret: ").strip() + if not client_secret: + print("❌ Client Secret cannot be empty") + return False + + self.consul.update_config('fitbit', { + 'client_id': client_id, + 'client_secret': client_secret + }) + + print("✅ Credentials saved to Consul") + return True + + async def _oauth_flow(self, client_id: str, client_secret: str) -> bool: + """Perform OAuth 2.0 authorization flow""" + if not sys.stdout.isatty(): + logger.error("Cannot perform OAuth flow in non-interactive environment") + return False + + try: + config = self.consul.get_config() + redirect_uri = config.get('fitbit', {}).get('redirect_uri') + + from fitbit.api import FitbitOauth2Client + + auth_client = FitbitOauth2Client(client_id, client_secret, redirect_uri=redirect_uri) + auth_url, _ = auth_client.authorize_token_url() + + print("\n🔐 Fitbit OAuth Authorization") + print("=" * 40) + print("Opening your browser for Fitbit authorization...") + print(f"If it doesn't open automatically, visit: {auth_url}") + print("\nAfter authorizing, copy the FULL URL from your browser's address bar.") + print() + + try: + webbrowser.open(auth_url) + except Exception as e: + logger.warning(f"Could not open browser: {e}") + + callback_url = input("After authorization, paste the full callback URL here: ").strip() + + if not callback_url: + print("❌ Callback URL cannot be empty") 
+ return False + + parsed_url = urlparse(callback_url) + query_params = parse_qs(parsed_url.query) + + if 'code' not in query_params: + print("❌ No authorization code found in callback URL") + return False + + auth_code = query_params['code'][0] + token = auth_client.fetch_access_token(auth_code) + + self.consul.update_config('fitbit', { + 'client_id': client_id, + 'client_secret': client_secret, + 'access_token': token['access_token'], + 'refresh_token': token['refresh_token'] + }) + + self.client = fitbit.Fitbit( + client_id, + client_secret, + access_token=token['access_token'], + refresh_token=token['refresh_token'], + refresh_cb=self._token_refresh_callback + ) + + profile = self.client.user_profile_get() + print(f"✅ Successfully authenticated for user: {profile['user']['displayName']}") + logger.info(f"Successfully authenticated for user: {profile['user']['displayName']}") + + return True + + except Exception as e: + logger.error(f"OAuth flow failed: {e}") + print(f"❌ OAuth authentication failed: {e}") + return False + + def _token_refresh_callback(self, token): + """Callback for when tokens are refreshed""" + logger.info("Fitbit tokens refreshed") + config = self.consul.get_config() + fitbit_config = config.get('fitbit', {}) + + self.consul.update_config('fitbit', { + 'client_id': fitbit_config.get('client_id'), + 'client_secret': fitbit_config.get('client_secret'), + 'access_token': token['access_token'], + 'refresh_token': token['refresh_token'] + }) + + async def get_weight_data(self, start_date: datetime, end_date: datetime) -> List[WeightRecord]: + """Fetch weight data from Fitbit API""" + if not self.client: + logger.error("Fitbit client not authenticated") + return [] + + logger.info(f"Fetching weight data from {start_date.date()} to {end_date.date()}") + records = [] + + try: + start_date_str = start_date.strftime("%Y-%m-%d") + end_date_str = end_date.strftime("%Y-%m-%d") + + weight_data = self.client.get_bodyweight( + base_date=start_date_str, + 
end_date=end_date_str + ) + + weight_entries = None + if weight_data: + if 'weight' in weight_data: + weight_entries = weight_data['weight'] + elif 'body-weight' in weight_data: + weight_entries = weight_data['body-weight'] + + if weight_entries: + logger.info(f"Processing {len(weight_entries)} weight entries") + + for weight_entry in weight_entries: + try: + date_str = weight_entry['date'] + time_str = weight_entry.get('time', '00:00:00') + datetime_str = f"{date_str} {time_str}" + timestamp = datetime.strptime(datetime_str, "%Y-%m-%d %H:%M:%S") + timestamp = timestamp.replace(tzinfo=timezone.utc) + + weight_lbs = float(weight_entry['weight']) + weight_kg = weight_lbs * 0.453592 + + record = WeightRecord( + timestamp=timestamp, + weight_kg=weight_kg, + source="fitbit" + ) + records.append(record) + + logger.info(f"Found weight: {weight_lbs}lbs ({weight_kg:.1f}kg) at {timestamp}") + + except Exception as e: + logger.warning(f"Failed to parse weight entry: {e}") + continue + + logger.info(f"Retrieved {len(records)} weight records from Fitbit") + + except Exception as e: + logger.error(f"Error fetching Fitbit weight data: {e}") + + return records +class GarminClient: + """Client for Garmin Connect using garminconnect library""" + + def __init__(self, consul: ConsulManager): + self.consul = consul + self.garmin_client = None + + try: + import garminconnect + self.garminconnect = garminconnect + except ImportError: + raise ImportError("garminconnect library not installed. 
Install with: pip install garminconnect") + + async def authenticate(self) -> bool: + """Authenticate with Garmin Connect""" + config = self.consul.get_config() + + if config.get('sync', {}).get('read_only_mode', False): + logger.info("Running in read-only mode - skipping Garmin authentication") + return True + + try: + garmin_config = config.get('garmin', {}) + username = garmin_config.get('username') + password = garmin_config.get('password') + is_china = garmin_config.get('is_china', False) + + if not username or not password: + logger.info("No Garmin credentials found in Consul") + if not self._setup_credentials(): + return False + + config = self.consul.get_config() + garmin_config = config.get('garmin', {}) + username = garmin_config.get('username') + password = garmin_config.get('password') + + if is_china: + garth.configure(domain="garmin.cn") + + tokens_loaded = self._load_garth_tokens() + + if not tokens_loaded: + logger.info("No existing Garmin tokens, performing fresh login...") + garth.login(username, password) + self._save_garth_tokens() + + self.garmin_client = self.garminconnect.Garmin(username, password) + self.garmin_client.garth = garth.client + + profile = self.garmin_client.get_full_name() + logger.info(f"Successfully authenticated with Garmin for: {profile}") + return True + + except Exception as e: + logger.error(f"Garmin authentication error: {e}") + return False + + def _setup_credentials(self) -> bool: + """Setup Garmin credentials interactively""" + if not sys.stdout.isatty(): + logger.error("Cannot prompt for credentials in non-interactive environment") + return False + + print("\n🔑 Garmin Connect Credentials Setup") + print("=" * 40) + + username = input("Enter your Garmin Connect username/email: ").strip() + if not username: + print("❌ Username cannot be empty") + return False + + import getpass + password = getpass.getpass("Enter your Garmin Connect password: ").strip() + if not password: + print("❌ Password cannot be empty") + return 
False + + self.consul.update_config('garmin', { + 'username': username, + 'password': password + }) + + print("✅ Credentials saved to Consul") + return True + + def _save_garth_tokens(self): + """Save garth tokens to Consul""" + try: + oauth1_token = garth.client.oauth1_token + oauth2_token = garth.client.oauth2_token + + updates = {} + + if oauth1_token: + token_dict = oauth1_token.__dict__ + for k, v in token_dict.items(): + if isinstance(v, datetime): + token_dict[k] = v.isoformat() + updates['garth_oauth1_token'] = json.dumps(token_dict) + logger.info("Saved OAuth1 token to Consul") + + if oauth2_token: + token_dict = oauth2_token.__dict__ + for k, v in token_dict.items(): + if isinstance(v, datetime): + token_dict[k] = v.isoformat() + updates['garth_oauth2_token'] = json.dumps(token_dict) + logger.info("Saved OAuth2 token to Consul") + + if updates: + self.consul.update_config('garmin', updates) + + except Exception as e: + logger.warning(f"Failed to save garth tokens: {e}") + + def _load_garth_tokens(self) -> bool: + """Load garth tokens from Consul""" + try: + config = self.consul.get_config() + garmin_config = config.get('garmin', {}) + + oauth1_json = garmin_config.get('garth_oauth1_token') + oauth2_json = garmin_config.get('garth_oauth2_token') + + if not oauth1_json: + logger.info("No OAuth1 token found in Consul") + return False + + oauth1_token = json.loads(oauth1_json) + oauth2_token = json.loads(oauth2_json) if oauth2_json else None + + garth.client.oauth1_token = oauth1_token + if oauth2_token: + garth.client.oauth2_token = oauth2_token + + logger.info("Successfully loaded Garmin tokens from Consul") + return True + + except Exception as e: + logger.warning(f"Failed to load garth tokens: {e}") + return False + + async def upload_weight_data(self, records: List[WeightRecord]) -> Tuple[int, int]: + """Upload weight records to Garmin""" + config = self.consul.get_config() + read_only_mode = config.get('sync', {}).get('read_only_mode', False) + + if 
read_only_mode: + logger.info(f"Read-only mode: Would upload {len(records)} weight records") + for record in records: + logger.info(f"Read-only mode: Would upload {record.weight_kg}kg at {record.timestamp}") + return len(records), 0 + + if not self.garmin_client: + logger.error("Garmin client not authenticated") + return 0, len(records) + + success_count = 0 + total_count = len(records) + + for record in records: + try: + success = await self._upload_weight(record) + + if success: + success_count += 1 + logger.info(f"Successfully uploaded: {record.weight_kg}kg at {record.timestamp}") + else: + logger.error(f"Failed to upload: {record.weight_kg}kg at {record.timestamp}") + + await asyncio.sleep(2) + + except Exception as e: + logger.error(f"Error uploading weight record: {e}") + + return success_count, total_count - success_count + + async def _upload_weight(self, record: WeightRecord) -> bool: + """Upload weight using garminconnect library""" + try: + date_str = record.timestamp.strftime("%Y-%m-%d") + logger.info(f"Uploading weight: {record.weight_kg}kg on {date_str}") + + timestamp_str = record.timestamp.isoformat() + + try: + result = self.garmin_client.add_body_composition( + timestamp=record.timestamp, + weight=record.weight_kg + ) + except Exception as e1: + try: + result = self.garmin_client.add_body_composition( + timestamp=timestamp_str, + weight=record.weight_kg + ) + except Exception as e2: + try: + result = self.garmin_client.add_body_composition( + timestamp=date_str, + weight=record.weight_kg + ) + except Exception as e3: + if hasattr(self.garmin_client, 'set_body_composition'): + result = self.garmin_client.set_body_composition( + timestamp=record.timestamp, + weight=record.weight_kg + ) + elif hasattr(self.garmin_client, 'add_weigh_in'): + result = self.garmin_client.add_weigh_in( + weight=record.weight_kg, + date=date_str + ) + else: + raise Exception("No suitable weight upload method found") + + if result: + logger.info("Upload successful") + 
return True + else: + logger.error("Upload returned no result") + return False + + except Exception as e: + logger.error(f"Upload error: {e}") + + if "401" in str(e) or "unauthorized" in str(e).lower(): + logger.error("Authentication failed - attempting re-authentication") + try: + self.garmin_client.login() + self._save_garth_tokens() + + result = self.garmin_client.add_body_composition( + timestamp=record.timestamp, + weight=record.weight_kg + ) + + if result: + logger.info("Upload successful after re-authentication") + return True + + except Exception as re_auth_error: + logger.error(f"Re-authentication failed: {re_auth_error}") + return False + + elif "429" in str(e) or "rate" in str(e).lower(): + logger.error("Rate limit exceeded - wait 1-2 hours") + return False + + elif "duplicate" in str(e).lower() or "already exists" in str(e).lower(): + logger.warning(f"Weight already exists for {date_str}") + return True + + return False +class WeightSyncApp: + """Main application class""" + + def __init__(self, consul_host: str = "localhost", consul_port: int = 8500, + consul_prefix: str = "fitbit-garmin-sync"): + self.consul = ConsulManager(consul_host, consul_port, consul_prefix) + self.fitbit = FitbitClient(self.consul) + self.garmin = GarminClient(self.consul) + + async def setup(self): + """Setup and authenticate with services""" + logger.info("Setting up Weight Sync Application...") + + if not await self.fitbit.authenticate(): + logger.error("Failed to authenticate with Fitbit") + return False + + if not await self.garmin.authenticate(): + config = self.consul.get_config() + if not config.get('sync', {}).get('read_only_mode', False): + logger.error("Failed to authenticate with Garmin") + return False + + logger.info("Setup completed successfully") + return True + + async def sync_weight_data(self) -> bool: + """Perform weight data synchronization""" + try: + logger.info("Starting weight data sync...") + + config = self.consul.get_config() + read_only_mode = 
config.get('sync', {}).get('read_only_mode', False) + + if read_only_mode: + logger.info("Running in read-only mode") + + lookback_days = config.get('sync', {}).get('lookback_days', 7) + end_date = datetime.now(timezone.utc) + start_date = end_date - timedelta(days=lookback_days) + + fitbit_records = await self.fitbit.get_weight_data(start_date, end_date) + + new_records = 0 + for record in fitbit_records: + if self.consul.save_weight_record(record): + new_records += 1 + + logger.info(f"Processed {new_records} new weight records") + + unsynced_records = self.consul.get_unsynced_records() + + if not unsynced_records: + logger.info("No unsynced records found") + self.consul.log_sync("weight_sync", "success", "No records to sync", 0) + return True + + success_count, failed_count = await self.garmin.upload_weight_data(unsynced_records) + + synced_count = 0 + for i in range(success_count): + record_to_mark = unsynced_records[i] + if self.consul.mark_synced(record_to_mark.sync_id): + synced_count += 1 + + mode_prefix = "(Read-only) " if read_only_mode else "" + message = f"{mode_prefix}Synced {synced_count} records, {failed_count} failed" + status = "success" if failed_count == 0 else "partial" + self.consul.log_sync("weight_sync", status, message, synced_count) + + logger.info(f"Sync completed: {message}") + return True + + except Exception as e: + error_msg = f"Sync failed: {e}" + logger.error(error_msg) + self.consul.log_sync("weight_sync", "error", error_msg, 0) + return False + + async def force_full_sync(self, days: int = 365): + """Perform full sync with custom lookback period""" + try: + logger.info(f"Starting FULL sync (looking back {days} days)...") + + config = self.consul.get_config() + read_only_mode = config.get('sync', {}).get('read_only_mode', False) + + if read_only_mode: + logger.info("Running in read-only mode") + + end_date = datetime.now(timezone.utc) + start_date = end_date - timedelta(days=days) + + logger.info(f"Fetching Fitbit data from 
{start_date.date()} to {end_date.date()}") + + fitbit_records = await self.fitbit.get_weight_data(start_date, end_date) + + if not fitbit_records: + logger.warning("No weight records found") + print("❌ No weight records found") + return False + + logger.info(f"Found {len(fitbit_records)} weight records") + print(f"📊 Found {len(fitbit_records)} weight records") + + new_records = 0 + for record in fitbit_records: + if self.consul.save_weight_record(record): + new_records += 1 + + print(f"💾 Found {new_records} new records to sync") + + unsynced_records = self.consul.get_unsynced_records() + + if not unsynced_records: + print("✅ All records are already synced") + return True + + print(f"🔄 Found {len(unsynced_records)} records to sync to Garmin") + + success_count, failed_count = await self.garmin.upload_weight_data(unsynced_records) + + synced_count = 0 + for i in range(success_count): + record_to_mark = unsynced_records[i] + if self.consul.mark_synced(record_to_mark.sync_id): + synced_count += 1 + + mode_prefix = "(Read-only) " if read_only_mode else "" + message = f"{mode_prefix}Full sync: {synced_count} synced, {failed_count} failed" + status = "success" if failed_count == 0 else "partial" + self.consul.log_sync("full_sync", status, message, synced_count) + + print(f"✅ Full sync completed: {synced_count} synced, {failed_count} failed") + return True + + except Exception as e: + error_msg = f"Full sync failed: {e}" + logger.error(error_msg) + self.consul.log_sync("full_sync", "error", error_msg, 0) + print(f"❌ Full sync failed: {e}") + return False + + def reset_sync_status(self): + """Reset all records to unsynced status""" + try: + affected_rows = self.consul.reset_sync_status() + logger.info(f"Reset sync status for {affected_rows} records") + print(f"🔄 Reset sync status for {affected_rows} records") + print(" All records will be synced again on next sync") + return True + except Exception as e: + logger.error(f"Error resetting sync status: {e}") + print(f"❌ Error 
resetting sync status: {e}") + return False + + async def manual_sync(self): + """Perform manual sync""" + success = await self.sync_weight_data() + if success: + print("✅ Manual sync completed successfully") + else: + print("❌ Manual sync failed - check logs") + + def show_status(self): + """Show application status""" + try: + config = self.consul.get_config() + read_only_mode = config.get('sync', {}).get('read_only_mode', False) + status_info = self.consul.get_status_info() + + print("\n📊 Weight Sync Status") + print("=" * 50) + print(f"Mode: {'Read-only (No Garmin uploads)' if read_only_mode else 'Full sync mode'}") + print(f"Backend: Consul K/V Store") + print(f"Total weight records: {status_info['total_records']}") + print(f"Synced to Garmin: {status_info['synced_records']}") + print(f"Pending sync: {status_info['unsynced_records']}") + + print(f"\n📜 Recent Sync History:") + if status_info['recent_syncs']: + for sync in status_info['recent_syncs']: + status_emoji = "✅" if sync[1] == "success" else "⚠️" if sync[1] == "partial" else "❌" + print(f" {status_emoji} {sync[0]} - {sync[1]} - {sync[2]} ({sync[3]} records)") + else: + print(" No sync history found") + + if status_info['recent_records']: + print(f"\n📈 Recent Weight Records:") + for record in status_info['recent_records']: + sync_status = "✅" if record[3] else "⏳" + timestamp = datetime.fromisoformat(record[0]) + print(f" {sync_status} {timestamp.strftime('%Y-%m-%d %H:%M')}: {record[1]}kg ({record[2]})") + + except Exception as e: + print(f"❌ Error getting status: {e}") + + def toggle_read_only_mode(self): + """Toggle read-only mode""" + config = self.consul.get_config() + current_mode = config.get('sync', {}).get('read_only_mode', False) + new_mode = not current_mode + + self.consul.update_config('sync', {'read_only_mode': new_mode}) + + mode_text = "enabled" if new_mode else "disabled" + print(f"✅ Read-only mode {mode_text}") + print(f" {'Will NOT upload to Garmin' if new_mode else 'Will upload to 
Garmin'}") + + async def start_scheduler(self): + """Start the sync scheduler""" + config = self.consul.get_config() + sync_interval = config.get('sync', {}).get('sync_interval_minutes', 60) + + logger.info(f"Starting scheduler with {sync_interval} minute interval") + logger.info("Running initial sync...") + + await self.sync_weight_data() + + logger.info(f"Scheduled syncs will run every {sync_interval} minutes") + + while True: + try: + await asyncio.sleep(sync_interval * 60) + logger.info("Running scheduled sync...") + await self.sync_weight_data() + except Exception as e: + logger.error(f"Error in scheduled sync: {e}") + await asyncio.sleep(60) # Wait a minute before retrying +async def main(): + """Main application entry point""" + import os + + consul_host = os.getenv('CONSUL_HOST', 'consul.service.dc1.consul') + consul_port = int(os.getenv('CONSUL_PORT', '8500')) + consul_prefix = os.getenv('CONSUL_PREFIX', 'fitbit-garmin-sync') + + logger.info(f"Connecting to Consul at {consul_host}:{consul_port}") + logger.info(f"Using Consul prefix: {consul_prefix}") + + app = WeightSyncApp(consul_host, consul_port, consul_prefix) + + if len(sys.argv) > 1: + command = sys.argv[1].lower() + + if command == "setup": + success = await app.setup() + if success: + print("✅ Setup completed successfully") + else: + print("❌ Setup failed") + + elif command == "sync": + await app.setup() + await app.manual_sync() + + elif command == "status": + app.show_status() + + elif command == "reset": + app.reset_sync_status() + + elif command == "fullsync": + days = 365 + if len(sys.argv) > 2: + try: + days = int(sys.argv[2]) + except ValueError: + print("❌ Invalid number of days. 
Using default 365.") + + await app.setup() + await app.force_full_sync(days) + + elif command == "readonly": + app.toggle_read_only_mode() + + elif command == "schedule": + await app.setup() + try: + config = app.consul.get_config() + read_only_mode = config.get('sync', {}).get('read_only_mode', False) + sync_interval = config.get('sync', {}).get('sync_interval_minutes', 60) + print("🚀 Starting scheduled sync...") + print(f"⏰ Sync interval: {sync_interval} minutes") + if read_only_mode: + print("📖 Running in read-only mode") + print("Press Ctrl+C to stop") + await app.start_scheduler() + except KeyboardInterrupt: + print("\n👋 Scheduler stopped") + + else: + print("❓ Unknown command. Available commands:") + print(" setup - Initial setup and authentication") + print(" sync - Run manual sync") + print(" status - Show sync status") + print(" reset - Reset sync status for all records") + print(" fullsync [days] - Full sync with custom lookback (default: 365)") + print(" readonly - Toggle read-only mode") + print(" schedule - Start scheduled sync") + else: + print("🏃 Weight Sync Application (Consul-Only)") + print("Syncs weight data from Fitbit API to Garmin Connect") + print("All state and configuration stored in Consul K/V store") + print("\nRun with 'python fitbitsync.py '") + print("\nAvailable commands:") + print(" setup - Initial setup and authentication") + print(" sync - Run manual sync") + print(" status - Show sync status") + print(" reset - Reset sync status for all records") + print(" fullsync [days] - Full sync with custom lookback") + print(" readonly - Toggle read-only mode") + print(" schedule - Start scheduled sync") + print("\n💡 Tips:") + print(" - All configuration is stored in Consul") + print(" - Set CONSUL_HOST, CONSUL_PORT, CONSUL_PREFIX env vars to override defaults") + print(" - Use 'readonly' to toggle between read-only and full sync mode") + print(" - First run 'setup' to configure API credentials") + + config = app.consul.get_config() + 
read_only_mode = config.get('sync', {}).get('read_only_mode', False) + if read_only_mode: + print("\n📖 Currently in READ-ONLY mode") + else: + print("\n🔄 Currently in FULL SYNC mode") +if __name__ == "__main__": + asyncio.run(main()) + diff --git a/FitnessSync/requirements.txt b/FitnessSync/requirements.txt new file mode 100644 index 0000000..7392245 --- /dev/null +++ b/FitnessSync/requirements.txt @@ -0,0 +1,17 @@ +fastapi==0.104.1 +uvicorn[standard]==0.24.0 +garminconnect==0.2.30 +garth==0.5.17 +fitbit==0.3.1 +sqlalchemy==2.0.23 +asyncpg==0.29.0 +psycopg2-binary==2.9.9 +jinja2==3.1.2 +python-dotenv==1.0.0 +pydantic==2.1.1 +requests==2.31.0 +httpx==0.25.2 +aiofiles==23.2.1 +pytest==7.4.3 +pytest-asyncio==0.21.1 +alembic==1.13.1 \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/checklists/requirements.md b/FitnessSync/specs/001-fitbit-garmin-sync/checklists/requirements.md new file mode 100644 index 0000000..d6b668b --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/checklists/requirements.md @@ -0,0 +1,34 @@ +# Specification Quality Checklist: Fitbit-Garmin Local Sync + +**Purpose**: Validate specification completeness and quality before proceeding to planning +**Created**: December 22, 2025 +**Feature**: [Link to spec.md](../spec.md) + +## Content Quality + +- [x] No implementation details (languages, frameworks, APIs) +- [x] Focused on user value and business needs +- [x] Written for non-technical stakeholders +- [x] All mandatory sections completed + +## Requirement Completeness + +- [x] No [NEEDS CLARIFICATION] markers remain +- [x] Requirements are testable and unambiguous +- [x] Success criteria are measurable +- [x] Success criteria are technology-agnostic (no implementation details) +- [x] All acceptance scenarios are defined +- [x] Edge cases are identified +- [x] Scope is clearly bounded +- [x] Dependencies and assumptions identified + +## Feature Readiness + +- [x] All functional requirements have clear 
acceptance criteria +- [x] User scenarios cover primary flows +- [x] Feature meets measurable outcomes defined in Success Criteria +- [x] No implementation details leak into specification + +## Notes + +- All validation items have been checked and the specification is ready for planning. \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/contracts/api-contract.yaml b/FitnessSync/specs/001-fitbit-garmin-sync/contracts/api-contract.yaml new file mode 100644 index 0000000..f4b02c6 --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/contracts/api-contract.yaml @@ -0,0 +1,461 @@ +openapi: 3.0.0 +info: + title: Fitbit-Garmin Sync API + description: API for synchronizing health and fitness data between Fitbit and Garmin Connect platforms + version: 1.0.0 +servers: + - url: http://localhost:8000 + description: Development server + +paths: + /api/status: + get: + summary: Get current sync status + description: Provides JSON data for the status dashboard including sync counts and recent logs + responses: + '200': + description: Current sync status + content: + application/json: + schema: + type: object + properties: + total_weight_records: + type: integer + description: Total number of weight records + synced_weight_records: + type: integer + description: Number of synced weight records + unsynced_weight_records: + type: integer + description: Number of unsynced weight records + total_activities: + type: integer + description: Total number of activities + downloaded_activities: + type: integer + description: Number of downloaded activities + recent_logs: + type: array + items: + $ref: '#/components/schemas/SyncLog' + + /api/logs: + get: + summary: Get sync logs + description: Provides JSON data for the sync logs table + parameters: + - name: limit + in: query + schema: + type: integer + default: 20 + description: Number of log entries to return + - name: offset + in: query + schema: + type: integer + default: 0 + description: Offset 
for pagination + responses: + '200': + description: Array of sync log entries + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/SyncLog' + + /api/sync/weight: + post: + summary: Trigger weight sync + description: Starts the process of syncing weight data from Fitbit to Garmin + responses: + '200': + description: Weight sync initiated successfully + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: "started" + message: + type: string + example: "Weight sync process started" + job_id: + type: string + example: "weight-sync-12345" + + /api/sync/activities: + post: + summary: Trigger activity sync + description: Starts the process of archiving activities from Garmin to local storage + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + days_back: + type: integer + description: Number of days to look back for activities + example: 30 + responses: + '200': + description: Activity sync initiated successfully + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: "started" + message: + type: string + example: "Activity sync process started" + job_id: + type: string + example: "activity-sync-12345" + + /api/setup/garmin: + post: + summary: Save Garmin credentials + description: Saves Garmin credentials from the setup form + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + username: + type: string + description: Garmin Connect username + password: + type: string + description: Garmin Connect password + responses: + '200': + description: Garmin credentials saved successfully + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: "success" + message: + type: string + example: "Garmin credentials saved" + + /api/setup/fitbit: + post: + summary: Save Fitbit credentials + 
description: Saves Fitbit credentials and returns the auth URL + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + client_id: + type: string + description: Fitbit API client ID + client_secret: + type: string + description: Fitbit API client secret + responses: + '200': + description: Fitbit credentials saved and auth URL returned + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: "success" + auth_url: + type: string + example: "https://www.fitbit.com/oauth2/authorize?..." + message: + type: string + example: "Fitbit credentials saved, please visit auth_url to authorize" + + /api/setup/fitbit/callback: + post: + summary: Complete Fitbit OAuth flow + description: Completes the Fitbit OAuth flow with the callback URL + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + callback_url: + type: string + description: Full callback URL from the browser after authorizing the Fitbit app + responses: + '200': + description: Fitbit OAuth flow completed successfully + content: + application/json: + schema: + type: object + properties: + status: + type: string + example: "success" + message: + type: string + example: "Fitbit OAuth flow completed successfully" + + /api/metrics/list: + get: + summary: List available metric types + description: Returns a list of available metric types and date ranges + responses: + '200': + description: List of available metric types + content: + application/json: + schema: + type: object + properties: + metric_types: + type: array + items: + type: string + example: ["steps", "heart_rate", "sleep", "calories"] + date_range: + type: object + properties: + start_date: + type: string + format: date + end_date: + type: string + format: date + + /api/metrics/query: + get: + summary: Query health metrics + description: Allows filtering and retrieval of specific metrics by date range, type, or other 
criteria + parameters: + - name: metric_type + in: query + schema: + type: string + description: Type of metric to retrieve + - name: start_date + in: query + schema: + type: string + format: date + description: Start date for the query + - name: end_date + in: query + schema: + type: string + format: date + description: End date for the query + - name: limit + in: query + schema: + type: integer + default: 100 + description: Number of records to return + responses: + '200': + description: Array of health metrics + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/HealthMetric' + + /api/activities/list: + get: + summary: List available activities + description: Returns metadata for all downloaded/available activities + parameters: + - name: limit + in: query + schema: + type: integer + default: 50 + description: Number of activities to return + - name: offset + in: query + schema: + type: integer + default: 0 + description: Offset for pagination + responses: + '200': + description: Array of activity metadata + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Activity' + + /api/activities/query: + get: + summary: Query activities + description: Allows advanced filtering of activities by type, date, duration, etc. 
+ parameters: + - name: activity_type + in: query + schema: + type: string + description: Type of activity to filter + - name: start_date + in: query + schema: + type: string + format: date + description: Start date for the query + - name: end_date + in: query + schema: + type: string + format: date + description: End date for the query + - name: download_status + in: query + schema: + type: string + enum: [pending, downloaded, failed] + description: Download status to filter + responses: + '200': + description: Array of activities matching the filter + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/Activity' + + /api/health-data/summary: + get: + summary: Get health data summary + description: Provides aggregated health statistics + parameters: + - name: start_date + in: query + schema: + type: string + format: date + description: Start date for the summary + - name: end_date + in: query + schema: + type: string + format: date + description: End date for the summary + responses: + '200': + description: Aggregated health statistics + content: + application/json: + schema: + type: object + properties: + total_steps: + type: integer + avg_heart_rate: + type: number + format: float + total_sleep_hours: + type: number + format: float + avg_calories: + type: number + format: float + +components: + schemas: + SyncLog: + type: object + properties: + id: + type: integer + operation: + type: string + enum: [weight_sync, activity_archive, metrics_download] + status: + type: string + enum: [started, in_progress, completed, failed] + message: + type: string + start_time: + type: string + format: date-time + end_time: + type: string + format: date-time + records_processed: + type: integer + records_failed: + type: integer + + HealthMetric: + type: object + properties: + id: + type: integer + metric_type: + type: string + metric_value: + type: number + unit: + type: string + timestamp: + type: string + format: date-time + date: + 
type: string + format: date + source: + type: string + detailed_data: + type: object + + Activity: + type: object + properties: + id: + type: integer + garmin_activity_id: + type: string + activity_name: + type: string + activity_type: + type: string + start_time: + type: string + format: date-time + duration: + type: integer + file_path: + type: string + file_type: + type: string + download_status: + type: string + enum: [pending, downloaded, failed] + downloaded_at: + type: string + format: date-time \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/data-model.md b/FitnessSync/specs/001-fitbit-garmin-sync/data-model.md new file mode 100644 index 0000000..6fdf08d --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/data-model.md @@ -0,0 +1,117 @@ +# Data Model: Fitbit-Garmin Local Sync + +## Overview +This document defines the data models for the Fitbit-Garmin Local Sync application based on the key entities identified in the feature specification. 
+ +## Entity: Configuration +**Description**: Application settings including API credentials, sync settings, and database connection parameters +**Fields**: +- `id` (Integer): Unique identifier for the configuration record +- `fitbit_client_id` (String): Fitbit API client ID +- `fitbit_client_secret` (String): Fitbit API client secret (encrypted) +- `garmin_username` (String): Garmin Connect username +- `garmin_password` (String): Garmin Connect password (encrypted) +- `sync_settings` (JSON): Sync preferences and settings +- `created_at` (DateTime): Timestamp of creation +- `updated_at` (DateTime): Timestamp of last update + +## Entity: Weight Record +**Description**: Individual weight entries with timestamps, values, and sync status with unique identifiers to prevent duplicate processing +**Fields**: +- `id` (Integer): Unique identifier for the weight record +- `fitbit_id` (String): Original Fitbit ID for the weight entry +- `weight` (Float): Weight value in user's preferred units +- `unit` (String): Weight unit (e.g., 'kg', 'lbs') +- `date` (Date): Date of the weight measurement +- `timestamp` (DateTime): Exact timestamp of the measurement +- `sync_status` (String): Sync status ('unsynced', 'synced', 'failed') +- `garmin_id` (String, nullable): ID of the record if synced to Garmin +- `created_at` (DateTime): Timestamp of record creation +- `updated_at` (DateTime): Timestamp of last update + +## Entity: Activity Metadata +**Description**: Information about Garmin activities including download status, file content stored in database, and activity details +**Fields**: +- `id` (Integer): Unique identifier for the activity record +- `garmin_activity_id` (String): Original Garmin ID for the activity +- `activity_name` (String): Name of the activity +- `activity_type` (String): Type of activity (e.g., 'running', 'cycling') +- `start_time` (DateTime): Start time of the activity +- `duration` (Integer): Duration in seconds +- `file_content` (LargeBinary, nullable): 
Activity file content stored in database (base64 encoded) +- `file_type` (String): File type (.fit, .gpx, .tcx, etc.) +- `download_status` (String): Download status ('pending', 'downloaded', 'failed') +- `downloaded_at` (DateTime, nullable): Timestamp when downloaded +- `created_at` (DateTime): Timestamp of record creation +- `updated_at` (DateTime): Timestamp of last update + +## Entity: Health Metric +**Description**: Comprehensive health data including type, timestamp, values across categories (steps, calories, heart rate, sleep, etc.) +**Fields**: +- `id` (Integer): Unique identifier for the health metric record +- `metric_type` (String): Type of metric (e.g., 'steps', 'heart_rate', 'sleep', 'calories') +- `metric_value` (Float): Value of the metric +- `unit` (String): Unit of measurement +- `timestamp` (DateTime): When the metric was recorded +- `date` (Date): Date of the metric +- `source` (String): Source of the metric ('garmin') +- `detailed_data` (JSON, nullable): Additional details specific to the metric type +- `created_at` (DateTime): Timestamp of record creation +- `updated_at` (DateTime): Timestamp of last update + +## Entity: Sync Log +**Description**: Operation logs with timestamps, status, and results for monitoring and troubleshooting +**Fields**: +- `id` (Integer): Unique identifier for the sync log entry +- `operation` (String): Type of sync operation ('weight_sync', 'activity_archive', 'metrics_download') +- `status` (String): Status of the operation ('started', 'in_progress', 'completed', 'failed') +- `message` (String): Status message or error details +- `start_time` (DateTime): When the operation started +- `end_time` (DateTime, nullable): When the operation completed +- `records_processed` (Integer): Number of records processed +- `records_failed` (Integer): Number of records that failed +- `user_id` (Integer, nullable): Reference to user (if applicable) + +## Entity: API Token +**Description**: OAuth tokens for Fitbit and Garmin with 
expiration tracking and refresh mechanisms +**Fields**: +- `id` (Integer): Unique identifier for the token record +- `token_type` (String): Type of token ('fitbit', 'garmin') +- `access_token` (String): Access token (encrypted) +- `refresh_token` (String): Refresh token (encrypted) +- `expires_at` (DateTime): When the token expires +- `scopes` (String): OAuth scopes granted +- `last_used` (DateTime): When the token was last used +- `created_at` (DateTime): Timestamp of record creation +- `updated_at` (DateTime): Timestamp of last update + +## Entity: Auth Status +**Description**: Current authentication state for both Fitbit and Garmin, including token expiration times and last login information +**Fields**: +- `id` (Integer): Unique identifier for the auth status record +- `service_type` (String): Type of service ('fitbit', 'garmin') +- `username` (String): Username for the service (masked for security display) +- `authenticated` (Boolean): Whether currently authenticated +- `token_expires_at` (DateTime): When the current token expires +- `last_login` (DateTime): When the last successful login occurred +- `is_china` (Boolean): Whether using garmin.cn domain (Garmin only) +- `last_check` (DateTime): When status was last checked +- `created_at` (DateTime): Timestamp of record creation +- `updated_at` (DateTime): Timestamp of last update + +## Relationships +- Configuration has many API Tokens +- Authentication Status references API Tokens +- Sync Logs reference Configuration +- Weight Records may reference API Tokens for sync operations +- Activity Metadata may reference API Tokens for download operations +- Health Metrics may reference API Tokens for retrieval operations + +## Validation Rules +- Configuration records must have valid API credentials before sync operations +- Weight Records must have unique fitbit_id to prevent duplicates +- Activity Metadata records must have unique garmin_activity_id +- Health Metric records must have valid metric_type from allowed 
list +- Sync Log records must have valid operation and status values +- API Token records must be refreshed before expiration +- Authentication status must be updated when tokens are refreshed \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/plan.md b/FitnessSync/specs/001-fitbit-garmin-sync/plan.md new file mode 100644 index 0000000..1840b73 --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/plan.md @@ -0,0 +1,103 @@ +# Implementation Plan: Fitbit-Garmin Local Sync + +**Branch**: `001-fitbit-garmin-sync` | **Date**: December 22, 2025 | **Spec**: [spec.md](./spec.md) +**Input**: Feature specification from `/specs/001-fitbit-garmin-sync/spec.md` + +**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow. + +## Summary + +This feature implements a standalone Python application that synchronizes health and fitness data between Fitbit and Garmin Connect platforms. The primary requirements include: 1) Weight data synchronization from Fitbit to Garmin, 2) Activity file archiving from Garmin to local storage, and 3) Comprehensive health metrics download from Garmin to local database. The system uses a web interface with API endpoints for user interaction and operates with local-only data storage for privacy. 
+ +## Technical Context + +**Language/Version**: Python 3.11 +**Primary Dependencies**: FastAPI, uvicorn, garminconnect, garth, fitbit, SQLAlchemy, Jinja2, psycopg2 +**Storage**: PostgreSQL database for all data including configuration, health metrics, activity files, and authentication status information +**Testing**: pytest for unit and integration tests, contract tests for API endpoints +**Target Platform**: Linux server (containerized with Docker) +**Project Type**: Web application (backend API + web UI) +**Performance Goals**: Process 1000 activity files within 2 hours, sync weight data with 95% success rate, API responses under 3 seconds +**Constraints**: All sensitive data stored locally, offline-capable operation, secure storage of OAuth tokens +**Scale/Scope**: Single user system supporting personal health data synchronization + +## Constitution Check + +*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.* + +Based on the project constitution, this implementation needs to follow library-first principles, exposing functionality via web API. All new features should have tests written before implementation, with integration tests for API contracts and inter-service communication. The system must include structured logging and observability for debugging. 
+ +## Project Structure + +### Documentation (this feature) + +```text +specs/001-fitbit-garmin-sync/ +├── plan.md # This file (/speckit.plan command output) +├── research.md # Phase 0 output (/speckit.plan command) +├── data-model.md # Phase 1 output (/speckit.plan command) +├── quickstart.md # Phase 1 output (/speckit.plan command) +├── contracts/ # Phase 1 output (/speckit.plan command) +└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan) +``` + +### Source Code (repository root) + +```text +backend/ +├── main.py +├── src/ +│ ├── models/ +│ │ ├── __init__.py +│ │ ├── config.py +│ │ ├── weight_record.py +│ │ ├── activity.py +│ │ ├── health_metric.py +│ │ ├── sync_log.py +│ │ ├── api_token.py +│ │ └── auth_status.py +│ ├── services/ +│ │ ├── __init__.py +│ │ ├── fitbit_client.py +│ │ ├── garmin_client.py +│ │ ├── postgresql_manager.py +│ │ └── sync_app.py +│ ├── api/ +│ │ ├── __init__.py +│ │ ├── auth.py +│ │ ├── sync.py +│ │ ├── setup.py +│ │ └── metrics.py +│ └── utils/ +│ ├── __init__.py +│ └── helpers.py +├── templates/ +│ ├── index.html +│ └── setup.html +├── static/ +│ ├── css/ +│ └── js/ +├── requirements.txt +├── Dockerfile +└── docker-compose.yml + +tests/ +├── unit/ +│ ├── test_models/ +│ ├── test_services/ +│ └── test_api/ +├── integration/ +│ └── test_sync_flow.py +└── contract/ + └── test_api_contracts.py +``` + +**Structure Decision**: Web application structure selected to support the backend API and web UI requirements from the feature specification. The backend includes models for data representation, services for business logic, and API endpoints for user interaction. 
+ +## Complexity Tracking + +> **Fill ONLY if Constitution Check has violations that must be justified** + +| Violation | Why Needed | Simpler Alternative Rejected Because | +|-----------|------------|-------------------------------------| +| External API dependencies | Required for Fitbit and Garmin integration | Direct DB access insufficient for external services | \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/quickstart.md b/FitnessSync/specs/001-fitbit-garmin-sync/quickstart.md new file mode 100644 index 0000000..d6c1298 --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/quickstart.md @@ -0,0 +1,102 @@ +# Quickstart Guide: Fitbit-Garmin Local Sync + +## Prerequisites + +- Python 3.11+ +- PostgreSQL database +- Docker and Docker Compose (for containerized deployment) +- Fitbit Developer Account (to create an app and get API credentials) +- Garmin Connect Account + +## Setup + +### 1. Clone and Install Dependencies + +```bash +# Clone the repository +git clone +cd fitbit-garmin-sync + +# Create virtual environment +python -m venv venv +source venv/bin/activate # On Windows: venv\Scripts\activate + +# Install dependencies +pip install -r requirements.txt +``` + +### 2. Database Setup + +```bash +# Create PostgreSQL database +createdb fitbit_garmin_sync + +# Update database configuration in application +# The application will handle schema creation automatically +``` + +### 3. Environment Configuration + +Create a `.env` file with the following: + +```env +DATABASE_URL=postgresql://username:password@localhost:5432/fitbit_garmin_sync +FITBIT_CLIENT_ID=your_fitbit_client_id +FITBIT_CLIENT_SECRET=your_fitbit_client_secret +FITBIT_REDIRECT_URI=http://localhost:8000/api/setup/fitbit/callback +``` + +### 4. Run the Application + +```bash +# Using uvicorn directly +uvicorn main:app --host 0.0.0.0 --port 8000 + +# Or using Docker +docker-compose up --build +``` + +## Initial Configuration + +1. 
Open the application in your browser at `http://localhost:8000` +2. Navigate to the Setup page (`/setup`) +3. Enter your Garmin Connect username and password +4. Enter your Fitbit Client ID and Client Secret +5. Click the authorization link provided to authenticate with Fitbit +6. Copy the full callback URL from your browser after authorizing and paste it into the input field on the setup page + +## Using the Application + +### Sync Weight Data + +1. Go to the home page (`/`) +2. Click the "Sync Weight" button +3. Monitor the sync status in the logs table + +### Archive Activities + +1. Go to the home page (`/`) +2. Click the "Sync Activities" button +3. Enter the number of days back to look for activities +4. Monitor the sync status in the logs table + +### View Health Metrics + +1. Use the API endpoints to query health metrics: + - `/api/metrics/list` - List available metric types + - `/api/metrics/query` - Query specific metrics + - `/api/health-data/summary` - Get aggregated health statistics + +## Docker Deployment + +```bash +# Build and run with Docker Compose +docker-compose up --build + +# The application will be available at http://localhost:8000 +# PostgreSQL database will be automatically set up +``` + +## API Endpoints + +See the full API documentation in the `contracts/api-contract.yaml` file or access the automatic documentation at `/docs` when running the application. \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/research.md b/FitnessSync/specs/001-fitbit-garmin-sync/research.md new file mode 100644 index 0000000..607f0b0 --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/research.md @@ -0,0 +1,58 @@ +# Research: Fitbit-Garmin Local Sync + +## Overview +This document captures research findings for the Fitbit-Garmin Local Sync feature, addressing technology choices, best practices, and integration patterns. 
+ +## Decision: Python 3.11 as primary language +**Rationale**: Python is well-suited for API integrations and web applications. Version 3.11 offers performance improvements and is widely supported by the required libraries (FastAPI, garminconnect, fitbit). +**Alternatives considered**: Node.js/JavaScript, Go, Rust - Python has the most mature ecosystem for health data API integrations. + +## Decision: FastAPI for web framework +**Rationale**: FastAPI provides automatic API documentation (OpenAPI), type validation, asynchronous support, and excellent performance. It's ideal for both the API endpoints and web UI rendering. +**Alternatives considered**: Flask (less modern features), Django (too heavy for this use case), Starlette (requires more manual work). + +## Decision: garminconnect and garth libraries for Garmin integration +**Rationale**: garminconnect is the most actively maintained Python library for Garmin Connect API. garth handles authentication, including the complex authentication flow for Garmin's API. +**Alternatives considered**: Custom HTTP requests implementation (more error-prone), selenium for web scraping (against ToS and less reliable). + +## Decision: python-fitbit library for Fitbit integration +**Rationale**: The widely used python-fitbit library provides proper OAuth 2.0 handling and is actively maintained by the community (note: it is a third-party library, not an official Fitbit product). It includes all necessary endpoints for weight data retrieval. +**Alternatives considered**: Direct API calls with requests library (would require more OAuth management code). + +## Decision: PostgreSQL for data storage +**Rationale**: PostgreSQL provides ACID compliance, robustness, and complex query capabilities needed for health metrics. It supports the data types needed for timestamps and metric values. +**Alternatives considered**: SQLite (simpler but less scalable), MongoDB (document-based which may not suit structured health data), MySQL (similar capabilities but PostgreSQL has better JSON support).
+ +## Decision: SQLAlchemy as ORM +**Rationale**: SQLAlchemy provides database abstraction, migration support, and protection against SQL injection. It works well with FastAPI and supports asynchronous operations. +**Alternatives considered**: Peewee (simpler but less feature-rich), Django ORM (requires Django framework), direct database connectors (more error-prone). + +## Decision: Docker for deployment +**Rationale**: Docker provides consistent deployment across environments, easy dependency management, and isolation. It's the standard for modern application deployment. +**Alternatives considered**: Direct installation on host system (harder to manage dependencies), virtual environments (doesn't solve system-level dependency issues). + +## Decision: Jinja2 for templating +**Rationale**: Jinja2 is the standard Python templating engine, supported by FastAPI. It provides the right balance of functionality and simplicity for the web interface. +**Alternatives considered**: Mako, Chameleon (less common), building HTML responses directly (not maintainable). + +## Authentication Research +- **Fitbit OAuth 2.0**: Requires app registration with Fitbit, supports refresh tokens for long-term access +- **Garmin authentication**: Uses garth library to handle OAuth 1.0a/2.0 hybrid, stores session tokens for reuse +- **Multi-Factor Authentication (MFA)**: Garmin may require MFA for accounts with enhanced security. 
The garth library handles MFA flows by prompting for verification codes when required +- **Security**: Both systems support proper token refresh and secure storage + +## API Rate Limiting Considerations +- **Fitbit**: Has rate limits (150 req/hour for user endpoints) - need to implement backoff/retry logic +- **Garmin**: No official rate limits published, but need to be respectful to avoid being blocked +- **Best practice**: Implement exponential backoff and caching to minimize API calls + +## Data Synchronization Strategy +- **Deduplication**: Use unique identifiers and timestamps to prevent duplicate processing +- **State tracking**: Store sync status in database to enable resumption of interrupted operations +- **Conflict resolution**: For weight data, prefer Fitbit as source of truth since the feature is to sync FROM Fitbit TO Garmin + +## Error Handling Approach +- **Network errors**: Retry with exponential backoff +- **Authentication errors**: Detect and re-authenticate automatically +- **API errors**: Log with context and allow user to retry operations +- **Storage errors**: Validate disk space before downloading activity files \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/spec.md b/FitnessSync/specs/001-fitbit-garmin-sync/spec.md new file mode 100644 index 0000000..11db2dc --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/spec.md @@ -0,0 +1,105 @@ +# Feature Specification: Fitbit-Garmin Local Sync + +**Feature Branch**: `001-fitbit-garmin-sync` +**Created**: December 22, 2025 +**Status**: Draft +**Input**: User description: "Fitbit-Garmin Local Sync application to synchronize health and fitness data between Fitbit and Garmin Connect platforms" + +## User Scenarios & Testing *(mandatory)* + +### User Story 1 - Sync Weight Data from Fitbit to Garmin (Priority: P1) + +A user with both Fitbit and Garmin devices wants to transfer their weight data from Fitbit to Garmin in a secure, automated way. 
The user accesses the web interface, triggers the sync process, and the system safely transfers their weight history from Fitbit to Garmin without duplicating entries. + +**Why this priority**: This is the foundational functionality that provides immediate value to users with both platforms, solving a common data silo problem. + +**Independent Test**: Can be fully tested by configuring Fitbit and Garmin credentials, triggering a weight sync, and verifying weight entries appear in Garmin without duplication. Delivers core value of unified health data. + +**Acceptance Scenarios**: + +1. **Given** user has configured Fitbit and Garmin credentials, **When** user clicks "Sync Weight" button, **Then** system fetches weight history from Fitbit and uploads to Garmin, tracking synced entries to prevent duplicates +2. **Given** user has previously synced weight data, **When** user runs sync again, **Then** system only uploads new weight entries that haven't been synced before + +--- + +### User Story 2 - Archive Activity Files from Garmin (Priority: P2) + +A user wants to preserve their historical activity data from Garmin by downloading original files (.fit, .gpx, .tcx) and storing them in the database. The user accesses the web interface, triggers the activity archiving process, and the system downloads activity files and stores them in the PostgreSQL database with proper organization. The system handles multi-factor authentication flows required by Garmin. + +**Why this priority**: Provides backup and archival capabilities that users value for data preservation and analysis, building on the core sync functionality. + +**Independent Test**: Can be tested by triggering the activity archiving process and verifying original activity files are downloaded and stored in the database with proper organization. + +**Acceptance Scenarios**: + +1. 
**Given** user has configured Garmin credentials, **When** user clicks "Archive Activities" button, **Then** system fetches activity list and downloads original files stored in database +2. **Given** user has previously downloaded activity files, **When** user runs archiving again, **Then** system only downloads activities that haven't been downloaded before + +--- + +### User Story 3 - Download Comprehensive Health Metrics from Garmin (Priority: P3) + +A user wants to store all available health metrics from Garmin for local analysis and backup. The user accesses the web interface, triggers the metrics download process, and the system retrieves a comprehensive range of health metrics (steps, calories, heart rate, sleep, etc.) and stores them in a local database. + +**Why this priority**: Provides complete health data backup and analysis capabilities, extending beyond basic sync to comprehensive data management. + +**Independent Test**: Can be tested by triggering the metrics download and verifying that various types of health metrics are stored in the database with proper timestamps and types. + +**Acceptance Scenarios**: + +1. **Given** user has configured Garmin credentials, **When** user triggers health metrics download, **Then** system retrieves and stores comprehensive health metrics in local database +2. **Given** user wants to query health metrics, **When** user makes API call to metrics endpoint, **Then** system returns requested metrics filtered by date range and type + +--- + +### Edge Cases + +- What happens when API rate limits are exceeded during sync operations? +- How does system handle authentication token expiration during long-running sync processes? +- What occurs when local storage is insufficient for activity file downloads? +- How does system handle duplicate data detection when timestamps are slightly different? 
+ +## Requirements *(mandatory)* + +### Functional Requirements + +- **FR-001**: System MUST authenticate with both Fitbit (OAuth 2.0) and Garmin Connect (username/password with garth library) APIs +- **FR-002**: System MUST securely store API credentials, tokens, and configuration in a PostgreSQL database +- **FR-003**: System MUST synchronize weight data from Fitbit to Garmin, including historical records +- **FR-004**: System MUST track synced entries to prevent duplicate uploads +- **FR-005**: System MUST download original activity files (.fit, .gpx, .tcx) from Garmin and store them in the PostgreSQL database +- **FR-006**: System MUST download comprehensive health metrics from Garmin including daily summaries, heart rate, sleep data, stress levels, and body composition +- **FR-007**: System MUST provide a web-based user interface with status dashboard and sync controls +- **FR-008**: System MUST provide API endpoints for triggering sync operations and querying data +- **FR-009**: System MUST implement robust error handling and retry mechanisms for API failures +- **FR-010**: System MUST support both global (garmin.com) and China (garmin.cn) Garmin domains +- **FR-011**: System MUST automatically refresh OAuth tokens when they expire +- **FR-012**: System MUST log all sync operations with timestamps, status, and results +- **FR-013**: System MUST allow users to trigger sync operations through UI buttons +- **FR-014**: System MUST support querying health metrics by date range, type, and other criteria +- **FR-015**: System MUST implement ACID-compliant data storage for consistency and integrity +- **FR-016**: System MUST handle multi-factor authentication flows for Garmin Connect when required by user's account settings +- **FR-017**: System MUST provide current authentication status and token expiration information in the web UI +- **FR-018**: System MUST securely store and manage Garmin OAuth tokens and session information + +### Key Entities + +- 
**Configuration**: Application settings including API credentials, sync settings, and database connection parameters +- **Weight Record**: Individual weight entries with timestamps, values, and sync status with unique identifiers to prevent duplicate processing +- **Activity Metadata**: Information about Garmin activities including download status, file content stored in database, and activity details +- **Health Metric**: Comprehensive health data including type, timestamp, values across categories (steps, calories, heart rate, sleep, etc.) +- **Sync Log**: Operation logs with timestamps, status, and results for monitoring and troubleshooting +- **API Token**: OAuth tokens for Fitbit and Garmin with expiration tracking and refresh mechanisms +- **Authentication Status**: Current authentication state for both Fitbit and Garmin, including token expiration times and last login information + +## Success Criteria *(mandatory)* + +### Measurable Outcomes + +- **SC-001**: Users can successfully sync weight data from Fitbit to Garmin with 95% success rate for valid entries +- **SC-002**: System can download and archive 1000 activity files without errors within 2 hours +- **SC-003**: System can retrieve and store comprehensive health metrics for a full year of data within 4 hours +- **SC-004**: All sensitive data (credentials, tokens, health stats) remains stored locally without external cloud services +- **SC-005**: User can complete setup and authentication for both Fitbit and Garmin within 10 minutes +- **SC-006**: System prevents duplicate data uploads/downloads with 99.9% accuracy +- **SC-007**: Web interface loads and responds to user actions within 3 seconds under normal conditions \ No newline at end of file diff --git a/FitnessSync/specs/001-fitbit-garmin-sync/tasks.md b/FitnessSync/specs/001-fitbit-garmin-sync/tasks.md new file mode 100644 index 0000000..fc42893 --- /dev/null +++ b/FitnessSync/specs/001-fitbit-garmin-sync/tasks.md @@ -0,0 +1,256 @@ +--- 
+description: "Task list for Fitbit-Garmin Local Sync implementation" +--- + +# Tasks: Fitbit-Garmin Local Sync + +**Input**: Design documents from `/specs/001-fitbit-garmin-sync/` +**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/ + +**Tests**: The examples below include test tasks. Tests are OPTIONAL - only include them if explicitly requested in the feature specification. + +**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story. + +## Format: `[ID] [P?] [Story] Description` + +- **[P]**: Can run in parallel (different files, no dependencies) +- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3) +- Include exact file paths in descriptions + +## Path Conventions + +- **Single project**: `src/`, `tests/` at repository root +- **Web app**: `backend/src/`, `frontend/src/` +- **Mobile**: `api/src/`, `ios/src/` or `android/src/` +- Paths shown below assume single project - adjust based on plan.md structure + +## Phase 1: Setup (Shared Infrastructure) + +**Purpose**: Project initialization and basic structure + +- [x] T001 Create project structure per implementation plan in backend/ +- [x] T002 Initialize Python 3.11 project with FastAPI, SQLAlchemy, garminconnect, garth, fitbit dependencies in requirements.txt +- [x] T003 [P] Configure linting and formatting tools (black, flake8) in pyproject.toml + +--- + +## Phase 2: Foundational (Blocking Prerequisites) + +**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented + +**⚠️ CRITICAL**: No user story work can begin until this phase is complete + +- [x] T004 Setup database schema and migrations framework using SQLAlchemy in backend/src/models/ +- [x] T005 [P] Create base models (Configuration, API Token) in backend/src/models/config.py and backend/src/models/api_token.py +- [x] T006 [P] Setup API routing and middleware structure in 
backend/main.py +- [x] T007 Create database manager for PostgreSQL in backend/src/services/postgresql_manager.py +- [x] T008 Configure error handling and logging infrastructure in backend/src/utils/ +- [x] T009 Setup environment configuration management in backend/src/utils/ +- [x] T010 Create FastAPI app structure with proper routing in backend/main.py +- [x] T011 Setup Docker configuration with PostgreSQL in backend/Dockerfile and backend/docker-compose.yml + +**Checkpoint**: Foundation ready - user story implementation can now begin in parallel + +--- + +## Phase 3: User Story 1 - Sync Weight Data from Fitbit to Garmin (Priority: P1) 🎯 MVP + +**Goal**: User can transfer weight data from Fitbit to Garmin without duplicating entries, with a web interface to trigger the sync. + +**Independent Test**: Can be fully tested by configuring Fitbit and Garmin credentials, triggering a weight sync, and verifying weight entries appear in Garmin without duplication. Delivers core value of unified health data. 
+ +### Tests for User Story 1 (OPTIONAL - only if tests requested) ⚠️ + +> **NOTE: Write these tests FIRST, ensure they FAIL before implementation** + +- [ ] T012 [P] [US1] Contract test for /api/sync/weight endpoint in backend/tests/contract/test_weight_sync.py +- [ ] T013 [P] [US1] Integration test for weight sync flow in backend/tests/integration/test_weight_sync_flow.py + +### Implementation for User Story 1 + +- [x] T014 [P] [US1] Create Weight Record model in backend/src/models/weight_record.py +- [x] T015 [P] [US1] Create Sync Log model in backend/src/models/sync_log.py +- [x] T016 [US1] Implement Fitbit Client service in backend/src/services/fitbit_client.py +- [x] T017 [US1] Implement Garmin Client service in backend/src/services/garmin_client.py +- [x] T018 [US1] Implement weight sync logic in backend/src/services/sync_app.py +- [x] T019 [US1] Implement weight sync API endpoint in backend/src/api/sync.py +- [x] T020 [US1] Create status API endpoint in backend/src/api/status.py +- [x] T021 [US1] Add web UI for weight sync in backend/templates/index.html +- [x] T022 [US1] Add logging for weight sync operations in backend/src/utils/helpers.py + +**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently + +--- + +## Phase 4: User Story 2 - Archive Activity Files from Garmin (Priority: P2) + +**Goal**: User can download and archive Garmin activity files (.fit, .gpx, .tcx) into the PostgreSQL database with proper organization. + +**Independent Test**: Can be tested by triggering the activity archiving process and verifying original activity files are downloaded and stored in the database with proper organization.
+ +### Tests for User Story 2 (OPTIONAL - only if tests requested) ⚠️ + +- [ ] T023 [P] [US2] Contract test for /api/sync/activities endpoint in backend/tests/contract/test_activities_sync.py +- [ ] T024 [P] [US2] Integration test for activity archiving flow in backend/tests/integration/test_activity_flow.py + +### Implementation for User Story 2 + +- [x] T025 [P] [US2] Create Activity Metadata model in backend/src/models/activity.py +- [x] T026 [US2] Extend Garmin Client service with activity download methods in backend/src/services/garmin_client.py +- [x] T027 [US2] Implement activity archiving logic in backend/src/services/sync_app.py +- [x] T028 [US2] Implement activity sync API endpoint in backend/src/api/sync.py +- [x] T029 [US2] Add activity list endpoint in backend/src/api/activities.py +- [x] T030 [US2] Update web UI to include activity archiving in backend/templates/index.html +- [x] T031 [US2] Add logging for activity archiving operations in backend/src/utils/helpers.py + +**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently + +--- + +## Phase 5: User Story 3 - Download Comprehensive Health Metrics from Garmin (Priority: P3) + +**Goal**: User can download and store comprehensive health metrics from Garmin in a local database with API endpoints for querying. + +**Independent Test**: Can be tested by triggering the metrics download and verifying that various types of health metrics are stored in the database with proper timestamps and types. 
+ +### Tests for User Story 3 (OPTIONAL - only if tests requested) ⚠️ + +- [ ] T032 [P] [US3] Contract test for /api/metrics endpoints in backend/tests/contract/test_metrics_api.py +- [ ] T033 [P] [US3] Integration test for health metrics download flow in backend/tests/integration/test_metrics_flow.py + +### Implementation for User Story 3 + +- [x] T034 [P] [US3] Create Health Metric model in backend/src/models/health_metric.py +- [x] T035 [US3] Extend Garmin Client service with comprehensive metrics download methods in backend/src/services/garmin_client.py +- [x] T036 [US3] Implement health metrics download logic in backend/src/services/sync_app.py +- [x] T037 [US3] Implement metrics list and query endpoints in backend/src/api/metrics.py +- [x] T038 [US3] Implement health data summary endpoint in backend/src/api/metrics.py +- [x] T039 [US3] Add UI elements for metrics management in backend/templates/ +- [x] T040 [US3] Add logging for metrics download operations in backend/src/utils/helpers.py + +**Checkpoint**: All user stories should now be independently functional + +--- + +## Phase 6: Setup API Endpoints for Configuration (Cross-cutting) + +**Goal**: Implement configuration and authentication endpoints that support all user stories + +- [x] T041 [P] Create setup API endpoints in backend/src/api/setup.py +- [x] T042 [P] Implement Garmin credentials save endpoint in backend/src/api/setup.py +- [x] T043 [P] Implement Fitbit credentials save and auth URL endpoint in backend/src/api/setup.py +- [x] T044 [P] Implement Fitbit OAuth callback endpoint in backend/src/api/setup.py +- [x] T045 Create logs endpoint in backend/src/api/logs.py +- [x] T046 Update UI with setup page in backend/templates/setup.html +- [x] T047 [P] Create auth status model in backend/src/models/auth_status.py +- [x] T048 [P] Create auth status API endpoint in backend/src/api/setup.py + +--- + +## Phase 7: Polish & Cross-Cutting Concerns + +**Purpose**: Improvements that affect multiple user 
stories + +- [x] T056 [P] Documentation updates in backend/README.md +- [ ] T057 Code cleanup and refactoring +- [ ] T058 Performance optimization across all stories +- [ ] T059 [P] Additional unit tests (if requested) in backend/tests/unit/ +- [ ] T060 Security hardening for OAuth token storage and API access +- [ ] T061 Run quickstart.md validation +- [ ] T062 Final integration testing across all features +- [x] T063 Update Docker configuration with all required services +- [x] T064 Create setup guide in backend/docs/setup.md + +--- + +## Dependencies & Execution Order + +### Phase Dependencies + +- **Setup (Phase 1)**: No dependencies - can start immediately +- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories +- **User Stories (Phase 3+)**: All depend on Foundational phase completion + - User stories can then proceed in parallel (if staffed) + - Or sequentially in priority order (P1 → P2 → P3) +- **Configuration Phase (Phase 6)**: Can proceed in parallel with user stories but may be needed first +- **Polish (Final Phase)**: Depends on all desired user stories being complete + +### User Story Dependencies + +- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories +- **User Story 2 (P2)**: Can start after Foundational (Phase 2) - May integrate with US1 but should be independently testable +- **User Story 3 (P3)**: Can start after Foundational (Phase 2) - May integrate with US1/US2 but should be independently testable + +### Within Each User Story + +- Tests (if included) MUST be written and FAIL before implementation +- Models before services +- Services before endpoints +- Core implementation before integration +- Story complete before moving to next priority + +### Parallel Opportunities + +- All Setup tasks marked [P] can run in parallel +- All Foundational tasks marked [P] can run in parallel (within Phase 2) +- Once Foundational phase completes, all user stories can start in parallel
(if team capacity allows) +- All tests for a user story marked [P] can run in parallel +- Models within a story marked [P] can run in parallel +- Different user stories can be worked on in parallel by different team members + +--- + +## Parallel Example: User Story 1 + +```bash +# Launch all tests for User Story 1 together (if tests requested): +Task: "Contract test for /api/sync/weight endpoint in backend/tests/contract/test_weight_sync.py" +Task: "Integration test for weight sync flow in backend/tests/integration/test_weight_sync_flow.py" + +# Launch all models for User Story 1 together: +Task: "Create Weight Record model in backend/src/models/weight_record.py" +Task: "Create Sync Log model in backend/src/models/sync_log.py" +``` + +--- + +## Implementation Strategy + +### MVP First (User Story 1 Only) + +1. Complete Phase 1: Setup +2. Complete Phase 2: Foundational (CRITICAL - blocks all stories) +3. Complete Phase 3: User Story 1 +4. **STOP and VALIDATE**: Test User Story 1 independently +5. Deploy/demo if ready + +### Incremental Delivery + +1. Complete Setup + Foundational → Foundation ready +2. Add User Story 1 → Test independently → Deploy/Demo (MVP!) +3. Add User Story 2 → Test independently → Deploy/Demo +4. Add User Story 3 → Test independently → Deploy/Demo +5. Each story adds value without breaking previous stories + +### Parallel Team Strategy + +With multiple developers: + +1. Team completes Setup + Foundational together +2. Once Foundational is done: + - Developer A: User Story 1 + - Developer B: User Story 2 + - Developer C: User Story 3 +3. 
Stories complete and integrate independently + +--- + +## Notes + +- [P] tasks = different files, no dependencies +- [Story] label maps task to specific user story for traceability +- Each user story should be independently completable and testable +- Verify tests fail before implementing +- Commit after each task or logical group +- Stop at any checkpoint to validate story independently +- Avoid: vague tasks, same file conflicts, cross-story dependencies that break independence \ No newline at end of file