mirror of
https://github.com/sstent/FitTrack_GarminSync.git
synced 2026-03-14 17:05:31 +00:00
feat: Initial commit of FitTrack_GarminSync project
This commit is contained in:
1
examples/GarminSync/.aidigestignore
Normal file
1
examples/GarminSync/.aidigestignore
Normal file
@@ -0,0 +1 @@
|
||||
data/*
|
||||
48
examples/GarminSync/.dockerignore
Normal file
48
examples/GarminSync/.dockerignore
Normal file
@@ -0,0 +1,48 @@
|
||||
# Ignore version control and IDE files
|
||||
.git
|
||||
.gitignore
|
||||
.vscode
|
||||
|
||||
# Ignore local configuration files
|
||||
.env
|
||||
.env.*
|
||||
*.env
|
||||
|
||||
# Ignore build artifacts and cache
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.pyd
|
||||
.pytest_cache
|
||||
.mypy_cache
|
||||
|
||||
# Ignore test files
|
||||
tests/
|
||||
tests_*.py
|
||||
|
||||
# Ignore documentation files
|
||||
docs/
|
||||
*.md
|
||||
*.rst
|
||||
|
||||
# Allow specific patch file we need for Docker
|
||||
!patches/garth_data_weight.py
|
||||
justfile
|
||||
# Replaced by pyproject.toml
requirements.txt
|
||||
|
||||
# Ignore temporary files
|
||||
*.swp
|
||||
*.bak
|
||||
*.tmp
|
||||
|
||||
# Ignore data directories
|
||||
data/*
|
||||
# Keep README if present
!data/README.md
|
||||
|
||||
# Ignore migration files (handled separately)
|
||||
!migrations/alembic.ini
|
||||
!migrations/versions/*.py
|
||||
|
||||
# Ignore local development files
|
||||
docker-compose.yml
|
||||
docker-compose.*.yml
|
||||
2
examples/GarminSync/.gitignore
vendored
Normal file
2
examples/GarminSync/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
.env
|
||||
data/
|
||||
52
examples/GarminSync/DEVELOPMENT_WORKFLOW.md
Normal file
52
examples/GarminSync/DEVELOPMENT_WORKFLOW.md
Normal file
@@ -0,0 +1,52 @@
|
||||
# GarminSync Development Workflow
|
||||
|
||||
This document describes the new development workflow for GarminSync using UV and justfile.
|
||||
|
||||
## Dependency Management
|
||||
|
||||
We've switched from pip/requirements.txt to UV for faster dependency installation. The dependency specification is in `pyproject.toml`.
|
||||
|
||||
### Key Commands:
|
||||
|
||||
```bash
|
||||
# Install dependencies with UV
|
||||
just run_build
|
||||
|
||||
# Create and activate virtual environment
|
||||
uv venv .venv
|
||||
source .venv/bin/activate
|
||||
|
||||
# Update dependencies
|
||||
uv pip install -r pyproject.toml
|
||||
```
|
||||
|
||||
## Tooling Integration
|
||||
|
||||
### justfile Commands
|
||||
|
||||
Our workflow is managed through a justfile with these commands:
|
||||
|
||||
```bash
|
||||
just run_dev # Run server in development mode with live reload
|
||||
just run_test # Run validation tests
|
||||
just run_lint # Run linter (Ruff)
|
||||
just run_format # Run formatter (Black)
|
||||
just run_migrate # Run database migrations
|
||||
```
|
||||
|
||||
### Pre-commit Hooks
|
||||
|
||||
We've added pre-commit hooks for automatic formatting and linting:
|
||||
|
||||
```bash
|
||||
# Install pre-commit hooks
|
||||
pre-commit install
|
||||
|
||||
# Run pre-commit on all files
|
||||
pre-commit run --all-files
|
||||
```
|
||||
|
||||
The hooks enforce:
|
||||
- Code formatting with Black
|
||||
- Linting with Ruff
|
||||
- Type checking with mypy
|
||||
70
examples/GarminSync/Dockerfile
Normal file
70
examples/GarminSync/Dockerfile
Normal file
@@ -0,0 +1,70 @@
|
||||
# Use multi-stage build with UV package manager
|
||||
FROM python:3.12-slim AS builder
|
||||
|
||||
# Install minimal build dependencies and UV
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
|
||||
|
||||
# Create virtual environment using the correct uv path
|
||||
RUN /root/.local/bin/uv venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
# Copy project definition
|
||||
COPY pyproject.toml .
|
||||
|
||||
# Set environment for optimized wheels
|
||||
ENV UV_EXTRA_INDEX_URL=https://pypi.org/simple
|
||||
ENV UV_FIND_LINKS=https://download.pytorch.org/whl/torch_stable.html
|
||||
|
||||
# Install dependencies with UV - use pre-compiled SciPy wheel with OpenBLAS optimization
|
||||
RUN /root/.local/bin/uv pip install \
|
||||
--only-binary=scipy \
|
||||
-r pyproject.toml
|
||||
|
||||
# Final runtime stage
|
||||
FROM python:3.12-slim
|
||||
|
||||
# Install only essential runtime libraries
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
libgomp1 \
|
||||
libgfortran5 \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& apt-get clean
|
||||
|
||||
# Copy virtual environment from builder
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Copy application files
|
||||
COPY garminsync/ ./garminsync/
|
||||
COPY migrations/ ./migrations/
|
||||
COPY migrations/alembic.ini ./alembic.ini
|
||||
COPY entrypoint.sh .
|
||||
COPY patches/garth_data_weight.py ./garth_data_weight.py
|
||||
|
||||
# Apply patches
|
||||
RUN cp garth_data_weight.py /opt/venv/lib/python3.12/site-packages/garth/data/weight.py
|
||||
|
||||
# Set permissions
|
||||
RUN chmod +x entrypoint.sh
|
||||
|
||||
# Create data directory
|
||||
RUN mkdir -p /app/data
|
||||
|
||||
# Create non-root user
|
||||
RUN groupadd -r appuser && useradd -r -g appuser appuser
|
||||
RUN chown -R appuser:appuser /app
|
||||
USER appuser
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:8888/health || exit 1
|
||||
|
||||
ENTRYPOINT ["./entrypoint.sh"]
|
||||
EXPOSE 8888
|
||||
32
examples/GarminSync/GPX_SUPPORT.md
Normal file
32
examples/GarminSync/GPX_SUPPORT.md
Normal file
@@ -0,0 +1,32 @@
|
||||
# GPX File Support in GarminSync
|
||||
|
||||
GarminSync now supports processing GPX files with accurate distance calculation using the Haversine formula.
|
||||
|
||||
## Features
|
||||
|
||||
- Parses GPX 1.1 files with extended Garmin TrackPoint extensions
|
||||
- Calculates total distance using Haversine formula
|
||||
- Extracts elevation data including gain/loss
|
||||
- Processes heart rate and cadence data
|
||||
- Calculates activity duration
|
||||
|
||||
## Supported Metrics
|
||||
|
||||
| Metric | Description | Data Source |
|
||||
|--------|-------------|-------------|
|
||||
| Distance | Total activity distance | Calculated from GPS coordinates |
|
||||
| Duration | Activity duration | Start/end timestamps |
|
||||
| Elevation | Min, max, gain, loss | ele tags in track points |
|
||||
| Heart Rate | Max and average | gpx:hr extension |
|
||||
| Cadence | Average cadence | gpx:cad extension |
|
||||
|
||||
## Implementation Details
|
||||
|
||||
The GPX parser:
|
||||
1. Uses XML parsing to extract track points
|
||||
2. Calculates distance between points using Haversine formula
|
||||
3. Processes elevation data to determine gain/loss
|
||||
4. Handles time zone conversions for timestamps
|
||||
5. Gracefully handles missing data points
|
||||
|
||||
For more details, see the [gpx_parser.py](garminsync/parsers/gpx_parser.py) file.
|
||||
246
examples/GarminSync/README.md
Normal file
246
examples/GarminSync/README.md
Normal file
@@ -0,0 +1,246 @@
|
||||
# GarminSync
|
||||
|
||||
GarminSync is a powerful Python application that automatically downloads `.fit` files for all your activities from Garmin Connect. It provides both a command-line interface for manual operations and a daemon mode for automatic background synchronization with a web-based dashboard for monitoring and configuration.
|
||||
|
||||
## Features
|
||||
|
||||
- **CLI Interface**: List and download activities with flexible filtering options
|
||||
- **Daemon Mode**: Automatic background synchronization with configurable schedules
|
||||
- **Web Dashboard**: Real-time monitoring and configuration through a web interface
|
||||
- **Offline Mode**: Work with cached data without internet connectivity
|
||||
- **Database Tracking**: SQLite database to track download status and file locations
|
||||
- **Rate Limiting**: Respects Garmin Connect's servers with built-in rate limiting
|
||||
- **GPX Support**: Parse and process GPX files for extended metrics
|
||||
- **Modern Development Workflow**: UV for dependency management and justfile for commands
|
||||
|
||||
## Technology Stack
|
||||
|
||||
- **Backend**: Python 3.12 (see Dockerfile) with SQLAlchemy ORM
|
||||
- **CLI Framework**: Typer for command-line interface
|
||||
- **Web Framework**: FastAPI with Jinja2 templates
|
||||
- **Database**: SQLite for local data storage
|
||||
- **Scheduling**: APScheduler for daemon mode scheduling
|
||||
- **Containerization**: Docker support for easy deployment
|
||||
|
||||
## Installation
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker (recommended) OR Python 3.10+
|
||||
- Garmin Connect account credentials
|
||||
|
||||
### Using Docker (Recommended)
|
||||
|
||||
1. Clone the repository:
|
||||
```bash
|
||||
git clone https://github.com/sstent/GarminSync.git
|
||||
cd GarminSync
|
||||
```
|
||||
|
||||
2. Create a `.env` file with your Garmin credentials:
|
||||
```bash
|
||||
echo "GARMIN_EMAIL=your_email@example.com" > .env
|
||||
echo "GARMIN_PASSWORD=your_password" >> .env
|
||||
```
|
||||
|
||||
3. Build the Docker image:
|
||||
```bash
|
||||
docker build -t garminsync .
|
||||
```
|
||||
|
||||
## Development Workflow
|
||||
|
||||
We've implemented a modern development workflow using:
|
||||
- UV for fast dependency management
|
||||
- justfile commands for common tasks
|
||||
- Pre-commit hooks for automatic formatting and linting
|
||||
|
||||
See [DEVELOPMENT_WORKFLOW.md](DEVELOPMENT_WORKFLOW.md) for details.
|
||||
|
||||
## GPX File Support
|
||||
|
||||
GarminSync now supports processing GPX files with accurate metrics extraction including:
|
||||
|
||||
- Distance calculation using Haversine formula
|
||||
- Elevation gain/loss metrics
|
||||
- Heart rate and cadence data
|
||||
- Activity duration calculation
|
||||
|
||||
See [GPX_SUPPORT.md](GPX_SUPPORT.md) for implementation details.
|
||||
|
||||
### Using Python Directly
|
||||
|
||||
1. Clone the repository:
|
||||
```bash
|
||||
git clone https://github.com/sstent/GarminSync.git
|
||||
cd GarminSync
|
||||
```
|
||||
|
||||
2. Create a virtual environment and activate it:
|
||||
```bash
|
||||
python -m venv venv
|
||||
source venv/bin/activate # On Windows: venv\Scripts\activate
|
||||
```
|
||||
|
||||
3. Install dependencies:
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
4. Create a `.env` file with your Garmin credentials:
|
||||
```bash
|
||||
echo "GARMIN_EMAIL=your_email@example.com" > .env
|
||||
echo "GARMIN_PASSWORD=your_password" >> .env
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### CLI Commands
|
||||
|
||||
List all activities:
|
||||
```bash
|
||||
# Using Docker
|
||||
docker run -it --env-file .env -v $(pwd)/data:/app/data garminsync list --all
|
||||
|
||||
# Using Python directly
|
||||
python -m garminsync.cli list --all
|
||||
```
|
||||
|
||||
List missing activities:
|
||||
```bash
|
||||
# Using Docker
|
||||
docker run -it --env-file .env -v $(pwd)/data:/app/data garminsync list --missing
|
||||
|
||||
# Using Python directly
|
||||
python -m garminsync.cli list --missing
|
||||
```
|
||||
|
||||
List downloaded activities:
|
||||
```bash
|
||||
# Using Docker
|
||||
docker run -it --env-file .env -v $(pwd)/data:/app/data garminsync list --downloaded
|
||||
|
||||
# Using Python directly
|
||||
python -m garminsync.cli list --downloaded
|
||||
```
|
||||
|
||||
Download missing activities:
|
||||
```bash
|
||||
# Using Docker
|
||||
docker run -it --env-file .env -v $(pwd)/data:/app/data garminsync download --missing
|
||||
|
||||
# Using Python directly
|
||||
python -m garminsync.cli download --missing
|
||||
```
|
||||
|
||||
Work offline (without syncing with Garmin Connect):
|
||||
```bash
|
||||
# Using Docker
|
||||
docker run -it --env-file .env -v $(pwd)/data:/app/data garminsync list --missing --offline
|
||||
|
||||
# Using Python directly
|
||||
python -m garminsync.cli list --missing --offline
|
||||
```
|
||||
|
||||
### Daemon Mode
|
||||
|
||||
Start the daemon with web UI:
|
||||
```bash
|
||||
# Using Docker (expose port 8080 for web UI)
|
||||
docker run -it --env-file .env -v $(pwd)/data:/app/data -p 8080:8080 garminsync daemon --start
|
||||
|
||||
# Using Python directly
|
||||
python -m garminsync.cli daemon --start
|
||||
```
|
||||
|
||||
Access the web dashboard at `http://localhost:8080`
|
||||
|
||||
### Web Interface
|
||||
|
||||
The web interface provides real-time monitoring and configuration capabilities:
|
||||
|
||||
1. **Dashboard**: View activity statistics, daemon status, and recent logs
|
||||
2. **Activities**: Browse all activities with detailed information in a sortable table
|
||||
3. **Logs**: Filter and browse synchronization logs with pagination
|
||||
4. **Configuration**: Manage daemon settings and scheduling
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Create a `.env` file in the project root with your Garmin Connect credentials:
|
||||
|
||||
```env
|
||||
GARMIN_EMAIL=your_email@example.com
|
||||
GARMIN_PASSWORD=your_password
|
||||
```
|
||||
|
||||
### Daemon Scheduling
|
||||
|
||||
The daemon uses cron-style scheduling. Configure the schedule through the web UI or by modifying the database directly. Default schedule is every 6 hours (`0 */6 * * *`).
|
||||
|
||||
### Data Storage
|
||||
|
||||
Downloaded `.fit` files and the SQLite database are stored in the `data/` directory by default. When using Docker, this directory is mounted as a volume to persist data between container runs.
|
||||
|
||||
## Web API Endpoints
|
||||
|
||||
The web interface provides RESTful API endpoints for programmatic access:
|
||||
|
||||
- `GET /api/status` - Get daemon status and recent logs
|
||||
- `GET /api/activities/stats` - Get activity statistics
|
||||
- `GET /api/activities` - Get paginated activities with filtering
|
||||
- `GET /api/activities/{activity_id}` - Get detailed activity information
|
||||
- `GET /api/dashboard/stats` - Get comprehensive dashboard statistics
|
||||
- `GET /api/logs` - Get filtered and paginated logs
|
||||
- `POST /api/sync/trigger` - Manually trigger synchronization
|
||||
- `POST /api/schedule` - Update daemon schedule configuration
|
||||
- `POST /api/daemon/start` - Start the daemon
|
||||
- `POST /api/daemon/stop` - Stop the daemon
|
||||
- `DELETE /api/logs` - Clear all logs
|
||||
|
||||
## Development
|
||||
|
||||
### Project Structure
|
||||
|
||||
```
|
||||
garminsync/
|
||||
├── garminsync/ # Main application package
|
||||
│ ├── cli.py # Command-line interface
|
||||
│ ├── config.py # Configuration management
|
||||
│ ├── database.py # Database models and operations
|
||||
│ ├── garmin.py # Garmin Connect client wrapper
|
||||
│ ├── daemon.py # Daemon mode implementation
|
||||
│ └── web/ # Web interface components
|
||||
│ ├── app.py # FastAPI application setup
|
||||
│ ├── routes.py # API endpoints
|
||||
│ ├── static/ # CSS, JavaScript files
|
||||
│ └── templates/ # HTML templates
|
||||
├── data/ # Downloaded files and database
|
||||
├── .env # Environment variables (gitignored)
|
||||
├── Dockerfile # Docker configuration
|
||||
├── requirements.txt # Python dependencies
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
### Running Tests
|
||||
|
||||
(Add test instructions when tests are implemented)
|
||||
|
||||
## Known Limitations
|
||||
|
||||
- No support for two-factor authentication (2FA)
|
||||
- Limited automatic retry logic for failed downloads
|
||||
- No support for selective activity date range downloads
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome! Please feel free to submit a Pull Request.
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License - see the LICENSE file for details.
|
||||
|
||||
## Support
|
||||
|
||||
For issues and feature requests, please use the GitHub issue tracker.
|
||||
29
examples/GarminSync/docker/docker-compose.test.yml
Normal file
29
examples/GarminSync/docker/docker-compose.test.yml
Normal file
@@ -0,0 +1,29 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
backend:
|
||||
extends:
|
||||
file: docker-compose.yml
|
||||
service: backend
|
||||
command: pytest -v tests/
|
||||
environment:
|
||||
- DATABASE_URL=postgresql://garmin:sync@db_test/garminsync_test
|
||||
- TESTING=True
|
||||
|
||||
db_test:
|
||||
image: postgres:15-alpine
|
||||
volumes:
|
||||
- postgres_test_data:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_USER=garmin
|
||||
- POSTGRES_PASSWORD=sync
|
||||
- POSTGRES_DB=garminsync_test
|
||||
networks:
|
||||
- garmin-net
|
||||
|
||||
volumes:
|
||||
postgres_test_data:
|
||||
|
||||
networks:
|
||||
garmin-net:
|
||||
external: true
|
||||
12
examples/GarminSync/docker/docker-compose.yml
Normal file
12
examples/GarminSync/docker/docker-compose.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
services:
|
||||
garminsync:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: Dockerfile
|
||||
# Removed entrypoint to rely on Dockerfile configuration
|
||||
volumes:
|
||||
- ../data:/app/data # Persistent storage for SQLite database
|
||||
ports:
|
||||
- "8888:8888"
|
||||
env_file:
|
||||
- ../.env # Use the root .env file
|
||||
39
examples/GarminSync/entrypoint.sh
Normal file
39
examples/GarminSync/entrypoint.sh
Normal file
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash

# Container entry point: apply Alembic database migrations (with a bounded
# retry loop) and then hand off to the GarminSync daemon.

echo "$(date) - Starting database migrations..."
echo "ALEMBIC_CONFIG: ${ALEMBIC_CONFIG:-/app/migrations/alembic.ini}"
echo "ALEMBIC_SCRIPT_LOCATION: ${ALEMBIC_SCRIPT_LOCATION:-/app/migrations/versions}"

max_retries=5
retry_count=0
migration_status=1

# Make the (possibly defaulted) Alembic locations visible to child processes.
export ALEMBIC_CONFIG=${ALEMBIC_CONFIG:-/app/migrations/alembic.ini}
export ALEMBIC_SCRIPT_LOCATION=${ALEMBIC_SCRIPT_LOCATION:-/app/migrations/versions}

# Keep trying until the migration succeeds or the attempt budget runs out.
until [ $migration_status -eq 0 ] || [ $retry_count -ge $max_retries ]; do
    echo "Attempt $((retry_count+1))/$max_retries: Running migrations..."
    start_time=$(date +%s)
    alembic upgrade head
    migration_status=$?
    end_time=$(date +%s)
    duration=$((end_time - start_time))

    if [ $migration_status -eq 0 ]; then
        echo "$(date) - Migrations completed successfully in ${duration} seconds"
    else
        echo "$(date) - Migration attempt failed after ${duration} seconds! Retrying..."
        retry_count=$((retry_count+1))
        sleep 2
    fi
done

# Refuse to start the app on an unmigrated database.
if [ $migration_status -ne 0 ]; then
    echo "$(date) - Migration failed after $max_retries attempts!" >&2
    exit 1
fi

# Replace this shell with the daemon so it receives signals directly.
echo "$(date) - Starting application..."
exec python -m garminsync.cli daemon --start --port 8888
|
||||
0
examples/GarminSync/garminsync/__init__.py
Normal file
0
examples/GarminSync/garminsync/__init__.py
Normal file
251
examples/GarminSync/garminsync/activity_parser.py
Normal file
251
examples/GarminSync/garminsync/activity_parser.py
Normal file
@@ -0,0 +1,251 @@
|
||||
import os
|
||||
import gzip
|
||||
import fitdecode
|
||||
import xml.etree.ElementTree as ET
|
||||
import numpy as np
|
||||
from .fit_processor.power_estimator import PowerEstimator
|
||||
from .fit_processor.gear_analyzer import SinglespeedAnalyzer
|
||||
from math import radians, sin, cos, sqrt, atan2
|
||||
|
||||
def detect_file_type(file_path):
    """Detect an activity file's format by inspecting its header bytes.

    Reads the first 128 bytes and looks for an XML declaration or the
    ``.FIT`` magic that Garmin FIT files carry in their header.

    :param file_path: Path to the file to inspect.
    :return: ``'xml'``, ``'fit'``, ``'unknown'`` (readable but not a
             recognized format), or ``'error'`` (file could not be read).
    """
    try:
        with open(file_path, 'rb') as f:
            header = f.read(128)
        # XML/TCX files begin with an XML declaration.
        if b'<?xml' in header[:20]:
            return 'xml'
        # Standard FIT header: '.FIT' magic at byte offsets 4-7.
        if len(header) >= 8 and header[4:8] == b'.FIT':
            return 'fit'
        # Fallbacks for non-standard header layouts.
        if (len(header) >= 8 and
                (header[0:4] == b'.FIT' or
                 header[4:8] == b'FIT.' or
                 header[8:12] == b'.FIT')):
            return 'fit'
        return 'unknown'
    except OSError:
        # Missing/unreadable file: report 'error' rather than raising.
        return 'error'
|
||||
|
||||
def parse_xml_file(file_path):
    """Parse a TCX (Training Center XML) file into Garmin-style metrics.

    :param file_path: Path to the TCX file.
    :return: Dict shaped like Garmin Connect's activity payload
             (``{"activityType": ..., "summaryDTO": ...}``), or ``None``
             if the file cannot be parsed as TCX.
    """
    try:
        tree = ET.parse(file_path)
        root = tree.getroot()
        namespaces = {'ns': 'http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2'}

        activity = root.find('.//ns:Activity', namespaces)
        if activity is None:
            # Not a TCX document we understand.
            return None
        sport = activity.get('Sport', 'other')

        distance = root.find('.//ns:DistanceMeters', namespaces)
        distance = float(distance.text) if distance is not None else None
        duration = root.find('.//ns:TotalTimeSeconds', namespaces)
        duration = float(duration.text) if duration is not None else None
        calories = root.find('.//ns:Calories', namespaces)
        calories = int(calories.text) if calories is not None else None

        # Collect every heart-rate sample; skip malformed/empty values only
        # (was a bare `except:` which also swallowed KeyboardInterrupt).
        hr_values = []
        for hr in root.findall('.//ns:HeartRateBpm/ns:Value', namespaces):
            try:
                hr_values.append(int(hr.text))
            except (TypeError, ValueError):
                continue
        max_hr = max(hr_values) if hr_values else None

        return {
            "activityType": {"typeKey": sport},
            "summaryDTO": {
                "duration": duration,
                "distance": distance,
                "maxHR": max_hr,
                "avgPower": None,  # TCX summaries carry no power here
                "calories": calories
            }
        }
    except Exception:
        # Best-effort parser: any structural problem yields None.
        return None
|
||||
|
||||
def compute_gradient(altitudes, positions, distance_m=10):
    """Compute a per-point gradient (%) series from elevation changes.

    :param altitudes: Sequence of altitude samples in meters.
    :param positions: Optional parallel sequence of (lat, lon) tuples used
        to derive the horizontal distance between samples; when absent (or
        shorter than ``altitudes``), ``distance_m`` is used as the spacing.
    :param distance_m: Fallback horizontal sample spacing in meters.
    :return: List of gradients, same length as ``altitudes`` (the first
        entry duplicates the first computed gradient to keep alignment).
    """
    if len(altitudes) < 2:
        # Not enough samples to form a slope.
        return [0] * len(altitudes)

    gradients = []
    for i in range(1, len(altitudes)):
        elev_change = altitudes[i] - altitudes[i-1]
        if positions and i < len(positions):
            distance = distance_between_points(positions[i-1], positions[i])
        else:
            distance = distance_m
        if distance > 0:
            gradients.append((elev_change / distance) * 100)
        else:
            # Coincident GPS points (zero horizontal travel): treat as flat
            # instead of raising ZeroDivisionError.
            gradients.append(0.0)

    # Pad the front so the result aligns 1:1 with `altitudes`.
    return [gradients[0]] + gradients
|
||||
|
||||
def distance_between_points(point1, point2):
    """Return the great-circle distance in meters between two (lat, lon)
    pairs, computed with the Haversine formula on a spherical Earth."""
    earth_radius_m = 6371000

    phi1, lam1 = radians(point1[0]), radians(point1[1])
    phi2, lam2 = radians(point2[0]), radians(point2[1])

    d_phi = phi2 - phi1
    d_lam = lam2 - lam1

    # Haversine term: sin^2(dphi/2) + cos(phi1) * cos(phi2) * sin^2(dlam/2)
    h = sin(d_phi/2)**2 + cos(phi1) * cos(phi2) * sin(d_lam/2)**2
    angular_dist = 2 * atan2(sqrt(h), sqrt(1-h))

    return earth_radius_m * angular_dist
|
||||
|
||||
def _collect_record(frame, detailed_metrics):
    """Append one FIT 'record' frame's samples to the detailed-metrics lists.

    Uses `is not None` checks so legitimate zero-valued samples (altitude 0,
    cadence 0, power 0) are kept; the previous truthiness-based walrus
    checks silently dropped them.
    """
    timestamp = frame.get_value('timestamp')
    if timestamp is not None:
        detailed_metrics['timestamps'].append(timestamp)
    lat = frame.get_value('position_lat')
    lon = frame.get_value('position_long')
    if lat is not None and lon is not None:
        detailed_metrics['positions'].append((lat, lon))
    for field, key in (('altitude', 'altitudes'), ('speed', 'speeds'),
                       ('cadence', 'cadences'), ('power', 'powers')):
        value = frame.get_value(field)
        if value is not None:
            detailed_metrics[key].append(value)


def _session_metrics(frame):
    """Extract the summary metrics dict from a FIT 'session' frame."""
    return {
        "sport": frame.get_value("sport"),
        "total_timer_time": frame.get_value("total_timer_time"),
        "total_distance": frame.get_value("total_distance"),
        "max_heart_rate": frame.get_value("max_heart_rate"),
        "avg_power": frame.get_value("avg_power"),
        "total_calories": frame.get_value("total_calories")
    }


def _consume_frames(fit, detailed_metrics):
    """Walk all FIT data frames, filling `detailed_metrics` in place.

    :return: The session summary dict, or {} if no session frame was found.
    """
    metrics = {}
    for frame in fit:
        if frame.frame_type == fitdecode.FrameType.DATA:
            if frame.name == 'record':
                _collect_record(frame, detailed_metrics)
            elif frame.name == 'session':
                metrics = _session_metrics(frame)
    return metrics


def parse_fit_file(file_path):
    """Parse a FIT file (optionally gzip-compressed) into activity metrics.

    Extracts a session summary plus per-sample detail series (speed, cadence,
    altitude, GPS position, power, timestamps), derives gradients, and for
    cycling activities estimates power (when absent) and runs gear analysis.

    The gzip and plain paths previously duplicated the whole frame loop;
    both now share `_consume_frames`.

    :param file_path: Path to the .fit (or gzipped .fit) file.
    :return: Dict shaped like Garmin Connect's activity payload with an
             extra "detailedMetrics" key, or None on any parse error.
    """
    detailed_metrics = {
        'speeds': [], 'cadences': [], 'altitudes': [],
        'positions': [], 'gradients': [], 'powers': [], 'timestamps': []
    }

    power_estimator = PowerEstimator()
    gear_analyzer = SinglespeedAnalyzer()

    try:
        # Sniff for the gzip magic number before choosing a reader.
        with open(file_path, 'rb') as f:
            magic = f.read(2)
        is_gzipped = magic == b'\x1f\x8b'

        if is_gzipped:
            with gzip.open(file_path, 'rb') as gz_file:
                from io import BytesIO
                with BytesIO(gz_file.read()) as fit_data:
                    metrics = _consume_frames(fitdecode.FitReader(fit_data),
                                              detailed_metrics)
        else:
            with fitdecode.FitReader(file_path) as fit:
                metrics = _consume_frames(fit, detailed_metrics)

        # Compute gradients if data available
        if detailed_metrics['altitudes']:
            detailed_metrics['gradients'] = compute_gradient(
                detailed_metrics['altitudes'],
                detailed_metrics['positions']
            )

        # Process cycling-specific metrics
        if metrics.get('sport') in ['cycling', 'road_biking', 'mountain_biking']:
            # Estimate power from speed/gradient when the file has none.
            if not detailed_metrics['powers']:
                for speed, gradient in zip(detailed_metrics['speeds'],
                                           detailed_metrics['gradients']):
                    estimated_power = power_estimator.calculate_power(speed, gradient)
                    detailed_metrics['powers'].append(estimated_power)
                metrics['avg_power'] = np.mean(detailed_metrics['powers']) if detailed_metrics['powers'] else None

            # Run gear analysis
            if detailed_metrics['speeds'] and detailed_metrics['cadences']:
                gear_analysis = gear_analyzer.analyze_gear_ratio(
                    detailed_metrics['speeds'],
                    detailed_metrics['cadences'],
                    detailed_metrics['gradients']
                )
                metrics['gear_analysis'] = gear_analysis or {}

        return {
            "activityType": {"typeKey": metrics.get("sport", "other")},
            "summaryDTO": {
                "duration": metrics.get("total_timer_time"),
                "distance": metrics.get("total_distance"),
                "maxHR": metrics.get("max_heart_rate"),
                "avgPower": metrics.get("avg_power"),
                "calories": metrics.get("total_calories"),
                "gearAnalysis": metrics.get("gear_analysis", {})
            },
            "detailedMetrics": detailed_metrics
        }
    except Exception as e:
        print(f"Error parsing FIT file: {str(e)}")
        return None
|
||||
|
||||
def _parse_local_activity_file(filename):
    """Parse a local activity file, dispatching on its detected type.

    :param filename: Path to a local .fit or .tcx/.xml activity file.
    :return: Parsed metrics dict, or None on unknown type / parse failure.
    """
    file_type = detect_file_type(filename)
    try:
        if file_type == 'fit':
            return parse_fit_file(filename)
        if file_type == 'xml':
            return parse_xml_file(filename)
    except Exception as e:
        print(f"Error parsing activity file: {str(e)}")
    return None


def get_activity_metrics(activity, client=None, force_reprocess=False):
    """
    Get activity metrics from local file or Garmin API

    :param activity: Activity object (reads .filename and .activity_id)
    :param client: Optional GarminClient instance used as a fallback
    :param force_reprocess: Accepted for backward compatibility. The local
        file (when present) is always parsed fresh here, so both the forced
        and regular paths behave identically; the previous implementation
        duplicated the parse (and could run it twice).
    :return: The "summaryDTO" portion of the metrics when available,
             otherwise the raw metrics dict, or None.
    """
    metrics = None

    # Prefer the local file when it exists.
    if activity.filename and os.path.exists(activity.filename):
        metrics = _parse_local_activity_file(activity.filename)

    # Fall back to the Garmin Connect API when local parsing yielded nothing.
    if not metrics and client:
        try:
            metrics = client.get_activity_details(activity.activity_id)
        except Exception as e:
            print(f"Error fetching activity from API: {str(e)}")

    # Return summary DTO for compatibility
    return metrics.get("summaryDTO") if metrics and "summaryDTO" in metrics else metrics
|
||||
369
examples/GarminSync/garminsync/cli.py
Normal file
369
examples/GarminSync/garminsync/cli.py
Normal file
@@ -0,0 +1,369 @@
|
||||
import os
|
||||
|
||||
import typer
|
||||
from typing_extensions import Annotated
|
||||
|
||||
from .config import load_config
|
||||
|
||||
# Initialize environment variables
|
||||
load_config()
|
||||
|
||||
app = typer.Typer(
|
||||
help="GarminSync - Download Garmin Connect activities", rich_markup_mode=None
|
||||
)
|
||||
|
||||
|
||||
@app.command("list")
def list_activities(
    all_activities: Annotated[
        bool, typer.Option("--all", help="List all activities")
    ] = False,
    missing: Annotated[
        bool, typer.Option("--missing", help="List missing activities")
    ] = False,
    downloaded: Annotated[
        bool, typer.Option("--downloaded", help="List downloaded activities")
    ] = False,
    offline: Annotated[
        bool, typer.Option("--offline", help="Work offline without syncing")
    ] = False,
):
    """List activities based on specified filters"""
    from tqdm import tqdm

    from .database import (Activity, get_offline_stats, get_session,
                           sync_database)
    from .garmin import GarminClient

    # Require at least one filter flag before doing any work.
    if not (all_activities or missing or downloaded):
        typer.echo(
            "Error: Please specify at least one filter option (--all, --missing, --downloaded)"
        )
        raise typer.Exit(code=1)

    try:
        client = GarminClient()
        session = get_session()

        if offline:
            # Cached-data mode: report the last sync time instead of syncing.
            stats = get_offline_stats()
            typer.echo(
                f"Working in offline mode - using cached data (last sync: {stats['last_sync']})"
            )
        else:
            # Refresh the local database from Garmin Connect first.
            typer.echo("Syncing activities from Garmin Connect...")
            sync_database(client)

        # Narrow the query per the chosen flag; --all keeps it unfiltered.
        query = session.query(Activity)
        if not all_activities:
            if missing:
                query = query.filter_by(downloaded=False)
            elif downloaded:
                query = query.filter_by(downloaded=True)

        records = query.all()
        if not records:
            typer.echo("No activities found matching your criteria")
            return

        # Report each matching activity with its download state.
        typer.echo(f"Found {len(records)} activities:")
        for item in tqdm(records, desc="Listing activities"):
            state = "Downloaded" if item.downloaded else "Missing"
            typer.echo(
                f"- ID: {item.activity_id}, Start: {item.start_time}, Status: {state}"
            )

    except Exception as e:
        typer.echo(f"Error: {str(e)}")
        raise typer.Exit(code=1)
    finally:
        if "session" in locals():
            session.close()
|
||||
|
||||
|
||||
@app.command("download")
def download(
    missing: Annotated[
        bool, typer.Option("--missing", help="Download missing activities")
    ] = False,
):
    """Download activities based on specified filters"""
    from pathlib import Path

    from tqdm import tqdm

    from .database import Activity, get_session
    from .garmin import GarminClient

    # Only --missing mode is implemented; reject anything else up front.
    if not missing:
        typer.echo("Error: Currently only --missing downloads are supported")
        raise typer.Exit(code=1)

    try:
        client = GarminClient()
        session = get_session()

        # Refresh the local catalogue before deciding what to fetch.
        typer.echo("Syncing activities from Garmin Connect...")
        from .database import sync_database

        sync_database(client)

        pending = session.query(Activity).filter_by(downloaded=False).all()
        if not pending:
            typer.echo("No missing activities found")
            return

        # FIT files land in DATA_DIR (default: ./data); create it on demand.
        target_dir = Path(os.getenv("DATA_DIR", "data"))
        target_dir.mkdir(parents=True, exist_ok=True)

        typer.echo(f"Downloading {len(pending)} missing activities...")
        for item in tqdm(pending, desc="Downloading"):
            try:
                payload = client.download_activity_fit(item.activity_id)

                # Colons/spaces are not filename-safe on every platform.
                safe_stamp = item.start_time.replace(":", "-").replace(" ", "_")
                destination = target_dir / f"activity_{item.activity_id}_{safe_stamp}.fit"
                destination.write_bytes(payload)

                # Record the download so the next run skips this activity.
                item.filename = str(destination)
                item.downloaded = True
                session.commit()

            except Exception as e:
                typer.echo(
                    f"Error downloading activity {item.activity_id}: {str(e)}"
                )
                session.rollback()

        typer.echo("Download completed successfully")

    except Exception as e:
        typer.echo(f"Error: {str(e)}")
        raise typer.Exit(code=1)
    finally:
        if "session" in locals():
            session.close()
|
||||
|
||||
|
||||
@app.command("daemon")
def daemon_mode(
    start: Annotated[bool, typer.Option("--start", help="Start daemon")] = False,
    stop: Annotated[bool, typer.Option("--stop", help="Stop daemon")] = False,
    status: Annotated[
        bool, typer.Option("--status", help="Show daemon status")
    ] = False,
    port: Annotated[int, typer.Option("--port", help="Web UI port")] = 8080,
    run_migrations: Annotated[
        bool,
        typer.Option(
            "--run-migrations/--skip-migrations",
            help="Run database migrations on startup (default: run)"
        )
    ] = True,
):
    """Daemon mode operations"""
    from .daemon import GarminSyncDaemon

    # Guard-clause dispatch: first matching action wins.
    if start:
        # Blocks until the daemon shuts down.
        GarminSyncDaemon().start(web_port=port, run_migrations=run_migrations)
        return
    if stop:
        typer.echo("Stopping daemon...")
        # TODO: Implement stop (we can use a PID file to stop the daemon)
        typer.echo("Daemon stop not implemented yet")
        return
    if status:
        typer.echo("Daemon status not implemented yet")
        return
    typer.echo("Please specify one of: --start, --stop, --status")
|
||||
|
||||
|
||||
@app.command("migrate")
def migrate_activities():
    """Migrate database to add new activity fields"""
    from .migrate_activities import migrate_activities as run_migration

    typer.echo("Starting database migration...")
    # run_migration() reports success via its boolean return value.
    if run_migration():
        typer.echo("Database migration completed successfully!")
        return
    typer.echo("Database migration failed!")
    raise typer.Exit(code=1)
|
||||
|
||||
@app.command("analyze")
def analyze_activities(
    activity_id: Annotated[int, typer.Option("--activity-id", help="Activity ID to analyze")] = None,
    missing: Annotated[bool, typer.Option("--missing", help="Analyze all cycling activities missing analysis")] = False,
    cycling: Annotated[bool, typer.Option("--cycling", help="Run cycling-specific analysis")] = False,
):
    """Analyze activity data for cycling metrics.

    Exactly one of --activity-id or --missing selects the work set;
    --cycling is mandatory because only cycling analysis is implemented.
    """
    from tqdm import tqdm
    from .database import Activity, get_session
    from .activity_parser import get_activity_metrics

    if not cycling:
        typer.echo("Error: Currently only cycling analysis is supported")
        raise typer.Exit(code=1)

    session = get_session()
    try:
        activities = []

        if activity_id:
            activity = session.query(Activity).get(activity_id)
            if not activity:
                typer.echo(f"Error: Activity with ID {activity_id} not found")
                raise typer.Exit(code=1)
            activities = [activity]
        elif missing:
            activities = session.query(Activity).filter(
                Activity.activity_type == 'cycling',
                Activity.analyzed == False  # Only unanalyzed activities
            ).all()
            if not activities:
                typer.echo("No unanalyzed cycling activities found")
                return
        else:
            typer.echo("Error: Please specify --activity-id or --missing")
            raise typer.Exit(code=1)

        typer.echo(f"Analyzing {len(activities)} cycling activities...")
        for activity in tqdm(activities, desc="Processing"):
            metrics = get_activity_metrics(activity)
            if metrics and "gearAnalysis" in metrics:
                # Persist the gear analysis results on the activity row.
                activity.analyzed = True
                activity.gear_ratio = metrics["gearAnalysis"].get("gear_ratio")
                activity.gear_inches = metrics["gearAnalysis"].get("gear_inches")
                session.commit()

        typer.echo("Analysis completed successfully")
    finally:
        # Fix: the session was previously never closed (leak on every run,
        # including the early-exit error paths).
        session.close()
|
||||
|
||||
@app.command("reprocess")
def reprocess_activities(
    all: Annotated[bool, typer.Option("--all", help="Reprocess all activities")] = False,
    missing: Annotated[bool, typer.Option("--missing", help="Reprocess activities missing metrics")] = False,
    activity_id: Annotated[int, typer.Option("--activity-id", help="Reprocess specific activity by ID")] = None,
):
    """Reprocess activities to calculate missing metrics.

    Selection is checked in priority order: --activity-id, then --missing
    (not-yet-reprocessed rows), then --all (every downloaded activity).
    """
    from tqdm import tqdm
    from .database import Activity, get_session
    from .activity_parser import get_activity_metrics

    session = get_session()
    try:
        activities = []

        if activity_id:
            activity = session.query(Activity).get(activity_id)
            if not activity:
                typer.echo(f"Error: Activity with ID {activity_id} not found")
                raise typer.Exit(code=1)
            activities = [activity]
        elif missing:
            activities = session.query(Activity).filter(
                Activity.reprocessed == False
            ).all()
            if not activities:
                typer.echo("No activities to reprocess")
                return
        elif all:
            activities = session.query(Activity).filter(
                Activity.downloaded == True
            ).all()
            if not activities:
                typer.echo("No downloaded activities found")
                return
        else:
            typer.echo("Error: Please specify one of: --all, --missing, --activity-id")
            raise typer.Exit(code=1)

        typer.echo(f"Reprocessing {len(activities)} activities...")
        for activity in tqdm(activities, desc="Reprocessing"):
            # Use force_reprocess=True to ensure we parse the file again
            metrics = get_activity_metrics(activity, force_reprocess=True)

            # Only overwrite fields for which the parser produced a value.
            if metrics:
                activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
                activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
                activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
                activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
                activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
                activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories

            # Mark as reprocessed even when no metrics came back, so later
            # --missing runs do not retry it forever.
            activity.reprocessed = True
            session.commit()

        typer.echo("Reprocessing completed")
    finally:
        # Fix: the session was previously never closed (leak on every run).
        session.close()
|
||||
|
||||
@app.command("report")
def generate_report(
    power_analysis: Annotated[bool, typer.Option("--power-analysis", help="Generate power metrics report")] = False,
    gear_analysis: Annotated[bool, typer.Option("--gear-analysis", help="Generate gear analysis report")] = False,
):
    """Generate performance reports for cycling activities.

    Prints a plain-text report of analyzed cycling activities; at least one
    of --power-analysis / --gear-analysis must be given.
    """
    # Fix: dropped the unused `from .web import app as web_app` import.
    from .database import Activity, get_session

    if not any([power_analysis, gear_analysis]):
        typer.echo("Error: Please specify at least one report type")
        raise typer.Exit(code=1)

    session = get_session()
    try:
        activities = session.query(Activity).filter(
            Activity.activity_type == 'cycling',
            Activity.analyzed == True
        ).all()

        if not activities:
            typer.echo("No analyzed cycling activities found")
            return

        # Simple CLI report - real implementation would use web UI
        typer.echo("Cycling Analysis Report")
        typer.echo("=======================")

        for activity in activities:
            typer.echo(f"\nActivity ID: {activity.activity_id}")
            typer.echo(f"Date: {activity.start_time}")

            if power_analysis:
                typer.echo(f"- Average Power: {activity.avg_power}W")
                # Add other power metrics as needed

            if gear_analysis:
                typer.echo(f"- Gear Ratio: {activity.gear_ratio}")
                typer.echo(f"- Gear Inches: {activity.gear_inches}")

        typer.echo("\nFull reports available in the web UI at http://localhost:8080")
    finally:
        # Fix: the session was previously never closed (leak on every run).
        session.close()
|
||||
|
||||
def main():
    """Console-script entry point: run the Typer application."""
    app()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Support running this file directly as a script.
    main()
|
||||
18
examples/GarminSync/garminsync/config.py
Normal file
18
examples/GarminSync/garminsync/config.py
Normal file
@@ -0,0 +1,18 @@
|
||||
import os
|
||||
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
def load_config():
    """Load environment variables from .env file.

    Thin wrapper around python-dotenv's load_dotenv(); reads a local .env
    file (if present) into the process environment.
    """
    load_dotenv()
|
||||
|
||||
|
||||
class Config:
    """Garmin Connect credentials, sourced from the process environment."""

    GARMIN_EMAIL = os.getenv("GARMIN_EMAIL")
    GARMIN_PASSWORD = os.getenv("GARMIN_PASSWORD")

    @classmethod
    def validate(cls):
        """Raise ValueError unless both credentials are present (non-empty)."""
        have_both = cls.GARMIN_EMAIL and cls.GARMIN_PASSWORD
        if not have_both:
            raise ValueError("Missing GARMIN_EMAIL or GARMIN_PASSWORD in environment")
|
||||
450
examples/GarminSync/garminsync/daemon.py
Normal file
450
examples/GarminSync/garminsync/daemon.py
Normal file
@@ -0,0 +1,450 @@
|
||||
import asyncio
import concurrent.futures
import os
import signal
import threading
import time
from datetime import datetime
from queue import Empty, PriorityQueue

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger

from .activity_parser import get_activity_metrics
from .database import (Activity, DaemonConfig, SyncLog, get_legacy_session,
                       get_offline_stats, get_session, init_db)
from .garmin import GarminClient
from .utils import logger
|
||||
|
||||
# Task-queue priority levels; PriorityQueue serves the lowest number first.
PRIORITY_HIGH = 1    # API requests
PRIORITY_MEDIUM = 2  # Sync jobs
PRIORITY_LOW = 3     # Reprocessing
|
||||
|
||||
class GarminSyncDaemon:
    """Background service that periodically syncs and reprocesses activities.

    A BackgroundScheduler enqueues work items into a priority queue; a single
    worker thread drains the queue and runs each job on an executor, so the
    scheduler thread is never blocked by a long download.
    """

    def __init__(self):
        self.scheduler = BackgroundScheduler()
        self.running = False
        self.web_server = None
        # Executor for long-running jobs. Fix: this must be a *thread* pool,
        # not a process pool — the submitted callables are bound methods of
        # this object, which holds threading locks and a queue that cannot be
        # pickled for a child process, and the sync_lock / sync_in_progress
        # guard only works when the job runs in the same process.
        self.executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=max(1, (os.cpu_count() or 2) - 1)  # cpu_count() may be None
        )
        # Priority queue for task scheduling (lower number = higher priority).
        self.task_queue = PriorityQueue()
        # Worker thread draining task_queue; daemonized so it never blocks exit.
        self.worker_thread = threading.Thread(target=self._process_tasks, daemon=True)
        # Lock for database access during migration.
        self.db_lock = threading.Lock()
        # Thread lock + flag to prevent concurrent sync operations.
        self.sync_lock = threading.Lock()
        self.sync_in_progress = False

    def start(self, web_port=8888, run_migrations=True):
        """Start daemon with scheduler, task worker and web UI.

        Blocks the calling thread until stop() is invoked or a shutdown
        signal is received.
        """
        try:
            # Initialize database. Fix: init_db() is an async coroutine (see
            # database.py), so it must be driven to completion by an event
            # loop — calling it bare only created a never-awaited coroutine.
            with self.db_lock:
                asyncio.run(init_db())

            # Set migration flag for the container entrypoint.
            os.environ['RUN_MIGRATIONS'] = "1" if run_migrations else "0"

            # Fix: self.running must be True *before* the worker starts —
            # the worker loop is `while self.running`, so starting it first
            # made it exit immediately and no task was ever processed.
            self.running = True
            self.worker_thread.start()

            # Load configuration from database
            config_data = self.load_config()

            # Setup scheduled jobs
            if config_data["enabled"]:
                # Sync job
                cron_str = config_data["schedule_cron"]
                try:
                    # A valid crontab expression has exactly five fields.
                    if not cron_str or len(cron_str.strip().split()) != 5:
                        logger.error(
                            f"Invalid cron schedule: '{cron_str}'. Using default '0 */6 * * *'"
                        )
                        cron_str = "0 */6 * * *"

                    self.scheduler.add_job(
                        func=self._enqueue_sync,
                        trigger=CronTrigger.from_crontab(cron_str),
                        id="sync_job",
                        replace_existing=True,
                    )
                    logger.info(f"Sync job scheduled with cron: '{cron_str}'")
                except Exception as e:
                    logger.error(f"Failed to create sync job: {str(e)}")
                    # Fallback to default schedule
                    self.scheduler.add_job(
                        func=self._enqueue_sync,
                        trigger=CronTrigger.from_crontab("0 */6 * * *"),
                        id="sync_job",
                        replace_existing=True,
                    )
                    logger.info("Using default schedule for sync job: '0 */6 * * *'")

                # Reprocess job - run daily at 2 AM
                reprocess_cron = "0 2 * * *"
                try:
                    self.scheduler.add_job(
                        func=self._enqueue_reprocess,
                        trigger=CronTrigger.from_crontab(reprocess_cron),
                        id="reprocess_job",
                        replace_existing=True,
                    )
                    logger.info(f"Reprocess job scheduled with cron: '{reprocess_cron}'")
                except Exception as e:
                    logger.error(f"Failed to create reprocess job: {str(e)}")

            # Start scheduler
            self.scheduler.start()

            # Update daemon status to running
            self.update_daemon_status("running")

            # Start web UI in separate thread
            self.start_web_ui(web_port)

            # Setup signal handlers for graceful shutdown
            signal.signal(signal.SIGINT, self.signal_handler)
            signal.signal(signal.SIGTERM, self.signal_handler)

            logger.info(
                f"Daemon started. Web UI available at http://localhost:{web_port}"
            )

            # Keep the main thread alive; stop() flips self.running.
            while self.running:
                time.sleep(1)

        except Exception as e:
            logger.error(f"Failed to start daemon: {str(e)}")
            self.update_daemon_status("error")
            self.stop()

    def _enqueue_sync(self):
        """Enqueue sync job with medium priority."""
        self.task_queue.put((PRIORITY_MEDIUM, ("sync", None)))
        logger.debug("Enqueued sync job")

    def _enqueue_reprocess(self):
        """Enqueue reprocess job with low priority."""
        self.task_queue.put((PRIORITY_LOW, ("reprocess", None)))
        logger.debug("Enqueued reprocess job")

    def _process_tasks(self):
        """Worker loop: drain the priority queue until the daemon stops."""
        logger.info("Task worker started")
        while self.running:
            try:
                priority, (task_type, data) = self.task_queue.get(timeout=1)
            except Empty:
                # Fix: Queue.get(timeout=...) raises queue.Empty, which the
                # old `except Exception` clause logged as an error every
                # second (and the asyncio.TimeoutError clause below it was
                # unreachable). An empty queue is normal — just poll again
                # so we also notice shutdown promptly.
                continue

            try:
                logger.info(f"Processing {task_type} task (priority {priority})")

                if task_type == "sync":
                    self._execute_in_process_pool(self.sync_and_download)
                elif task_type == "reprocess":
                    self._execute_in_process_pool(self.reprocess_activities)
                elif task_type == "api":
                    # Placeholder for high-priority API tasks
                    logger.debug(f"Processing API task: {data}")
            except Exception as e:
                logger.error(f"Task processing error: {str(e)}")
            finally:
                self.task_queue.task_done()
        logger.info("Task worker stopped")

    def _execute_in_process_pool(self, func):
        """Run *func* on the executor and wait for it, logging the outcome.

        Blocking here keeps tasks strictly ordered; we are already on the
        worker thread, so the scheduler/main thread is not held up.
        """
        try:
            future = self.executor.submit(func)
            result = future.result()
            logger.debug(f"Process pool task completed: {result}")
        except Exception as e:
            logger.error(f"Process pool task failed: {str(e)}")

    def sync_and_download(self):
        """Sync the activity catalogue, then download any missing FIT files."""
        # Check if sync is already in progress
        if not self.sync_lock.acquire(blocking=False):
            logger.info("Sync already in progress, skipping this run")
            return

        # Fix: bind session before the try so the finally clause cannot hit
        # an unbound name when an error occurs before the session is opened.
        session = None
        try:
            self.sync_in_progress = True
            self.log_operation("sync", "started")

            # Import here to avoid circular imports
            from .database import sync_database
            from .garmin import GarminClient

            client = GarminClient()

            # Fix: sync_database() is async (see database.py); drive it here.
            with self.db_lock:
                asyncio.run(sync_database(client))

            # Download missing activities
            downloaded_count = 0
            session = get_legacy_session()
            missing_activities = (
                session.query(Activity).filter_by(downloaded=False).all()
            )

            for activity in missing_activities:
                try:
                    # Download FIT file
                    fit_data = client.download_activity_fit(activity.activity_id)

                    # Save to DATA_DIR (default: ./data), creating it on demand.
                    import os
                    from pathlib import Path
                    data_dir = Path(os.getenv("DATA_DIR", "data"))
                    data_dir.mkdir(parents=True, exist_ok=True)
                    # Colons/spaces are not filename-safe on every platform.
                    timestamp = activity.start_time.replace(":", "-").replace(" ", "_")
                    filename = f"activity_{activity.activity_id}_{timestamp}.fit"
                    filepath = data_dir / filename

                    with open(filepath, "wb") as f:
                        f.write(fit_data)

                    # Update activity record
                    activity.filename = str(filepath)
                    activity.downloaded = True
                    activity.last_sync = datetime.now().isoformat()

                    # Get metrics immediately after download
                    metrics = get_activity_metrics(activity, client)
                    if metrics:
                        activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                        activity.duration = int(float(metrics.get("duration", 0)))
                        activity.distance = float(metrics.get("distance", 0))
                        activity.max_heart_rate = int(float(metrics.get("maxHR", 0)))
                        activity.avg_power = float(metrics.get("avgPower", 0))
                        activity.calories = int(float(metrics.get("calories", 0)))

                    session.commit()
                    downloaded_count += 1

                except Exception as e:
                    logger.error(
                        f"Failed to download activity {activity.activity_id}: {e}"
                    )
                    session.rollback()

            self.log_operation(
                "sync", "success",
                f"Downloaded {downloaded_count} new activities and updated metrics"
            )

            # Update last run time
            self.update_daemon_last_run()

        except Exception as e:
            logger.error(f"Sync failed: {e}")
            self.log_operation("sync", "error", str(e))
        finally:
            self.sync_in_progress = False
            self.sync_lock.release()
            if session is not None:
                session.close()

    def load_config(self):
        """Load daemon configuration from database and return it as a dict.

        Creates a default row (enabled, every 6 hours) on first run. A plain
        dict is returned so callers never touch a detached ORM instance.
        """
        session = get_session()
        try:
            config = session.query(DaemonConfig).first()
            if not config:
                # Create default configuration with explicit cron schedule
                config = DaemonConfig(
                    schedule_cron="0 */6 * * *", enabled=True, status="stopped"
                )
                session.add(config)
                session.commit()
                session.refresh(config)  # Ensure we have the latest data

            return {
                "id": config.id,
                "enabled": config.enabled,
                "schedule_cron": config.schedule_cron,
                "last_run": config.last_run,
                "next_run": config.next_run,
                "status": config.status,
            }
        finally:
            session.close()

    def update_daemon_status(self, status):
        """Persist the daemon status string ("running"/"stopped"/"error")."""
        session = get_session()
        try:
            config = session.query(DaemonConfig).first()
            if not config:
                config = DaemonConfig()
                session.add(config)

            config.status = status
            session.commit()
        finally:
            session.close()

    def update_daemon_last_run(self):
        """Record the current time as the daemon's last successful run."""
        session = get_session()
        try:
            config = session.query(DaemonConfig).first()
            if config:
                config.last_run = datetime.now().isoformat()
                session.commit()
        finally:
            session.close()

    def start_web_ui(self, port):
        """Start the FastAPI web server on a daemon thread (non-blocking)."""
        try:
            import uvicorn
            from .web.app import app

            # Add shutdown hook to stop worker thread
            @app.on_event("shutdown")
            def shutdown_event():
                logger.info("Web server shutting down")
                self.running = False
                self.worker_thread.join(timeout=5)

            def run_server():
                try:
                    # Use async execution model for better concurrency
                    config = uvicorn.Config(
                        app,
                        host="0.0.0.0",
                        port=port,
                        log_level="info",
                        workers=1,
                        loop="asyncio"
                    )
                    server = uvicorn.Server(config)
                    server.run()
                except Exception as e:
                    logger.error(f"Failed to start web server: {e}")

            web_thread = threading.Thread(target=run_server, daemon=True)
            web_thread.start()
            self.web_server = web_thread
        except ImportError as e:
            logger.warning(f"Could not start web UI: {e}")

    def signal_handler(self, signum, frame):
        """Handle SIGINT/SIGTERM by shutting the daemon down cleanly."""
        logger.info("Received shutdown signal, stopping daemon...")
        self.stop()

    def is_sync_in_progress(self):
        """Return True while a sync operation is currently running."""
        return self.sync_in_progress

    def stop(self):
        """Stop daemon and clean up resources."""
        if self.scheduler.running:
            self.scheduler.shutdown()
        self.running = False
        # Release executor threads; don't wait so shutdown stays prompt.
        self.executor.shutdown(wait=False)
        self.update_daemon_status("stopped")
        self.log_operation("daemon", "stopped", "Daemon shutdown completed")
        logger.info("Daemon stopped")

    def log_operation(self, operation, status, message=None):
        """Append a SyncLog row describing an operation attempt."""
        session = get_session()
        try:
            log = SyncLog(
                timestamp=datetime.now().isoformat(),
                operation=operation,
                status=status,
                message=message,
                activities_processed=0,  # Can be updated later if needed
                activities_downloaded=0,  # Can be updated later if needed
            )
            session.add(log)
            session.commit()
        except Exception as e:
            logger.error(f"Failed to log operation: {e}")
        finally:
            session.close()

    def count_missing(self):
        """Return the number of activities not yet downloaded."""
        session = get_session()
        try:
            return session.query(Activity).filter_by(downloaded=False).count()
        finally:
            session.close()

    def reprocess_activities(self):
        """Reprocess downloaded activities that still lack computed metrics."""
        from tqdm import tqdm

        from .database import Activity, get_session

        logger.info("Starting reprocess job")
        session = get_session()
        try:
            # Get activities that need reprocessing
            activities = session.query(Activity).filter(
                Activity.downloaded == True,
                Activity.reprocessed == False
            ).all()

            if not activities:
                logger.info("No activities to reprocess")
                return

            logger.info(f"Reprocessing {len(activities)} activities")
            success_count = 0

            for activity in tqdm(activities, desc="Reprocessing"):
                try:
                    # Use force_reprocess=True to ensure we parse the file again
                    metrics = get_activity_metrics(activity, client=None, force_reprocess=True)

                    # Update activity metrics if we got new data
                    if metrics:
                        activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                        activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
                        activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
                        activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
                        activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
                        activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
                        activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories

                    # Mark as reprocessed regardless of success
                    activity.reprocessed = True
                    session.commit()
                    success_count += 1

                except Exception as e:
                    logger.error(f"Error reprocessing activity {activity.activity_id}: {str(e)}")
                    session.rollback()

            logger.info(f"Reprocessed {success_count}/{len(activities)} activities successfully")
            self.log_operation("reprocess", "success", f"Reprocessed {success_count} activities")
            self.update_daemon_last_run()

        except Exception as e:
            logger.error(f"Reprocess job failed: {str(e)}")
            self.log_operation("reprocess", "error", str(e))
        finally:
            session.close()
|
||||
234
examples/GarminSync/garminsync/database.py
Normal file
234
examples/GarminSync/garminsync/database.py
Normal file
@@ -0,0 +1,234 @@
|
||||
"""Database module for GarminSync application with async support."""
|
||||
|
||||
import os
|
||||
from datetime import datetime
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from sqlalchemy import Boolean, Column, Float, Integer, String
|
||||
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
|
||||
from sqlalchemy.ext.asyncio import async_sessionmaker
|
||||
from sqlalchemy.future import select
|
||||
from sqlalchemy.orm import declarative_base
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.orm import selectinload, joinedload
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
class Activity(Base):
    """Activity model representing a Garmin activity record.

    NOTE(review): CLI/daemon code also assigns ``analyzed``, ``gear_ratio``
    and ``gear_inches`` on instances, but those columns are not declared
    here — presumably added by the migration in migrate_activities; confirm.
    """

    __tablename__ = "activities"

    activity_id = Column(Integer, primary_key=True)  # Garmin Connect activity id
    start_time = Column(String, nullable=False)  # local start timestamp (string form)
    activity_type = Column(String, nullable=True)  # e.g. "cycling"
    duration = Column(Integer, nullable=True)
    distance = Column(Float, nullable=True)
    max_heart_rate = Column(Integer, nullable=True)
    avg_heart_rate = Column(Integer, nullable=True)
    avg_power = Column(Float, nullable=True)
    calories = Column(Integer, nullable=True)
    filename = Column(String, unique=True, nullable=True)  # path of the downloaded FIT file
    downloaded = Column(Boolean, default=False, nullable=False)
    reprocessed = Column(Boolean, default=False, nullable=False)
    created_at = Column(String, nullable=False)  # ISO timestamp when the row was created
    last_sync = Column(String, nullable=True)  # ISO timestamp of the last sync touching this row

    @classmethod
    async def get_paginated(cls, db, page=1, per_page=10):
        """Get a paginated list of activities (async), newest first.

        Args:
            db: async session factory/engine supporting ``begin()``.
            page: 1-based page number.
            per_page: page size.

        Returns:
            dict with "items", "page", "per_page", "total" and "pages".
        """
        from sqlalchemy import func  # local import: not in the module header

        async with db.begin() as session:
            query = select(cls).order_by(cls.start_time.desc())
            result = await session.execute(
                query.offset((page - 1) * per_page).limit(per_page)
            )
            activities = result.scalars().all()
            # Fix: `select(select(cls).count())` is not valid SQLAlchemy —
            # Select has no .count(); issue a COUNT(*) over the table instead.
            count_result = await session.execute(
                select(func.count()).select_from(cls)
            )
            total = count_result.scalar_one()
            return {
                "items": activities,
                "page": page,
                "per_page": per_page,
                "total": total,
                "pages": (total + per_page - 1) // per_page
            }

    def to_dict(self):
        """Convert activity to dictionary representation."""
        return {
            "id": self.activity_id,
            "name": self.filename or "Unnamed Activity",
            "distance": self.distance,
            "duration": self.duration,
            "start_time": self.start_time,
            "activity_type": self.activity_type,
            "max_heart_rate": self.max_heart_rate,
            "avg_heart_rate": self.avg_heart_rate,
            "avg_power": self.avg_power,
            "calories": self.calories,
        }
|
||||
|
||||
|
||||
class DaemonConfig(Base):
    """Daemon configuration model.

    Singleton-style table (id defaults to 1) holding scheduler settings and
    the daemon's last known state.
    """

    __tablename__ = "daemon_config"

    id = Column(Integer, primary_key=True, default=1)  # single-row table by convention
    enabled = Column(Boolean, default=True, nullable=False)  # master switch for scheduled jobs
    schedule_cron = Column(String, default="0 */6 * * *", nullable=False)  # crontab for the sync job
    last_run = Column(String, nullable=True)  # ISO timestamp of the last run
    next_run = Column(String, nullable=True)
    status = Column(String, default="stopped", nullable=False)  # "running" / "stopped" / "error"

    @classmethod
    async def get(cls, db):
        """Get configuration record (async).

        ``db`` must support ``begin()`` as an async context manager yielding
        a session; returns the first row or None when none exists.
        """
        async with db.begin() as session:
            result = await session.execute(select(cls))
            return result.scalars().first()
|
||||
|
||||
|
||||
class SyncLog(Base):
    """Sync log model for tracking sync operations.

    One row per operation attempt; written by the daemon's log_operation().
    """

    __tablename__ = "sync_logs"

    id = Column(Integer, primary_key=True, autoincrement=True)
    timestamp = Column(String, nullable=False)  # ISO timestamp of the log entry
    operation = Column(String, nullable=False)  # e.g. "sync", "reprocess", "daemon"
    status = Column(String, nullable=False)  # e.g. "started", "success", "error"
    message = Column(String, nullable=True)  # free-form detail text
    activities_processed = Column(Integer, default=0, nullable=False)
    activities_downloaded = Column(Integer, default=0, nullable=False)
|
||||
|
||||
|
||||
# Database initialization and session management.
# Both globals stay None until init_db() populates them.
engine = None  # async SQLAlchemy engine (created in init_db)
async_session = None  # async_sessionmaker bound to the engine
|
||||
|
||||
async def init_db():
    """Initialize database connection and create tables.

    Populates the module-level ``engine`` and ``async_session`` globals from
    the DB_PATH environment variable (default: data/garmin.db) and creates
    any missing tables.
    """
    global engine, async_session
    db_path = os.getenv("DB_PATH", "data/garmin.db")
    # Fix: ensure the directory holding the SQLite file exists, otherwise
    # connecting fails on a fresh checkout where data/ is absent.
    parent = os.path.dirname(db_path)
    if parent:
        os.makedirs(parent, exist_ok=True)
    engine = create_async_engine(
        f"sqlite+aiosqlite:///{db_path}",
        pool_size=10,
        max_overflow=20,
        pool_pre_ping=True
    )
    async_session = async_sessionmaker(engine, expire_on_commit=False)

    # Create tables if they don't exist
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
|
||||
|
||||
|
||||
@asynccontextmanager
async def get_db():
    """Async context manager for database sessions.

    Yields a session from the module-level factory; commits on normal
    exit, rolls back and re-raises on SQLAlchemyError. Requires
    init_db() to have been called first (async_session must be set).
    """
    async with async_session() as session:
        try:
            yield session
            # Commit whatever the caller did once the block exits cleanly.
            await session.commit()
        except SQLAlchemyError:
            # Undo partial work, then propagate so callers see the failure.
            await session.rollback()
            raise
|
||||
|
||||
|
||||
# Compatibility layer for legacy sync functions
def get_legacy_session():
    """Temporary synchronous session for migration purposes.

    Builds a throwaway synchronous engine against the same SQLite file
    used by the async stack, ensures tables exist, and returns a fresh
    Session bound to it.
    """
    database_file = os.getenv("DB_PATH", "data/garmin.db")
    legacy_engine = create_engine(f"sqlite:///{database_file}")
    Base.metadata.create_all(legacy_engine)
    session_factory = sessionmaker(bind=legacy_engine)
    return session_factory()
|
||||
|
||||
|
||||
async def sync_database(garmin_client):
    """Sync local database with Garmin Connect activities (async).

    Fetches up to 1000 activities from the Garmin API, inserts rows for
    activities not yet in the local DB, refreshes metrics via the shared
    activity parser, and stamps last_sync on every processed row.

    Args:
        garmin_client: authenticated client exposing get_activities().

    Raises:
        SQLAlchemyError: re-raised after rollback on any DB failure.
    """
    # Imported here rather than at module top — presumably to avoid a
    # circular import; confirm against garminsync.activity_parser.
    from garminsync.activity_parser import get_activity_metrics
    async with get_db() as session:
        try:
            # Synchronous client call; 0 / 1000 are start index and limit.
            activities = garmin_client.get_activities(0, 1000)

            if not activities:
                print("No activities returned from Garmin API")
                return

            for activity_data in activities:
                # Defensive: skip malformed entries from the API.
                if not isinstance(activity_data, dict):
                    print(f"Invalid activity data: {activity_data}")
                    continue

                activity_id = activity_data.get("activityId")
                start_time = activity_data.get("startTimeLocal")

                if not activity_id or not start_time:
                    print(f"Missing required fields in activity: {activity_data}")
                    continue

                # Look up an existing row for this Garmin activity id.
                result = await session.execute(
                    select(Activity).filter_by(activity_id=activity_id)
                )
                existing = result.scalars().first()

                # Create or update basic activity info
                if not existing:
                    activity = Activity(
                        activity_id=activity_id,
                        start_time=start_time,
                        downloaded=False,
                        created_at=datetime.now().isoformat(),
                        last_sync=datetime.now().isoformat(),
                    )
                    session.add(activity)
                else:
                    activity = existing

                # Update metrics using shared parser
                metrics = get_activity_metrics(activity, garmin_client)
                if metrics:
                    activity.activity_type = metrics.get("activityType", {}).get("typeKey")
                    # ... rest of metric processing ...

                # Update sync timestamp
                activity.last_sync = datetime.now().isoformat()

            # get_db() also commits on exit; this makes it explicit here.
            await session.commit()
        except SQLAlchemyError as e:
            await session.rollback()
            raise e
|
||||
|
||||
|
||||
async def get_offline_stats():
    """Return statistics about cached data without API calls (async).

    Returns a dict with total/downloaded/missing activity counts and the
    most recent last_sync value, or zeroed values on a database error.
    """
    async with get_db() as session:
        try:
            # NOTE(review): counts are computed by materializing all rows;
            # a SELECT COUNT(*) would avoid loading every Activity.
            # Acceptable for a small local SQLite DB.
            result = await session.execute(select(Activity))
            total = len(result.scalars().all())

            result = await session.execute(
                select(Activity).filter_by(downloaded=True)
            )
            downloaded = len(result.scalars().all())

            # Most recently synced activity supplies the "last_sync" stamp.
            result = await session.execute(
                select(Activity).order_by(Activity.last_sync.desc())
            )
            last_sync = result.scalars().first()

            return {
                "total": total,
                "downloaded": downloaded,
                "missing": total - downloaded,
                "last_sync": last_sync.last_sync if last_sync else "Never synced",
            }
        except SQLAlchemyError as e:
            # Degrade gracefully so dashboards still render with zeroes.
            print(f"Database error: {e}")
            return {
                "total": 0,
                "downloaded": 0,
                "missing": 0,
                "last_sync": "Error"
            }
|
||||
@@ -0,0 +1,73 @@
|
||||
import numpy as np
|
||||
|
||||
class SinglespeedAnalyzer:
    """Estimates a singlespeed bike's gearing from recorded ride data."""

    def __init__(self):
        self.chainring_options = [38, 46]  # teeth
        self.common_cogs = list(range(11, 28))  # 11t to 27t rear cogs
        self.wheel_circumference_m = 2.096  # 700x25c tire

    def analyze_gear_ratio(self, speed_data, cadence_data, gradient_data,
                           max_flat_gradient=3.0, min_speed_ms=4.17):
        """Determine most likely singlespeed gear ratio.

        Args:
            speed_data: speeds in m/s, one sample per point.
            cadence_data: cadences in rpm, aligned with speed_data.
            gradient_data: gradients in percent, aligned with speed_data.
            max_flat_gradient: samples with |gradient| below this (%) are
                treated as flat terrain (default 3.0).
            min_speed_ms: minimum speed in m/s for a sample to count
                (default 4.17 m/s = 15 km/h).

        Returns:
            Dict with estimated chainring/cog teeth, gear ratio, gear
            inches, development and a confidence score, or None if there
            is not enough usable data.

        Raises:
            ValueError: if any input is empty or lengths mismatch.
        """
        # Validate input parameters
        if not speed_data or not cadence_data or not gradient_data:
            raise ValueError("Input data cannot be empty")
        if len(speed_data) != len(cadence_data) or len(speed_data) != len(gradient_data):
            raise ValueError("Input data arrays must be of equal length")

        # Keep only flat-terrain samples: gradient dominates the
        # speed/cadence relationship on climbs and descents.
        flat_indices = [i for i, grad in enumerate(gradient_data)
                        if abs(grad) < max_flat_gradient]
        flat_speeds = [speed_data[i] for i in flat_indices]
        flat_cadences = [cadence_data[i] for i in flat_indices]

        # Require real pedaling at road speed; coasting (cadence 0) and
        # very low speeds carry no gearing information.
        valid_indices = [i for i in range(len(flat_speeds))
                         if flat_speeds[i] > min_speed_ms and flat_cadences[i] > 0]

        if not valid_indices:
            return None  # Not enough data

        valid_speeds = [flat_speeds[i] for i in valid_indices]
        valid_cadences = [flat_cadences[i] for i in valid_indices]

        # Gear ratio = wheel revolutions per crank revolution:
        # (speed m/s * 60 s/min) / (cadence rpm * wheel circumference m)
        gear_ratios = [
            (speed * 60) / (cadence * self.wheel_circumference_m)
            for speed, cadence in zip(valid_speeds, valid_cadences)
        ]
        avg_gear_ratio = sum(gear_ratios) / len(gear_ratios)

        # Find the chainring/cog pair whose theoretical ratio is closest
        # to the observed average (strict < keeps the first best match).
        best_fit = None
        min_diff = float('inf')
        for chainring in self.chainring_options:
            for cog in self.common_cogs:
                theoretical_ratio = chainring / cog
                diff = abs(theoretical_ratio - avg_gear_ratio)
                if diff < min_diff:
                    min_diff = diff
                    best_fit = (chainring, cog, theoretical_ratio)

        if not best_fit:
            return None

        chainring, cog, ratio = best_fit

        # Classic gearing metrics derived from the fitted ratio.
        wheel_diameter_inches = 27.0  # nominal 700c wheel diameter
        gear_inches = ratio * wheel_diameter_inches
        development_meters = ratio * self.wheel_circumference_m

        # Confidence = 1 - relative error of the best match.
        confidence = max(0, 1 - (min_diff / ratio)) if ratio > 0 else 0

        return {
            'estimated_chainring_teeth': chainring,
            'estimated_cassette_teeth': cog,
            'gear_ratio': ratio,
            'gear_inches': gear_inches,
            'development_meters': development_meters,
            'confidence_score': confidence
        }
|
||||
@@ -0,0 +1,44 @@
|
||||
import numpy as np
|
||||
|
||||
class PowerEstimator:
    """Physics-based cycling power estimator.

    Models power as rolling resistance + gravity + aerodynamic drag,
    divided by drivetrain efficiency, using fixed rider/bike defaults.
    """

    def __init__(self):
        self.bike_weight_kg = 10.0  # 22 lbs
        self.rider_weight_kg = 75.0  # Default assumption
        self.drag_coefficient = 0.88  # Road bike
        self.frontal_area_m2 = 0.4  # Typical road cycling position
        self.rolling_resistance = 0.004  # Road tires
        self.drivetrain_efficiency = 0.97
        self.air_density = 1.225  # kg/m³ at sea level, 20°C

    def calculate_power(self, speed_ms, gradient_percent,
                        air_temp_c=20, altitude_m=0):
        """Calculate estimated power (watts) using a physics model.

        Args:
            speed_ms: ground speed in m/s (must be >= 0).
            gradient_percent: road gradient in percent.
            air_temp_c: ambient temperature in °C (default 20).
            altitude_m: altitude in meters (default 0).

        Raises:
            ValueError: on negative speed or non-numeric inputs.
        """
        if not isinstance(speed_ms, (int, float)) or speed_ms < 0:
            raise ValueError("Speed must be a non-negative number")
        if not isinstance(gradient_percent, (int, float)):
            raise ValueError("Gradient must be a number")

        # Local air density from the barometric formula + ideal gas law.
        kelvin = air_temp_c + 273.15
        pressure_pa = 101325 * (1 - 0.0000225577 * altitude_m) ** 5.25588
        rho = pressure_pa / (287.05 * kelvin)

        # Gradient percent -> slope angle in radians.
        slope_angle = np.arctan(gradient_percent / 100.0)
        mass_kg = self.bike_weight_kg + self.rider_weight_kg

        # Three resistive components, each multiplied by speed for power.
        rolling = self.rolling_resistance * mass_kg * 9.81 * np.cos(slope_angle) * speed_ms
        climbing = mass_kg * 9.81 * np.sin(slope_angle) * speed_ms
        drag = 0.5 * rho * self.drag_coefficient * self.frontal_area_m2 * speed_ms ** 3

        # Rider must overcome drivetrain losses on top of the above.
        return (rolling + climbing + drag) / self.drivetrain_efficiency

    def estimate_peak_power(self, power_values, durations):
        """Calculate peak power for various durations (Phase 3 stub)."""
        return {}
|
||||
196
examples/GarminSync/garminsync/garmin.py
Normal file
196
examples/GarminSync/garminsync/garmin.py
Normal file
@@ -0,0 +1,196 @@
|
||||
"""Garmin API client module for GarminSync application."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
|
||||
from garminconnect import (Garmin, GarminConnectAuthenticationError,
|
||||
GarminConnectConnectionError,
|
||||
GarminConnectTooManyRequestsError)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GarminClient:
    """Garmin API client for interacting with Garmin Connect services."""

    def __init__(self):
        # Underlying garminconnect.Garmin instance; created lazily by
        # authenticate() on first use.
        self.client = None

    def authenticate(self):
        """Authenticate using credentials from environment variables.

        Reads GARMIN_EMAIL / GARMIN_PASSWORD, logs in and caches the
        client on self.client.

        Raises:
            ValueError: missing credentials or authentication failure.
            ConnectionError: Garmin Connect unreachable.
            RuntimeError: any other unexpected failure.
        """
        email = os.getenv("GARMIN_EMAIL")
        password = os.getenv("GARMIN_PASSWORD")

        if not email or not password:
            raise ValueError("Garmin credentials not found in environment variables")

        try:
            self.client = Garmin(email, password)
            self.client.login()
            logger.info("Successfully authenticated with Garmin Connect")
            return self.client
        except GarminConnectAuthenticationError as e:
            logger.error("Authentication failed: %s", e)
            raise ValueError(f"Garmin authentication failed: {e}") from e
        except GarminConnectConnectionError as e:
            logger.error("Connection error: %s", e)
            raise ConnectionError(f"Failed to connect to Garmin Connect: {e}") from e
        except Exception as e:
            logger.error("Unexpected error during authentication: %s", e)
            raise RuntimeError(f"Unexpected error during authentication: {e}") from e

    def get_activities(self, start=0, limit=10):
        """Get list of activities with rate limiting

        Args:
            start: Starting index for activities
            limit: Maximum number of activities to return

        Returns:
            List of activities or None if failed

        Raises:
            ValueError: If authentication fails
            ConnectionError: If connection to Garmin fails
            RuntimeError: For other unexpected errors
        """
        # Lazy login so callers don't need to authenticate explicitly.
        if not self.client:
            self.authenticate()

        try:
            activities = self.client.get_activities(start, limit)
            time.sleep(2)  # Rate limiting
            logger.info("Retrieved %d activities", len(activities) if activities else 0)
            return activities
        except (GarminConnectConnectionError, TimeoutError, GarminConnectTooManyRequestsError) as e:
            logger.error("Network error while fetching activities: %s", e)
            raise ConnectionError(f"Failed to fetch activities: {e}") from e
        except Exception as e:  # pylint: disable=broad-except
            logger.error("Unexpected error while fetching activities: %s", e)
            raise RuntimeError(f"Failed to fetch activities: {e}") from e

    def download_activity_fit(self, activity_id):
        """Download .fit file for a specific activity.

        Tries several download call signatures in order and returns the
        first non-empty payload.

        Raises:
            RuntimeError: when every download method fails.
        """
        if not self.client:
            self.authenticate()

        print(f"Attempting to download activity {activity_id}")

        # Try multiple methods to download FIT file
        methods_to_try = [
            # Method 1: No format parameter (most likely to work)
            lambda: self.client.download_activity(activity_id),
            # Method 2: Use correct parameter name with different values
            lambda: self.client.download_activity(activity_id, dl_fmt="FIT"),
            lambda: self.client.download_activity(
                activity_id, dl_fmt="tcx"
            ),  # Fallback format
        ]

        last_exception = None

        for i, method in enumerate(methods_to_try, 1):
            try:
                # Try the download method
                print(f"Trying download method {i}...")
                fit_data = method()

                if fit_data:
                    print(
                        f"Successfully downloaded {len(fit_data)} bytes using method {i}"
                    )
                    time.sleep(2)  # Rate limiting
                    return fit_data
                print(f"Method {i} returned empty data")

            # Catch connection errors specifically
            except (GarminConnectConnectionError, ConnectionError) as e:  # pylint: disable=duplicate-except
                print(f"Method {i} failed with connection error: {e}")
                last_exception = e
                continue
            # Catch all other exceptions as a fallback
            except (TimeoutError, GarminConnectTooManyRequestsError) as e:
                print(f"Method {i} failed with retryable error: {e}")
                last_exception = e
                continue
            except Exception as e:  # pylint: disable=broad-except
                print(f"Method {i} failed with unexpected error: "
                      f"{type(e).__name__}: {e}")
                last_exception = e
                continue

        # If all methods failed, raise the last exception
        if last_exception:
            raise RuntimeError(
                f"All download methods failed. Last error: {last_exception}"
            ) from last_exception
        raise RuntimeError(
            "All download methods failed, but no specific error was captured"
        )

    def get_activity_details(self, activity_id):
        """Get detailed information about a specific activity

        Args:
            activity_id: ID of the activity to retrieve

        Returns:
            Activity details dictionary or None if failed
        """
        if not self.client:
            self.authenticate()

        try:
            activity_details = self.client.get_activity(activity_id)
            time.sleep(2)  # Rate limiting
            logger.info("Retrieved details for activity %s", activity_id)
            return activity_details
        except (GarminConnectConnectionError, TimeoutError) as e:
            logger.error(
                "Connection/timeout error fetching activity details for %s: %s",
                activity_id, e
            )
            return None
        except Exception as e:  # pylint: disable=broad-except
            logger.error("Unexpected error fetching activity details for %s: %s", activity_id, e)
            return None
|
||||
|
||||
# Example usage and testing function


def test_download(activity_id):
    """Test function to verify download functionality"""
    client = GarminClient()
    try:
        fit_data = client.download_activity_fit(activity_id)

        # A FIT file is never smaller than its 14-byte header.
        if not fit_data or len(fit_data) <= 14:
            print("❌ Downloaded data is empty or too small")
            return None

        header = fit_data[:14]
        looks_like_fit = b".FIT" in header or header[8:12] == b".FIT"
        if looks_like_fit:
            print("✅ Downloaded data appears to be a valid FIT file")
        else:
            print("⚠️ Downloaded data may not be a FIT file")
            print(f"Header: {header}")
        return fit_data

    except Exception as e:  # pylint: disable=broad-except
        print(f"❌ Test failed: {e}")
        return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Test with a sample activity ID if provided
    import sys

    # CLI smoke test: `python garmin.py <activity_id>` downloads one
    # activity and reports whether the payload looks like a FIT file.
    if len(sys.argv) > 1:
        test_activity_id = sys.argv[1]
        print(f"Testing download for activity ID: {test_activity_id}")
        test_download(test_activity_id)
    else:
        print("Usage: python garmin.py <activity_id>")
        print("This will test the download functionality with the provided activity ID")
|
||||
131
examples/GarminSync/garminsync/migrate_activities.py
Normal file
131
examples/GarminSync/garminsync/migrate_activities.py
Normal file
@@ -0,0 +1,131 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Migration script to populate activity fields from FIT files or Garmin API
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
import logging
|
||||
|
||||
from sqlalchemy import MetaData, Table, create_engine, text
|
||||
from sqlalchemy.exc import OperationalError
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Add parent directory to path to import garminsync modules
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from garminsync.database import Activity, get_session, init_db
|
||||
from garminsync.garmin import GarminClient
|
||||
from garminsync.activity_parser import get_activity_metrics
|
||||
|
||||
def _apply_metrics(activity, activity_details):
    """Copy parsed metric fields from an activity-details dict onto a row.

    Mutates ``activity`` in place; fields absent from the details are
    left untouched.
    """
    activity.activity_type = activity_details.get("activityType", {}).get("typeKey", "Unknown")

    # Every numeric metric lives under summaryDTO; fetch it once instead
    # of re-walking the dict per field.
    summary = activity_details.get("summaryDTO", {})

    # Duration in seconds
    duration = summary.get("duration")
    if duration is not None:
        activity.duration = int(float(duration))

    # Distance in meters
    distance = summary.get("distance")
    if distance is not None:
        activity.distance = float(distance)

    max_hr = summary.get("maxHR")
    if max_hr is not None:
        activity.max_heart_rate = int(float(max_hr))

    avg_power = summary.get("avgPower")
    if avg_power is not None:
        activity.avg_power = float(avg_power)

    calories = summary.get("calories")
    if calories is not None:
        activity.calories = int(float(calories))


def migrate_activities():
    """Migrate activities to populate fields from FIT files or Garmin API.

    Returns:
        True when migration succeeded (rows updated, or nothing to do),
        False when it failed outright.
    """
    logger.info("Starting activity migration...")

    # We assume database schema has been updated via Alembic migrations
    # during container startup. Columns should already exist.

    # Initialize Garmin client; failure is non-fatal — the shared parser
    # may still work without API access.
    try:
        client = GarminClient()
        logger.info("Garmin client initialized successfully")
    except Exception as e:
        logger.error(f"Failed to initialize Garmin client: {e}")
        # Continue with migration but without Garmin data
        client = None

    # Get database session
    session = get_session()

    try:
        # Only rows never migrated (NULL activity_type) need work.
        activities = session.query(Activity).filter(Activity.activity_type.is_(None)).all()
        logger.info(f"Found {len(activities)} activities to migrate")

        # If no activities found, exit early
        if not activities:
            logger.info("No activities found for migration")
            return True

        updated_count = 0
        error_count = 0

        for i, activity in enumerate(activities):
            try:
                logger.info(f"Processing activity {i+1}/{len(activities)} (ID: {activity.activity_id})")

                # Use shared parser to get activity metrics
                activity_details = get_activity_metrics(activity, client)

                if activity_details:
                    logger.info(f"Successfully parsed metrics for activity {activity.activity_id}")
                    _apply_metrics(activity, activity_details)
                else:
                    # Set default values if we can't get details
                    activity.activity_type = "Unknown"
                    logger.warning(f"Could not retrieve metrics for activity {activity.activity_id}")

                # Update last sync timestamp
                activity.last_sync = datetime.now().isoformat()

                # Commit per row so one bad activity can't discard the rest.
                session.commit()
                updated_count += 1

                # Log progress every 10 activities
                if (i + 1) % 10 == 0:
                    logger.info(f"Progress: {i+1}/{len(activities)} activities processed")

            except Exception as e:
                logger.error(f"Error processing activity {activity.activity_id}: {e}")
                session.rollback()
                error_count += 1
                continue

        logger.info(f"Migration completed. Updated: {updated_count}, Errors: {error_count}")
        return updated_count > 0 or error_count == 0  # Success if we updated any or had no errors

    except Exception as e:
        logger.error(f"Migration failed: {e}")
        return False
    finally:
        session.close()
|
||||
|
||||
if __name__ == "__main__":
    # Exit code reflects migration outcome for container/CI scripting.
    success = migrate_activities()
    sys.exit(0 if success else 1)
|
||||
153
examples/GarminSync/garminsync/parsers/gpx_parser.py
Normal file
153
examples/GarminSync/garminsync/parsers/gpx_parser.py
Normal file
@@ -0,0 +1,153 @@
|
||||
import xml.etree.ElementTree as ET
|
||||
from datetime import datetime
|
||||
import math
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)

# XML namespaces: GPX 1.1 plus Garmin's TrackPointExtension, which lives
# in its own namespace (hr/cadence are not in the GPX namespace).
GPX_NS = {
    'gpx': 'http://www.topografix.com/GPX/1/1',
    'tpx': 'http://www.garmin.com/xmlschemas/TrackPointExtension/v1',
}


def parse_gpx_file(file_path):
    """Parse a GPX 1.1 file to extract activity metrics.

    Args:
        file_path: path to the .gpx file.

    Returns:
        Dict shaped like a Garmin activity-details payload
        ({"activityType": ..., "summaryDTO": {...}}), or None if the
        file has no track points or cannot be parsed.
    """
    try:
        tree = ET.parse(file_path)
        root = tree.getroot()
        ns = GPX_NS

        # Start time: prefer <metadata><time>, else fall back to the
        # first track point's <time>. NOTE: Element truthiness is based
        # on child count, so presence must be tested with "is not None".
        start_time = None
        metadata = root.find('gpx:metadata', ns)
        if metadata is not None:
            time_elem = metadata.find('gpx:time', ns)
            if time_elem is not None and time_elem.text:
                start_time = datetime.fromisoformat(time_elem.text.replace('Z', '+00:00'))
        if start_time is None:
            trkpt = root.find('.//gpx:trkpt', ns)
            if trkpt is not None:
                time_elem = trkpt.find('gpx:time', ns)
                if time_elem is not None and time_elem.text:
                    start_time = datetime.fromisoformat(time_elem.text.replace('Z', '+00:00'))

        # Get all track points
        track_points = root.findall('.//gpx:trkpt', ns)
        if not track_points:
            logger.warning(f"No track points found in GPX file: {file_path}")
            return None

        # Accumulators for activity metrics.
        total_distance = 0.0
        elevations = []
        heart_rates = []
        cadences = []
        last_time = None
        prev_point = None

        for point in track_points:
            # Parse coordinates
            lat = float(point.get('lat'))
            lon = float(point.get('lon'))

            # Parse elevation
            ele_elem = point.find('gpx:ele', ns)
            if ele_elem is not None and ele_elem.text:
                elevations.append(float(ele_elem.text))

            # Track the latest timestamp seen so duration spans the ride.
            time_elem = point.find('gpx:time', ns)
            if time_elem is not None and time_elem.text:
                last_time = datetime.fromisoformat(time_elem.text.replace('Z', '+00:00'))

            # Parse extensions (heart rate, cadence, etc.)
            extensions = point.find('gpx:extensions', ns)
            if extensions is not None:
                # Garmin TrackPointExtension; also try the GPX namespace
                # for non-conforming writers.
                tpe = extensions.find('tpx:TrackPointExtension', ns)
                if tpe is None:
                    tpe = extensions.find('gpx:TrackPointExtension', ns)
                if tpe is not None:
                    hr_elem = tpe.find('tpx:hr', ns)
                    if hr_elem is None:
                        hr_elem = tpe.find('gpx:hr', ns)
                    if hr_elem is not None and hr_elem.text:
                        heart_rates.append(int(hr_elem.text))

                    cad_elem = tpe.find('tpx:cad', ns)
                    if cad_elem is None:
                        cad_elem = tpe.find('gpx:cad', ns)
                    if cad_elem is not None and cad_elem.text:
                        cadences.append(int(cad_elem.text))

            # Calculate distance from previous point
            if prev_point:
                prev_lat, prev_lon = prev_point
                total_distance += haversine(prev_lat, prev_lon, lat, lon)

            prev_point = (lat, lon)

        # Duration from first to last timestamp, when both are known.
        duration = None
        if start_time is not None and last_time is not None:
            duration = (last_time - start_time).total_seconds()

        # Calculate elevation gain/loss
        elevation_gain = 0
        elevation_loss = 0
        if elevations:
            prev_ele = elevations[0]
            for ele in elevations[1:]:
                if ele > prev_ele:
                    elevation_gain += ele - prev_ele
                else:
                    elevation_loss += prev_ele - ele
                prev_ele = ele

        # Calculate averages
        avg_heart_rate = sum(heart_rates) / len(heart_rates) if heart_rates else None
        avg_cadence = sum(cadences) / len(cadences) if cadences else None

        return {
            "activityType": {"typeKey": "other"},
            "summaryDTO": {
                "startTime": start_time.isoformat() if start_time else None,
                "duration": duration,
                "distance": total_distance,
                "elevationGain": elevation_gain,
                "elevationLoss": elevation_loss,
                # None (not +/-inf) when the file has no elevation data.
                "minElevation": min(elevations) if elevations else None,
                "maxElevation": max(elevations) if elevations else None,
                "maxHR": max(heart_rates) if heart_rates else None,
                "avgHR": avg_heart_rate,
                "cadence": avg_cadence,
                "calories": None  # Calories not typically in GPX files
            }
        }

    except Exception as e:
        logger.error(f"Error parsing GPX file {file_path}: {str(e)}")
        return None


def haversine(lat1, lon1, lat2, lon2):
    """
    Calculate the great circle distance between two points
    on the earth (specified in decimal degrees)
    Returns distance in meters
    """
    # Convert decimal degrees to radians
    lon1, lat1, lon2, lat2 = map(math.radians, [lon1, lat1, lon2, lat2])

    # Haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = math.sin(dlat/2)**2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon/2)**2
    c = 2 * math.asin(math.sqrt(a))

    # Mean radius of earth in meters
    r = 6371000
    return c * r
|
||||
96
examples/GarminSync/garminsync/utils.py
Normal file
96
examples/GarminSync/garminsync/utils.py
Normal file
@@ -0,0 +1,96 @@
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
# Configure logging
def setup_logger(name="garminsync", level=logging.INFO):
    """Setup logger with consistent formatting"""
    log = logging.getLogger(name)

    # Already configured? Return as-is so handlers aren't duplicated.
    if log.handlers:
        return log

    log.setLevel(level)

    # Single stdout handler with a timestamped format.
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(level)
    stream_handler.setFormatter(
        logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    )
    log.addHandler(stream_handler)

    return log
|
||||
|
||||
|
||||
# Create default logger instance
|
||||
logger = setup_logger()
|
||||
|
||||
|
||||
def format_timestamp(timestamp_str=None):
    """Format timestamp string for display"""
    if not timestamp_str:
        return "Never"

    try:
        # Parse ISO format timestamp (accepting a trailing "Z").
        parsed = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
    except (ValueError, AttributeError):
        # Not ISO-8601 (or not a string): show the raw value unchanged.
        return timestamp_str
    return parsed.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def safe_filename(filename):
    """Make filename safe for filesystem use.

    Colons (common in timestamps) become "-", spaces become "_", and
    any remaining filesystem-reserved characters become "_".
    """
    import re

    # Handle timestamp-style separators first: if the generic
    # reserved-character sweep ran first it would turn ":" into "_"
    # before the ":" -> "-" replacement could apply.
    safe_name = filename.replace(":", "-").replace(" ", "_")
    # Replace characters that are invalid on common filesystems.
    safe_name = re.sub(r'[<>:"/\\|?*]', "_", safe_name)
    return safe_name
|
||||
|
||||
|
||||
def bytes_to_human_readable(bytes_count):
    """Convert bytes to human readable format"""
    if bytes_count == 0:
        return "0 B"

    size = bytes_count
    for unit in ("B", "KB", "MB", "GB"):
        if size < 1024.0:
            return f"{size:.1f} {unit}"
        size /= 1024.0
    # Anything >= 1024 GB is reported in terabytes.
    return f"{size:.1f} TB"
|
||||
|
||||
|
||||
def validate_cron_expression(cron_expr):
    """Basic validation of cron expression.

    Uses APScheduler's parser when available; otherwise falls back to a
    minimal five-field syntax check so callers still get a boolean
    instead of an uncaught ImportError.
    """
    try:
        from apscheduler.triggers.cron import CronTrigger
    except ImportError:
        return _basic_cron_check(cron_expr)

    try:
        # Try to create a CronTrigger with the expression
        CronTrigger.from_crontab(cron_expr)
        return True
    except (ValueError, TypeError):
        return False


def _basic_cron_check(cron_expr):
    """Fallback validator: five whitespace-separated cron fields."""
    import re

    if not isinstance(cron_expr, str):
        return False
    fields = cron_expr.split()
    if len(fields) != 5:
        return False
    # Digits, ranges, steps, lists, wildcards, and day/month names.
    return all(re.fullmatch(r"[A-Za-z0-9*/,\-]+", field) for field in fields)
|
||||
|
||||
|
||||
# Utility function for error handling
def handle_db_error(func):
    """Decorator for database operations with error handling.

    Logs and re-raises any exception so callers still see the failure,
    while preserving the wrapped function's name/docstring metadata.
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            logger.error(f"Database operation failed in {func.__name__}: {e}")
            raise

    return wrapper
|
||||
1
examples/GarminSync/garminsync/web/__init__.py
Normal file
1
examples/GarminSync/garminsync/web/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Empty file to mark this directory as a Python package
|
||||
107
examples/GarminSync/garminsync/web/app.py
Normal file
107
examples/GarminSync/garminsync/web/app.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from .routes import router
|
||||
|
||||
app = FastAPI(title="GarminSync Dashboard")

# Get the current directory path
current_dir = Path(__file__).parent

# Mount static files and templates with error handling: both are
# optional so the JSON API still works without a bundled web UI.
static_dir = current_dir / "static"
templates_dir = current_dir / "templates"

if static_dir.exists():
    app.mount("/static", StaticFiles(directory=str(static_dir)), name="static")

if templates_dir.exists():
    templates = Jinja2Templates(directory=str(templates_dir))
else:
    # Sentinel checked by each page route; JSON fallbacks are used instead.
    templates = None

# Include API routes
app.include_router(router)
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def dashboard(request: Request):
|
||||
"""Dashboard route with fallback for missing templates"""
|
||||
if not templates:
|
||||
# Return JSON response if templates are not available
|
||||
from garminsync.database import get_offline_stats
|
||||
|
||||
stats = get_offline_stats()
|
||||
return JSONResponse(
|
||||
{
|
||||
"message": "GarminSync Dashboard",
|
||||
"stats": stats,
|
||||
"note": "Web UI templates not found, showing JSON response",
|
||||
}
|
||||
)
|
||||
|
||||
try:
|
||||
# Get current statistics
|
||||
from garminsync.database import get_offline_stats
|
||||
|
||||
stats = get_offline_stats()
|
||||
|
||||
return templates.TemplateResponse(
|
||||
"dashboard.html", {"request": request, "stats": stats}
|
||||
)
|
||||
except Exception as e:
|
||||
return JSONResponse(
|
||||
{
|
||||
"error": f"Failed to load dashboard: {str(e)}",
|
||||
"message": "Dashboard unavailable, API endpoints still functional",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check():
|
||||
"""Health check endpoint"""
|
||||
return {"status": "healthy", "service": "GarminSync Dashboard"}
|
||||
|
||||
|
||||
@app.get("/config")
|
||||
async def config_page(request: Request):
|
||||
"""Configuration page"""
|
||||
if not templates:
|
||||
return JSONResponse(
|
||||
{
|
||||
"message": "Configuration endpoint",
|
||||
"note": "Use /api/schedule endpoints for configuration",
|
||||
}
|
||||
)
|
||||
|
||||
return templates.TemplateResponse("config.html", {"request": request})
|
||||
|
||||
|
||||
@app.get("/activities")
async def activities_page(request: Request):
    """Serve the activities page, or a JSON stub when templates are absent."""
    if templates is None:
        return JSONResponse({"message": "Activities endpoint"})

    return templates.TemplateResponse("activities.html", {"request": request})
|
||||
|
||||
|
||||
# Error handlers
|
||||
@app.exception_handler(404)
async def not_found_handler(request: Request, exc):
    """Return a structured JSON body for unknown paths."""
    body = {"error": "Not found", "path": str(request.url.path)}
    return JSONResponse(status_code=404, content=body)
|
||||
|
||||
|
||||
@app.exception_handler(500)
async def server_error_handler(request: Request, exc):
    """Return a structured JSON body for unhandled server errors."""
    body = {"error": "Internal server error", "detail": str(exc)}
    return JSONResponse(status_code=500, content=body)
|
||||
478
examples/GarminSync/garminsync/web/routes.py
Normal file
478
examples/GarminSync/garminsync/web/routes.py
Normal file
@@ -0,0 +1,478 @@
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from garminsync.database import Activity, DaemonConfig, SyncLog, get_session
|
||||
|
||||
router = APIRouter(prefix="/api")
|
||||
|
||||
|
||||
class ScheduleConfig(BaseModel):
    """Request payload for POST /api/schedule."""

    # Whether scheduled syncing is active.
    enabled: bool
    # Cron expression describing when syncs run.
    cron_schedule: str
|
||||
|
||||
|
||||
@router.get("/status")
async def get_status():
    """Return the daemon's current state plus the ten most recent sync logs."""
    session = get_session()
    try:
        cfg = session.query(DaemonConfig).first()
        recent = (
            session.query(SyncLog).order_by(SyncLog.timestamp.desc()).limit(10).all()
        )

        # Build plain dictionaries up front so nothing touches ORM objects
        # after the session is closed.
        if cfg:
            daemon_data = {
                "running": cfg.status == "running",
                "next_run": cfg.next_run,
                "schedule": cfg.schedule_cron,
                "last_run": cfg.last_run,
                "enabled": cfg.enabled,
            }
        else:
            daemon_data = {
                "running": False,
                "next_run": None,
                "schedule": None,
                "last_run": None,
                "enabled": False,
            }

        # Imported here rather than at module level — presumably to avoid a
        # circular import with the daemon module.
        from garminsync.daemon import daemon_instance

        if hasattr(daemon_instance, "is_sync_in_progress"):
            daemon_data["sync_in_progress"] = daemon_instance.is_sync_in_progress()
        else:
            daemon_data["sync_in_progress"] = False

        recent_logs = [
            {
                "timestamp": entry.timestamp,
                "operation": entry.operation,
                "status": entry.status,
                "message": entry.message,
                "activities_processed": entry.activities_processed,
                "activities_downloaded": entry.activities_downloaded,
            }
            for entry in recent
        ]

        return {"daemon": daemon_data, "recent_logs": recent_logs}
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.post("/schedule")
async def update_schedule(config: ScheduleConfig):
    """Persist a new daemon schedule (enabled flag plus cron expression).

    Creates the singleton DaemonConfig row on first use.

    Raises:
        HTTPException: 500 when the database update fails.
    """
    session = get_session()
    try:
        row = session.query(DaemonConfig).first()
        if row is None:
            row = DaemonConfig()
            session.add(row)

        row.enabled = config.enabled
        row.schedule_cron = config.cron_schedule
        session.commit()

        return {"message": "Configuration updated successfully"}
    except Exception as exc:
        session.rollback()
        raise HTTPException(
            status_code=500, detail=f"Failed to update configuration: {str(exc)}"
        )
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.post("/sync/trigger")
async def trigger_sync():
    """Manually run a full sync.

    Refreshes the local activity index from Garmin, then downloads the FIT
    file for every activity not yet marked as downloaded.

    Raises:
        HTTPException: 500 when the sync as a whole fails; individual
            per-activity download failures are logged and skipped.
    """
    try:
        # Imported lazily to avoid circular imports.
        import os
        from datetime import datetime
        from pathlib import Path

        from garminsync.database import Activity, sync_database
        from garminsync.garmin import GarminClient

        client = GarminClient()
        sync_database(client)

        session = get_session()
        try:
            pending = session.query(Activity).filter_by(downloaded=False).all()

            target_dir = Path(os.getenv("DATA_DIR", "data"))
            target_dir.mkdir(parents=True, exist_ok=True)

            downloaded_count = 0
            for item in pending:
                try:
                    payload = client.download_activity_fit(item.activity_id)

                    # start_time is a string; sanitise it for use in a filename.
                    stamp = item.start_time.replace(":", "-").replace(" ", "_")
                    out_path = target_dir / f"activity_{item.activity_id}_{stamp}.fit"
                    with open(out_path, "wb") as fh:
                        fh.write(payload)

                    item.filename = str(out_path)
                    item.downloaded = True
                    item.last_sync = datetime.now().isoformat()
                    downloaded_count += 1
                    # Commit per activity so one failure cannot lose
                    # progress already made.
                    session.commit()
                except Exception as e:
                    print(f"Failed to download activity {item.activity_id}: {e}")
                    session.rollback()

            return {
                "message": f"Sync completed successfully. Downloaded {downloaded_count} activities."
            }
        finally:
            session.close()

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Sync failed: {str(e)}")
|
||||
|
||||
|
||||
@router.get("/activities/stats")
async def get_activity_stats():
    """Expose the offline activity statistics."""
    from garminsync.database import get_offline_stats

    stats = get_offline_stats()
    return stats
|
||||
|
||||
|
||||
@router.get("/logs")
async def get_logs(
    status: Optional[str] = None,
    operation: Optional[str] = None,
    date: Optional[str] = None,
    page: int = 1,
    per_page: int = 20,
):
    """Get sync logs with filtering and pagination.

    Args:
        status: Only return logs with this status (e.g. "success").
        operation: Only return logs for this operation name.
        date: Date prefix (e.g. "2024-01-31"); matches logs whose timestamp
            string starts with it.
        page: 1-based page number.
        per_page: Page size.

    Returns:
        Dict with the requested page of logs plus pagination metadata.
    """
    # Fix: the filter parameters were annotated ``str = None`` (implicit
    # Optional, disallowed by PEP 484); ``Optional`` was already imported
    # at module level but unused.
    session = get_session()
    try:
        query = session.query(SyncLog)

        # Apply optional filters.
        if status:
            query = query.filter(SyncLog.status == status)
        if operation:
            query = query.filter(SyncLog.operation == operation)
        if date:
            # Timestamps are stored as strings, so a prefix LIKE selects
            # everything on the given day.
            query = query.filter(SyncLog.timestamp.like(f"{date}%"))

        # Total matching rows, needed by the client to render pagination.
        total = query.count()

        logs = (
            query.order_by(SyncLog.timestamp.desc())
            .offset((page - 1) * per_page)
            .limit(per_page)
            .all()
        )

        log_data = [
            {
                "id": log.id,
                "timestamp": log.timestamp,
                "operation": log.operation,
                "status": log.status,
                "message": log.message,
                "activities_processed": log.activities_processed,
                "activities_downloaded": log.activities_downloaded,
            }
            for log in logs
        ]

        return {"logs": log_data, "total": total, "page": page, "per_page": per_page}
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.post("/daemon/start")
async def start_daemon():
    """Start the daemon in a background thread and record its state.

    Bug fix: ``session`` was previously created *inside* the ``try`` block,
    so if starting the thread raised, the ``except``/``finally`` clauses hit
    an unbound ``session`` and raised NameError instead of a clean 500.

    Raises:
        HTTPException: 500 when the daemon cannot be started or the status
            update fails.
    """
    import threading

    from garminsync.daemon import daemon_instance

    session = get_session()
    try:
        # Run the daemon loop in a background daemon thread so this
        # request returns immediately.
        daemon_thread = threading.Thread(target=daemon_instance.start)
        daemon_thread.daemon = True
        daemon_thread.start()

        # Persist the new status, creating the singleton config row if needed.
        config = session.query(DaemonConfig).first()
        if not config:
            config = DaemonConfig()
            session.add(config)
        config.status = "running"
        session.commit()

        return {"message": "Daemon started successfully"}
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to start daemon: {str(e)}")
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.post("/daemon/stop")
async def stop_daemon():
    """Stop the daemon process and record the new state.

    Bug fix: ``session`` was previously created *inside* the ``try`` block,
    so if ``daemon_instance.stop()`` raised, the ``except``/``finally``
    clauses hit an unbound ``session`` and raised NameError instead of a
    clean 500.

    Raises:
        HTTPException: 500 when stopping the daemon or the status update fails.
    """
    from garminsync.daemon import daemon_instance

    session = get_session()
    try:
        daemon_instance.stop()

        # Persist the new status if a config row exists.
        config = session.query(DaemonConfig).first()
        if config:
            config.status = "stopped"
        session.commit()

        return {"message": "Daemon stopped successfully"}
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to stop daemon: {str(e)}")
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.delete("/logs")
async def clear_logs():
    """Delete every row from the sync log table.

    Raises:
        HTTPException: 500 when the delete fails (the transaction is
            rolled back first).
    """
    session = get_session()
    try:
        session.query(SyncLog).delete()
        session.commit()
        return {"message": "Logs cleared successfully"}
    except Exception as exc:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to clear logs: {str(exc)}")
    finally:
        session.close()
|
||||
|
||||
@router.post("/activities/{activity_id}/reprocess")
async def reprocess_activity(activity_id: int):
    """Reprocess a single activity's FIT file to refresh its stored metrics.

    Bug fix: the deliberate 404 raised for an unknown activity was caught by
    the blanket ``except Exception`` and re-raised as a 500; HTTPException is
    now re-raised unchanged.

    Args:
        activity_id: Primary key of the activity to reprocess.

    Raises:
        HTTPException: 404 if the activity does not exist, 500 on any
            reprocessing failure.
    """
    from garminsync.activity_parser import get_activity_metrics
    from garminsync.database import Activity, get_session

    session = get_session()
    try:
        activity = session.query(Activity).get(activity_id)
        if not activity:
            raise HTTPException(status_code=404, detail="Activity not found")

        metrics = get_activity_metrics(activity, force_reprocess=True)
        if metrics:
            # Each field keeps its previous value when the parser did not
            # produce the corresponding metric.
            activity.activity_type = metrics.get("activityType", {}).get("typeKey")
            activity.duration = int(float(metrics.get("duration", 0))) if metrics.get("duration") else activity.duration
            activity.distance = float(metrics.get("distance", 0)) if metrics.get("distance") else activity.distance
            activity.max_heart_rate = int(float(metrics.get("maxHR", 0))) if metrics.get("maxHR") else activity.max_heart_rate
            activity.avg_heart_rate = int(float(metrics.get("avgHR", 0))) if metrics.get("avgHR") else activity.avg_heart_rate
            activity.avg_power = float(metrics.get("avgPower", 0)) if metrics.get("avgPower") else activity.avg_power
            activity.calories = int(float(metrics.get("calories", 0))) if metrics.get("calories") else activity.calories

        # Mark as reprocessed
        activity.reprocessed = True
        session.commit()
        return {"message": f"Activity {activity_id} reprocessed successfully"}
    except HTTPException:
        # Preserve intentional HTTP errors (the 404 above).
        raise
    except Exception as e:
        session.rollback()
        raise HTTPException(status_code=500, detail=f"Reprocessing failed: {str(e)}")
    finally:
        session.close()
|
||||
|
||||
@router.post("/reprocess")
async def reprocess_activities(all: bool = False):
    """Kick off a background reprocess job via the daemon.

    Args:
        all: Accepted for API compatibility; not consulted here — the
            daemon decides the scope itself. TODO confirm intended use.

    Raises:
        HTTPException: 500 when the job cannot be started.
    """
    from garminsync.daemon import daemon_instance

    try:
        daemon_instance.reprocess_activities()
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Failed to start reprocess job: {str(exc)}")
    return {"message": "Reprocess job started in background"}
|
||||
|
||||
|
||||
@router.get("/activities")
async def get_activities(
    page: int = 1,
    per_page: int = 50,
    activity_type: Optional[str] = None,
    date_from: Optional[str] = None,
    date_to: Optional[str] = None,
):
    """Get paginated activities with optional filtering.

    Fix: the filter parameters were annotated ``str = None`` (implicit
    Optional, disallowed by PEP 484); ``Optional`` was already imported
    at module level but unused.

    Args:
        page: 1-based page number.
        per_page: Page size.
        activity_type: Restrict results to one activity type key.
        date_from: Inclusive lower bound on start_time (compared as strings).
        date_to: Inclusive upper bound on start_time (compared as strings).

    Returns:
        Dict with the page of activities plus pagination metadata.
    """
    session = get_session()
    try:
        query = session.query(Activity)

        # Apply optional filters.
        if activity_type:
            query = query.filter(Activity.activity_type == activity_type)
        if date_from:
            query = query.filter(Activity.start_time >= date_from)
        if date_to:
            query = query.filter(Activity.start_time <= date_to)

        # Total matching rows, needed by the client to render pagination.
        total = query.count()

        activities = (
            query.order_by(Activity.start_time.desc())
            .offset((page - 1) * per_page)
            .limit(per_page)
            .all()
        )

        activity_data = [
            {
                "activity_id": activity.activity_id,
                "start_time": activity.start_time,
                "activity_type": activity.activity_type,
                "duration": activity.duration,
                "distance": activity.distance,
                "max_heart_rate": activity.max_heart_rate,
                "avg_heart_rate": activity.avg_heart_rate,
                "avg_power": activity.avg_power,
                "calories": activity.calories,
                "filename": activity.filename,
                "downloaded": activity.downloaded,
                "created_at": activity.created_at,
                "last_sync": activity.last_sync,
            }
            for activity in activities
        ]

        return {
            "activities": activity_data,
            "total": total,
            "page": page,
            "per_page": per_page,
        }
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.get("/activities/{activity_id}")
async def get_activity_details(activity_id: int):
    """Return the full stored record for one activity.

    Raises:
        HTTPException: 404 when no activity has the given id.
    """
    session = get_session()
    try:
        match = (
            session.query(Activity).filter(Activity.activity_id == activity_id).first()
        )
        if match is None:
            raise HTTPException(
                status_code=404, detail=f"Activity with ID {activity_id} not found"
            )

        return {
            "id": match.activity_id,
            "name": match.filename or "Unnamed Activity",
            "distance": match.distance,
            "duration": match.duration,
            "start_time": match.start_time,
            "activity_type": match.activity_type,
            "max_heart_rate": match.max_heart_rate,
            "avg_power": match.avg_power,
            "calories": match.calories,
            "filename": match.filename,
            "downloaded": match.downloaded,
            "created_at": match.created_at,
            "last_sync": match.last_sync,
        }
    finally:
        session.close()
|
||||
|
||||
|
||||
@router.get("/dashboard/stats")
async def get_dashboard_stats():
    """Expose the offline statistics consumed by the dashboard page."""
    from garminsync.database import get_offline_stats

    stats = get_offline_stats()
    return stats
|
||||
|
||||
|
||||
@router.get("/api/activities")
async def get_api_activities(page: int = 1, per_page: int = 10):
    """Get paginated activities in the richer client-facing shape.

    NOTE(review): the router already carries the "/api" prefix, so this is
    served at /api/api/activities; the path is kept for backward
    compatibility with existing callers — confirm before changing.

    Bug fix: the deliberate 404s for empty pages were caught by the blanket
    ``except Exception`` and re-raised as 500s; HTTPException is now
    re-raised unchanged. The three redundant empty-result checks are
    collapsed into one.

    Raises:
        HTTPException: 404 when the requested page has no activities,
            500 on unexpected errors.
    """
    session = get_session()
    try:
        # Delegate pagination to the model helper.
        pagination = Activity.get_paginated(page, per_page)
        activities = pagination.items

        if not activities:
            if page > 1:
                raise HTTPException(
                    status_code=404, detail=f"No activities found for page {page}"
                )
            raise HTTPException(status_code=404, detail="No activities found")

        return {
            "activities": [
                {
                    "id": activity.activity_id,
                    "name": activity.filename or "Unnamed Activity",
                    "distance": activity.distance,
                    "duration": activity.duration,
                    "start_time": activity.start_time,
                    "activity_type": activity.activity_type,
                    "max_heart_rate": activity.max_heart_rate,
                    "avg_power": activity.avg_power,
                    "calories": activity.calories,
                    "downloaded": activity.downloaded,
                    "created_at": activity.created_at,
                    "last_sync": activity.last_sync,
                    "device": activity.device or "Unknown",
                    "intensity": activity.intensity or "Unknown",
                    "average_speed": activity.average_speed,
                    "elevation_gain": activity.elevation_gain,
                    "heart_rate_zones": activity.heart_rate_zones or [],
                    "power_zones": activity.power_zones or [],
                    "training_effect": activity.training_effect or 0,
                    "training_effect_label": activity.training_effect_label
                    or "Unknown",
                }
                for activity in activities
            ],
            "total_pages": pagination.pages,
            "current_page": pagination.page,
            "total_items": pagination.total,
            "page_size": per_page,
            "status": "success",
        }
    except HTTPException:
        # Preserve intentional HTTP errors (the 404s above).
        raise
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"An error occurred while fetching activities: {str(e)}",
        )
    finally:
        session.close()
|
||||
140
examples/GarminSync/garminsync/web/static/activities.js
Normal file
140
examples/GarminSync/garminsync/web/static/activities.js
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
 * Drives the activities table: fetches paginated data from /api/activities,
 * renders rows and the pagination control.
 *
 * Bug fix: the "No activities found" and error placeholder rows used
 * colspan="6", but createTableRow renders 8 cells per row — the colspans
 * are now 8 so placeholders span the whole table.
 */
class ActivitiesPage {
    constructor() {
        this.currentPage = 1;
        this.pageSize = 25;
        this.totalPages = 1;
        this.activities = [];
        this.filters = {};
        this.init();
    }

    init() {
        this.loadActivities();
        this.setupEventListeners();
    }

    /** Fetch the current page (with any active filters) and re-render. */
    async loadActivities() {
        try {
            const params = new URLSearchParams({
                page: this.currentPage,
                per_page: this.pageSize,
                ...this.filters
            });

            const response = await fetch(`/api/activities?${params}`);
            if (!response.ok) {
                throw new Error('Failed to load activities');
            }

            const data = await response.json();

            this.activities = data.activities;
            this.totalPages = Math.ceil(data.total / this.pageSize);

            this.renderTable();
            this.renderPagination();
        } catch (error) {
            console.error('Failed to load activities:', error);
            this.showError('Failed to load activities');
        }
    }

    /** Rebuild the table body from this.activities. */
    renderTable() {
        const tbody = document.getElementById('activities-tbody');
        if (!tbody) return;

        if (!this.activities || this.activities.length === 0) {
            // colspan matches the 8 columns rendered by createTableRow.
            tbody.innerHTML = '<tr><td colspan="8">No activities found</td></tr>';
            return;
        }

        tbody.innerHTML = '';

        this.activities.forEach((activity, index) => {
            const row = this.createTableRow(activity, index);
            tbody.appendChild(row);
        });
    }

    /** Build one <tr> (8 cells) for an activity; index drives row striping. */
    createTableRow(activity, index) {
        const row = document.createElement('tr');
        row.className = index % 2 === 0 ? 'row-even' : 'row-odd';

        row.innerHTML = `
            <td>${Utils.formatDate(activity.start_time)}</td>
            <td>${activity.activity_type || '-'}</td>
            <td>${Utils.formatDuration(activity.duration)}</td>
            <td>${Utils.formatDistance(activity.distance)}</td>
            <td>${Utils.formatHeartRate(activity.max_heart_rate)}</td>
            <td>${Utils.formatHeartRate(activity.avg_heart_rate)}</td>
            <td>${Utils.formatPower(activity.avg_power)}</td>
            <td>${activity.calories ? activity.calories.toLocaleString() : '-'}</td>
        `;

        return row;
    }

    /** Rebuild the pagination control (hidden when there is a single page). */
    renderPagination() {
        const pagination = document.getElementById('pagination');
        if (!pagination) return;

        if (this.totalPages <= 1) {
            pagination.innerHTML = '';
            return;
        }

        let paginationHtml = '';

        // Previous button
        paginationHtml += `
            <li class="${this.currentPage === 1 ? 'disabled' : ''}">
                <a href="#" onclick="activitiesPage.changePage(${this.currentPage - 1}); return false;">Previous</a>
            </li>
        `;

        // Page numbers: first, last, and a window of 2 around the current
        // page, with ellipses in the gaps.
        for (let i = 1; i <= this.totalPages; i++) {
            if (i === 1 || i === this.totalPages || (i >= this.currentPage - 2 && i <= this.currentPage + 2)) {
                paginationHtml += `
                    <li class="${i === this.currentPage ? 'active' : ''}">
                        <a href="#" onclick="activitiesPage.changePage(${i}); return false;">${i}</a>
                    </li>
                `;
            } else if (i === this.currentPage - 3 || i === this.currentPage + 3) {
                paginationHtml += '<li><span>...</span></li>';
            }
        }

        // Next button
        paginationHtml += `
            <li class="${this.currentPage === this.totalPages ? 'disabled' : ''}">
                <a href="#" onclick="activitiesPage.changePage(${this.currentPage + 1}); return false;">Next</a>
            </li>
        `;

        pagination.innerHTML = paginationHtml;
    }

    /** Navigate to a page; out-of-range requests are ignored. */
    changePage(page) {
        if (page < 1 || page > this.totalPages) return;
        this.currentPage = page;
        this.loadActivities();
    }

    setupEventListeners() {
        // We can add filter event listeners here if needed
    }

    /** Show an error message inside the table body. */
    showError(message) {
        const tbody = document.getElementById('activities-tbody');
        if (tbody) {
            // colspan matches the 8 columns rendered by createTableRow.
            tbody.innerHTML = `<tr><td colspan="8">Error: ${message}</td></tr>`;
        }
    }
}
|
||||
|
||||
// Initialize activities page when DOM is loaded.
// activitiesPage is deliberately global: the pagination links generated in
// renderPagination reference it from inline onclick handlers.
let activitiesPage;
document.addEventListener('DOMContentLoaded', function() {
    activitiesPage = new ActivitiesPage();
});
|
||||
3
examples/GarminSync/garminsync/web/static/app.js
Normal file
3
examples/GarminSync/garminsync/web/static/app.js
Normal file
@@ -0,0 +1,3 @@
|
||||
// This file is deprecated and no longer used.
|
||||
// The functionality has been moved to home.js, activities.js, and logs.js
|
||||
// This file is kept for backward compatibility but is empty.
|
||||
1
examples/GarminSync/garminsync/web/static/charts.js
Normal file
1
examples/GarminSync/garminsync/web/static/charts.js
Normal file
@@ -0,0 +1 @@
|
||||
// This file is deprecated and no longer used.
|
||||
200
examples/GarminSync/garminsync/web/static/components.css
Normal file
200
examples/GarminSync/garminsync/web/static/components.css
Normal file
@@ -0,0 +1,200 @@
|
||||
/* Table Styling — activities list; striping classes are assigned in JS
   (activities.js createTableRow). */
.activities-table {
    width: 100%;
    border-collapse: collapse;
    font-size: 14px;
}

.activities-table thead {
    background-color: #000;
    color: white;
}

.activities-table th {
    padding: 12px 16px;
    text-align: left;
    font-weight: 600;
    border-right: 1px solid #333;
}

.activities-table th:last-child {
    border-right: none;
}

.activities-table td {
    padding: 12px 16px;
    border-bottom: 1px solid #eee;
}

.activities-table .row-even {
    background-color: #f8f9fa;
}

.activities-table .row-odd {
    background-color: #ffffff;
}

.activities-table tr:hover {
    background-color: #e9ecef;
}

/* Sync Button Styling — the large "Sync Now" call-to-action */
.btn-primary.btn-large {
    width: 100%;
    padding: 15px;
    font-size: 16px;
    font-weight: 600;
    border-radius: var(--border-radius);
    background: linear-gradient(135deg, #007bff 0%, #0056b3 100%);
    border: none;
    color: white;
    cursor: pointer;
    transition: all 0.2s ease;
}

.btn-primary.btn-large:hover {
    transform: translateY(-2px);
    box-shadow: 0 4px 12px rgba(0,123,255,0.3);
}

.btn-primary.btn-large:disabled {
    opacity: 0.6;
    cursor: not-allowed;
    transform: none;
}

/* Statistics Card — label/value rows on the dashboard */
.statistics-card .stat-item {
    display: flex;
    justify-content: space-between;
    margin-bottom: 10px;
    padding: 8px 0;
    border-bottom: 1px solid #eee;
}

.statistics-card .stat-item:last-child {
    border-bottom: none;
}

.statistics-card label {
    font-weight: 500;
    color: #666;
}

.statistics-card span {
    font-weight: 600;
    color: #333;
}

/* Pagination — markup is generated by activities.js / logs.js */
.pagination-container {
    margin-top: 20px;
    display: flex;
    justify-content: center;
}

.pagination {
    display: flex;
    list-style: none;
    padding: 0;
    margin: 0;
}

.pagination li {
    margin: 0 5px;
}

.pagination a {
    display: block;
    padding: 8px 12px;
    text-decoration: none;
    color: var(--primary-color);
    border: 1px solid #ddd;
    border-radius: 4px;
    transition: all 0.2s ease;
}

.pagination a:hover {
    background-color: #f0f0f0;
}

.pagination .active a {
    background-color: var(--primary-color);
    color: white;
    border-color: var(--primary-color);
}

.pagination .disabled a {
    color: #ccc;
    cursor: not-allowed;
}

/* Form elements */
.form-group {
    margin-bottom: 15px;
}

.form-group label {
    display: block;
    margin-bottom: 5px;
    font-weight: 500;
}

.form-control {
    width: 100%;
    padding: 10px;
    border: 1px solid #ddd;
    border-radius: var(--border-radius);
    font-family: var(--font-family);
    font-size: 14px;
}

.form-control:focus {
    outline: none;
    border-color: var(--primary-color);
    box-shadow: 0 0 0 2px rgba(0,123,255,0.25);
}

/* Badges — status pills used in the logs table */
.badge {
    display: inline-block;
    padding: 4px 8px;
    border-radius: 4px;
    font-size: 0.8rem;
    font-weight: 500;
}

.badge-success {
    background-color: var(--success-color);
    color: white;
}

.badge-error {
    background-color: var(--danger-color);
    color: white;
}

.badge-warning {
    background-color: var(--warning-color);
    color: #212529;
}

/* Table responsive */
.table-container {
    overflow-x: auto;
}

/* Activities table card */
.activities-table-card {
    padding: 0;
}

.activities-table-card .card-header {
    padding: 20px;
    margin-bottom: 0;
}

/* Activities container */
.activities-container {
    margin-top: 20px;
}
|
||||
144
examples/GarminSync/garminsync/web/static/home.js
Normal file
144
examples/GarminSync/garminsync/web/static/home.js
Normal file
@@ -0,0 +1,144 @@
|
||||
/**
 * Drives the dashboard home page: the "Sync Now" button, a periodic stats
 * refresh, and the rolling recent-activity log panel.
 */
class HomePage {
    constructor() {
        this.logSocket = null;            // presumably reserved for a websocket log feed; unused here — TODO confirm
        this.statsRefreshInterval = null; // handle of the 30s stats refresh timer
        this.init();
    }

    init() {
        this.attachEventListeners();
        this.setupRealTimeUpdates();
        this.loadInitialData();
    }

    attachEventListeners() {
        const syncButton = document.getElementById('sync-now-btn');
        if (syncButton) {
            syncButton.addEventListener('click', () => this.triggerSync());
        }
    }

    // Fire POST /api/sync/trigger, reflecting progress/success/failure in the
    // button and the #sync-status element.
    async triggerSync() {
        const btn = document.getElementById('sync-now-btn');
        const status = document.getElementById('sync-status');

        if (!btn || !status) return;

        // Disable the button while the sync runs to prevent double-triggering.
        btn.disabled = true;
        btn.innerHTML = '<i class="icon-loading"></i> Syncing...';
        status.textContent = 'Sync in progress...';
        status.className = 'sync-status syncing';

        try {
            const response = await fetch('/api/sync/trigger', {method: 'POST'});
            const result = await response.json();

            if (response.ok) {
                status.textContent = 'Sync completed successfully';
                status.className = 'sync-status success';
                this.updateStats();
            } else {
                throw new Error(result.detail || 'Sync failed');
            }
        } catch (error) {
            status.textContent = `Sync failed: ${error.message}`;
            status.className = 'sync-status error';
        } finally {
            btn.disabled = false;
            btn.innerHTML = '<i class="icon-sync"></i> Sync Now';

            // Reset status message after 5 seconds
            setTimeout(() => {
                if (status.className.includes('success')) {
                    status.textContent = 'Ready to sync';
                    status.className = 'sync-status';
                }
            }, 5000);
        }
    }

    setupRealTimeUpdates() {
        // Poll for log updates every 5 seconds during active operations
        this.startLogPolling();

        // Update stats every 30 seconds
        this.statsRefreshInterval = setInterval(() => {
            this.updateStats();
        }, 30000);
    }

    async startLogPolling() {
        // For now, we'll update logs every 10 seconds
        setInterval(() => {
            this.updateLogs();
        }, 10000);
    }

    // Refresh the total/downloaded/missing counters from /api/dashboard/stats.
    async updateStats() {
        try {
            const response = await fetch('/api/dashboard/stats');
            if (!response.ok) {
                throw new Error('Failed to fetch stats');
            }

            const stats = await response.json();

            const totalEl = document.getElementById('total-activities');
            const downloadedEl = document.getElementById('downloaded-activities');
            const missingEl = document.getElementById('missing-activities');

            if (totalEl) totalEl.textContent = stats.total;
            if (downloadedEl) downloadedEl.textContent = stats.downloaded;
            if (missingEl) missingEl.textContent = stats.missing;
        } catch (error) {
            console.error('Failed to update stats:', error);
        }
    }

    // Pull recent logs from /api/status and re-render the log panel.
    async updateLogs() {
        try {
            const response = await fetch('/api/status');
            if (!response.ok) {
                throw new Error('Failed to fetch logs');
            }

            const data = await response.json();
            this.renderLogs(data.recent_logs);
        } catch (error) {
            console.error('Failed to update logs:', error);
        }
    }

    // Render the log entries (or a placeholder) into #log-content.
    renderLogs(logs) {
        const logContent = document.getElementById('log-content');
        if (!logContent) return;

        if (!logs || logs.length === 0) {
            logContent.innerHTML = '<div class="log-entry">No recent activity</div>';
            return;
        }

        const logsHtml = logs.map(log => `
            <div class="log-entry">
                <span class="timestamp">${Utils.formatTimestamp(log.timestamp)}</span>
                <span class="status ${log.status === 'success' ? 'success' : 'error'}">
                    ${log.status}
                </span>
                ${log.operation}: ${log.message || ''}
                ${log.activities_downloaded > 0 ? `Downloaded ${log.activities_downloaded} activities` : ''}
            </div>
        `).join('');

        logContent.innerHTML = logsHtml;
    }

    async loadInitialData() {
        // Load initial logs
        await this.updateLogs();
    }
}
|
||||
|
||||
// Initialize home page when DOM is loaded. No reference is kept: the
// instance's own timers/listeners keep it alive.
document.addEventListener('DOMContentLoaded', function() {
    new HomePage();
});
|
||||
179
examples/GarminSync/garminsync/web/static/logs.js
Normal file
179
examples/GarminSync/garminsync/web/static/logs.js
Normal file
@@ -0,0 +1,179 @@
|
||||
// Global variables for pagination and filtering
let currentPage = 1;        // NOTE(review): LogsPage tracks its own currentPage; this global looks unused — confirm
const logsPerPage = 20;     // page size sent to /api/logs
let totalLogs = 0;          // total row count from the last /api/logs response
let currentFilters = {};    // extra query params merged into the /api/logs request
|
||||
|
||||
class LogsPage {
    // Paginated, filterable viewer for the sync-log table.
    // Talks to GET /api/logs (listing) and DELETE /api/logs (clearing).
    constructor() {
        this.currentPage = 1;  // 1-based index of the page being displayed
        this.totalLogs = 0;    // total rows matching the active filters
        this.filters = {};     // status/operation/date query parameters
        this.init();
    }

    init() {
        this.loadLogs();
        this.setupEventListeners();
    }

    async loadLogs() {
        // Fetch the current page of logs with the active filters applied.
        try {
            const params = new URLSearchParams({
                page: this.currentPage,
                per_page: logsPerPage,
                ...this.filters
            }).toString();

            const response = await fetch(`/api/logs?${params}`);
            if (!response.ok) {
                throw new Error('Failed to fetch logs');
            }

            const data = await response.json();
            this.totalLogs = data.total;
            totalLogs = data.total; // keep the legacy module-level counter in sync
            this.renderLogs(data.logs);
            this.renderPagination();
        } catch (error) {
            console.error('Error loading logs:', error);
            Utils.showError('Failed to load logs: ' + error.message);
        }
    }

    renderLogs(logs) {
        // Fill the table body; one row per log entry.
        const tbody = document.getElementById('logs-tbody');
        if (!tbody) return;

        tbody.innerHTML = '';

        if (!logs || logs.length === 0) {
            tbody.innerHTML = '<tr><td colspan="6">No logs found</td></tr>';
            return;
        }

        logs.forEach((log, index) => {
            const row = document.createElement('tr');
            // Fix: rows now actually alternate — the old code tagged every
            // row 'row-odd' despite the "alternating row colors" intent.
            row.className = index % 2 === 0 ? 'row-odd' : 'row-even';

            row.innerHTML = `
                <td>${Utils.formatTimestamp(log.timestamp)}</td>
                <td>${log.operation}</td>
                <td><span class="badge badge-${log.status === 'success' ? 'success' :
                                               log.status === 'error' ? 'error' :
                                               'warning'}">${log.status}</span></td>
                <td>${log.message || ''}</td>
                <td>${log.activities_processed}</td>
                <td>${log.activities_downloaded}</td>
            `;

            tbody.appendChild(row);
        });
    }

    renderPagination() {
        // Build Previous / numbered / Next controls, eliding distant pages with "...".
        const totalPages = Math.ceil(this.totalLogs / logsPerPage);
        const pagination = document.getElementById('pagination');
        if (!pagination) return;

        if (totalPages <= 1) {
            pagination.innerHTML = '';
            return;
        }

        let paginationHtml = '';

        // Previous button
        paginationHtml += `
            <li class="${this.currentPage === 1 ? 'disabled' : ''}">
                <a href="#" onclick="logsPage.changePage(${this.currentPage - 1}); return false;">Previous</a>
            </li>
        `;

        // Page numbers: always first/last plus a window of 2 around the current page.
        for (let i = 1; i <= totalPages; i++) {
            if (i === 1 || i === totalPages || (i >= this.currentPage - 2 && i <= this.currentPage + 2)) {
                paginationHtml += `
                    <li class="${i === this.currentPage ? 'active' : ''}">
                        <a href="#" onclick="logsPage.changePage(${i}); return false;">${i}</a>
                    </li>
                `;
            } else if (i === this.currentPage - 3 || i === this.currentPage + 3) {
                paginationHtml += '<li><span>...</span></li>';
            }
        }

        // Next button
        paginationHtml += `
            <li class="${this.currentPage === totalPages ? 'disabled' : ''}">
                <a href="#" onclick="logsPage.changePage(${this.currentPage + 1}); return false;">Next</a>
            </li>
        `;

        pagination.innerHTML = paginationHtml;
    }

    changePage(page) {
        // Ignore out-of-range requests (e.g. Previous on page 1).
        if (page < 1 || page > Math.ceil(this.totalLogs / logsPerPage)) return;
        this.currentPage = page;
        this.loadLogs();
    }

    refreshLogs() {
        this.currentPage = 1;
        this.loadLogs();
    }

    applyFilters() {
        // Read the filter widgets and reload from page 1.
        this.filters = {
            status: document.getElementById('status-filter').value,
            operation: document.getElementById('operation-filter').value,
            date: document.getElementById('date-filter').value
        };
        currentFilters = this.filters; // keep the legacy module-level copy in sync

        this.currentPage = 1;
        this.loadLogs();
    }

    async clearLogs() {
        if (!confirm('Are you sure you want to clear all logs? This cannot be undone.')) return;

        try {
            const response = await fetch('/api/logs', { method: 'DELETE' });
            if (response.ok) {
                Utils.showSuccess('Logs cleared successfully');
                this.refreshLogs();
            } else {
                throw new Error('Failed to clear logs');
            }
        } catch (error) {
            console.error('Error clearing logs:', error);
            Utils.showError('Failed to clear logs: ' + error.message);
        }
    }

    setupEventListeners() {
        // Controls use inline onclick attributes that call the global shims below.
    }
}
|
||||
|
||||
// Create the page controller once the DOM is ready.
let logsPage;
document.addEventListener('DOMContentLoaded', () => {
    logsPage = new LogsPage();
});

// Thin global shims so the inline onclick="..." attributes in logs.html keep working.
function changePage(page) {
    if (!logsPage) return;
    logsPage.changePage(page);
}

function refreshLogs() {
    if (!logsPage) return;
    logsPage.refreshLogs();
}

function applyFilters() {
    if (!logsPage) return;
    logsPage.applyFilters();
}

function clearLogs() {
    if (!logsPage) return;
    logsPage.clearLogs();
}
|
||||
52
examples/GarminSync/garminsync/web/static/navigation.js
Normal file
52
examples/GarminSync/garminsync/web/static/navigation.js
Normal file
@@ -0,0 +1,52 @@
|
||||
class Navigation {
    // Renders the tab bar into any `.navigation` container and highlights
    // the tab for the current URL.
    constructor() {
        this.currentPage = this.getCurrentPage();
        this.render();
    }

    getCurrentPage() {
        // Generalized: recognize every served page, not just home/activities.
        // The /config and /logs templates also render a `.navigation` container.
        const path = window.location.pathname;
        if (path === '/activities') return 'activities';
        if (path === '/config') return 'config';
        if (path === '/logs') return 'logs';
        return 'home';
    }

    render() {
        const nav = document.querySelector('.navigation');
        if (nav) {
            nav.innerHTML = this.getNavigationHTML();
            this.attachEventListeners();
        }
    }

    getNavigationHTML() {
        // One button per page; the active page gets the 'active' class.
        const tab = (page, label) => `
                <button class="nav-tab ${this.currentPage === page ? 'active' : ''}" 
                        data-page="${page}">${label}</button>`;
        return `
            <nav class="nav-tabs">${tab('home', 'Home')}${tab('activities', 'Activities')}${tab('config', 'Config')}${tab('logs', 'Logs')}
            </nav>
        `;
    }

    attachEventListeners() {
        const tabs = document.querySelectorAll('.nav-tab');
        tabs.forEach(tab => {
            tab.addEventListener('click', (e) => {
                const page = e.target.getAttribute('data-page');
                this.navigateToPage(page);
            });
        });
    }

    navigateToPage(page) {
        // Central route table; unknown pages are ignored.
        const routes = {
            home: '/',
            activities: '/activities',
            config: '/config',
            logs: '/logs'
        };
        if (routes[page]) {
            window.location.href = routes[page];
        }
    }
}
|
||||
|
||||
// Build the tab bar as soon as the document is ready.
document.addEventListener('DOMContentLoaded', () => {
    new Navigation();
});
|
||||
78
examples/GarminSync/garminsync/web/static/responsive.css
Normal file
78
examples/GarminSync/garminsync/web/static/responsive.css
Normal file
@@ -0,0 +1,78 @@
|
||||
/* Mobile-first responsive design */

/* Tablets and small laptops (<=768px): collapse the two-column dashboard
   into a single column, main content first, sidebar second. */
@media (max-width: 768px) {
    .layout-grid {
        grid-template-columns: 1fr;
        gap: 15px;
    }

    .sidebar {
        order: 2;
    }

    .main-content {
        order: 1;
    }

    .activities-table {
        font-size: 12px;
    }

    .activities-table th,
    .activities-table td {
        padding: 8px 10px;
    }

    .nav-tabs {
        flex-direction: column;
    }

    .container {
        padding: 0 10px;
    }

    .card {
        padding: 15px;
    }

    .btn {
        padding: 8px 15px;
        font-size: 14px;
    }

    .btn-large {
        padding: 12px 20px;
        font-size: 15px;
    }
}

/* Phone-sized screens (<=480px): let the wide table scroll horizontally
   and tighten paddings further. */
@media (max-width: 480px) {
    .activities-table {
        display: block;
        overflow-x: auto;
        white-space: nowrap;
    }

    .stat-item {
        flex-direction: column;
        gap: 5px;
    }

    .log-content {
        padding: 5px;
        font-size: 0.8rem;
    }

    .log-entry {
        padding: 5px;
    }

    .pagination a {
        padding: 6px 10px;
        font-size: 14px;
    }

    .form-control {
        padding: 8px;
        font-size: 14px;
    }
}
|
||||
268
examples/GarminSync/garminsync/web/static/style.css
Normal file
268
examples/GarminSync/garminsync/web/static/style.css
Normal file
@@ -0,0 +1,268 @@
|
||||
/* CSS Variables for consistent theming */
:root {
    --primary-color: #007bff;
    --secondary-color: #6c757d;
    --success-color: #28a745;
    --danger-color: #dc3545;
    --warning-color: #ffc107;
    --light-gray: #f8f9fa;
    --dark-gray: #343a40;
    --border-radius: 8px;
    --box-shadow: 0 2px 10px rgba(0,0,0,0.1);
    --font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
}

/* Reset and base styles */
* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
}

body {
    font-family: var(--font-family);
    background-color: #f5f7fa;
    color: #333;
    line-height: 1.6;
}

/* CSS Grid Layout System */
.container {
    max-width: 1200px;
    margin: 0 auto;
    padding: 0 20px;
}

/* Dashboard shell: fixed-width sidebar plus fluid content column. */
.layout-grid {
    display: grid;
    grid-template-columns: 300px 1fr;
    gap: 20px;
    min-height: calc(100vh - 60px);
}

/* Modern Card Components */
.card {
    background: white;
    border-radius: var(--border-radius);
    box-shadow: var(--box-shadow);
    padding: 20px;
    margin-bottom: 20px;
}

.card-header {
    font-weight: 600;
    font-size: 1.2rem;
    margin-bottom: 15px;
    padding-bottom: 10px;
    border-bottom: 1px solid #eee;
}

/* Navigation */
.navigation {
    margin-bottom: 20px;
}

.nav-tabs {
    display: flex;
    background: white;
    border-radius: var(--border-radius);
    box-shadow: var(--box-shadow);
    padding: 5px;
}

.nav-tab {
    flex: 1;
    padding: 12px 20px;
    border: none;
    background: transparent;
    cursor: pointer;
    font-weight: 500;
    border-radius: var(--border-radius);
    transition: all 0.2s ease;
}

.nav-tab:hover {
    background-color: #f0f0f0;
}

.nav-tab.active {
    background-color: var(--primary-color);
    color: white;
}

/* Buttons */
.btn {
    padding: 10px 20px;
    border: none;
    border-radius: var(--border-radius);
    cursor: pointer;
    font-weight: 500;
    transition: all 0.2s ease;
    display: inline-flex;
    align-items: center;
    justify-content: center;
}

.btn-primary {
    background: linear-gradient(135deg, var(--primary-color) 0%, #0056b3 100%);
    color: white;
}

.btn-primary:hover:not(:disabled) {
    transform: translateY(-2px);
    box-shadow: 0 4px 12px rgba(0,123,255,0.3);
}

.btn-primary:disabled {
    opacity: 0.6;
    cursor: not-allowed;
}

.btn-secondary {
    background-color: var(--secondary-color);
    color: white;
}

.btn-success {
    background-color: var(--success-color);
    color: white;
}

.btn-danger {
    background-color: var(--danger-color);
    color: white;
}

.btn-warning {
    background-color: var(--warning-color);
    color: #212529;
}

.btn-large {
    padding: 15px 25px;
    font-size: 16px;
}

/* Icons (pure-CSS glyphs, no icon font needed) */
.icon-sync::before {
    content: "↻";
    margin-right: 8px;
}

.icon-loading::before {
    content: "⏳";
    margin-right: 8px;
}

/* Status display */
.sync-status {
    margin-top: 15px;
    padding: 10px;
    border-radius: var(--border-radius);
    text-align: center;
    font-weight: 500;
}

.sync-status.syncing {
    background-color: #e3f2fd;
    color: var(--primary-color);
}

.sync-status.success {
    background-color: #e8f5e9;
    color: var(--success-color);
}

.sync-status.error {
    background-color: #ffebee;
    color: var(--danger-color);
}

/* Statistics */
.stat-item {
    display: flex;
    justify-content: space-between;
    margin-bottom: 10px;
    padding: 8px 0;
    border-bottom: 1px solid #eee;
}

.stat-item:last-child {
    border-bottom: none;
}

.stat-item label {
    font-weight: 500;
    color: #666;
}

.stat-item span {
    font-weight: 600;
    color: #333;
}

/* Log display */
.log-content {
    max-height: 400px;
    overflow-y: auto;
    padding: 10px;
    background-color: #f8f9fa;
    border-radius: var(--border-radius);
    font-family: monospace;
    font-size: 0.9rem;
}

.log-entry {
    margin-bottom: 8px;
    padding: 8px;
    border-left: 3px solid #ddd;
    background-color: white;
    border-radius: 0 var(--border-radius) var(--border-radius) 0;
}

.log-entry .timestamp {
    font-size: 0.8rem;
    color: #666;
    margin-right: 10px;
}

.log-entry .status {
    padding: 2px 6px;
    border-radius: 4px;
    font-size: 0.8rem;
    font-weight: 500;
}

.log-entry .status.success {
    background-color: var(--success-color);
    color: white;
}

.log-entry .status.error {
    background-color: var(--danger-color);
    color: white;
}

/* Responsive Design */
/* NOTE(review): these rules duplicate a subset of responsive.css, which is
   loaded after this file — consider consolidating to a single source. */
@media (max-width: 768px) {
    .layout-grid {
        grid-template-columns: 1fr;
        gap: 15px;
    }

    .sidebar {
        order: 2;
    }

    .main-content {
        order: 1;
    }

    .nav-tabs {
        flex-direction: column;
    }

    .container {
        padding: 0 10px;
    }
}
|
||||
56
examples/GarminSync/garminsync/web/static/utils.js
Normal file
56
examples/GarminSync/garminsync/web/static/utils.js
Normal file
@@ -0,0 +1,56 @@
|
||||
// Utility functions for the GarminSync application

class Utils {
    // Format an ISO date string for display; '-' when missing.
    static formatDate(dateStr) {
        if (!dateStr) return '-';
        return new Date(dateStr).toLocaleDateString();
    }

    // Format a duration in seconds as H:MM:SS; '-' when missing or zero.
    static formatDuration(seconds) {
        if (!seconds) return '-';
        const hours = Math.floor(seconds / 3600);
        const minutes = Math.floor((seconds % 3600) / 60);
        // Fix: floor the remainder — fractional input (e.g. 61.5) previously
        // produced output like "0:01:1.5".
        const secondsLeft = Math.floor(seconds % 60);
        return `${hours}:${minutes.toString().padStart(2, '0')}:${secondsLeft.toString().padStart(2, '0')}`;
    }

    // Format a distance in meters as kilometers with one decimal place.
    static formatDistance(meters) {
        if (!meters) return '-';
        return `${(meters / 1000).toFixed(1)} km`;
    }

    // Format power in watts, rounded to the nearest integer.
    static formatPower(watts) {
        return watts ? `${Math.round(watts)}W` : '-';
    }

    // Format heart rate (adds 'bpm')
    static formatHeartRate(hr) {
        return hr ? `${hr} bpm` : '-';
    }

    // Show error message
    static showError(message) {
        console.error(message);
        // In a real implementation, you might want to show this in the UI
        alert(`Error: ${message}`);
    }

    // Show success message
    static showSuccess(message) {
        console.log(message);
        // In a real implementation, you might want to show this in the UI
    }

    // Format timestamp for log entries (locale-dependent date + time).
    static formatTimestamp(timestamp) {
        if (!timestamp) return '';
        return new Date(timestamp).toLocaleString();
    }
}
|
||||
|
||||
// Make Utils available globally
// (classic non-module script tags — inline template scripts rely on window.Utils)
window.Utils = Utils;
|
||||
44
examples/GarminSync/garminsync/web/templates/activities.html
Normal file
44
examples/GarminSync/garminsync/web/templates/activities.html
Normal file
@@ -0,0 +1,44 @@
|
||||
{% extends "base.html" %}

{% block content %}
<div class="container">
    <!-- Tab bar is injected here by navigation.js -->
    <div class="navigation"></div>

    <div class="activities-container">
        <div class="card activities-table-card">
            <div class="card-header">
                <h3>Activities</h3>
            </div>
            <div class="table-container">
                <table class="activities-table" id="activities-table">
                    <thead>
                        <tr>
                            <th>Date</th>
                            <th>Activity Type</th>
                            <th>Duration</th>
                            <th>Distance</th>
                            <th>Max HR</th>
                            <th>Avg HR</th>
                            <th>Power</th>
                            <th>Calories</th>
                        </tr>
                    </thead>
                    <tbody id="activities-tbody">
                        <!-- Data populated by JavaScript -->
                    </tbody>
                </table>
            </div>

            <div class="pagination-container">
                <div class="pagination" id="pagination">
                    <!-- Pagination controls -->
                </div>
            </div>
        </div>
    </div>
</div>
{% endblock %}

{% block page_scripts %}
<script src="/static/activities.js"></script>
{% endblock %}
|
||||
154
examples/GarminSync/garminsync/web/templates/activity.html
Normal file
154
examples/GarminSync/garminsync/web/templates/activity.html
Normal file
@@ -0,0 +1,154 @@
|
||||
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Activity Details - GarminSync</title>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
    <!-- Fix: the app stylesheet is served as style.css (styles.css does not exist). -->
    <link href="/static/style.css" rel="stylesheet">
</head>
<body>
    <div class="container mt-4">
        <h1 class="mb-4">Activity Details</h1>

        <div id="activity-details">
            <!-- Activity details will be populated by JavaScript -->
        </div>

        <div class="mt-4">
            <h2>Analysis Metrics</h2>
            <table class="table table-striped" id="metrics-table">
                <thead>
                    <tr>
                        <th>Metric</th>
                        <th>Value</th>
                    </tr>
                </thead>
                <tbody>
                    <!-- Metrics will be populated by JavaScript -->
                </tbody>
            </table>
        </div>

        <div class="mt-4">
            <button id="reprocess-btn" class="btn btn-warning">
                <span id="spinner" class="spinner-border spinner-border-sm d-none" role="status" aria-hidden="true"></span>
                Reprocess Activity
            </button>
            <div id="reprocess-result" class="mt-2"></div>
        </div>

        <div class="mt-4">
            <a href="/activities" class="btn btn-secondary">Back to Activities</a>
        </div>
    </div>

    <script src="/static/utils.js"></script>
    <script>
        document.addEventListener('DOMContentLoaded', async function() {
            const activityId = new URLSearchParams(window.location.search).get('id');
            if (!activityId) {
                // Fix: the helpers live on the Utils class exported by utils.js;
                // the bare function names used before were ReferenceErrors.
                Utils.showError('Activity ID not provided');
                return;
            }

            // Load activity details
            await loadActivity(activityId);

            // Setup reprocess button
            document.getElementById('reprocess-btn').addEventListener('click', () => {
                reprocessActivity(activityId);
            });
        });

        async function loadActivity(activityId) {
            try {
                const response = await fetch(`/api/activities/${activityId}`);
                if (!response.ok) {
                    throw new Error('Failed to load activity details');
                }

                const activity = await response.json();
                renderActivity(activity);
            } catch (error) {
                Utils.showError(`Error loading activity: ${error.message}`);
            }
        }

        function renderActivity(activity) {
            const detailsEl = document.getElementById('activity-details');
            // Fix: formatDateTime did not exist anywhere; Utils.formatTimestamp
            // is the matching date+time formatter from utils.js.
            detailsEl.innerHTML = `
                <div class="card">
                    <div class="card-body">
                        <h5 class="card-title">${activity.name}</h5>
                        <p class="card-text">
                            <strong>Date:</strong> ${Utils.formatTimestamp(activity.start_time)}<br>
                            <strong>Type:</strong> ${activity.activity_type}<br>
                            <strong>Duration:</strong> ${Utils.formatDuration(activity.duration)}<br>
                            <strong>Distance:</strong> ${Utils.formatDistance(activity.distance)}<br>
                            <strong>Status:</strong> 
                            <span class="badge ${activity.reprocessed ? 'bg-success' : 'bg-secondary'}">
                                ${activity.reprocessed ? 'Processed' : 'Not Processed'}
                            </span>
                        </p>
                    </div>
                </div>
            `;

            // Render metrics
            const metrics = [
                { name: 'Max Heart Rate', value: activity.max_heart_rate, unit: 'bpm' },
                { name: 'Avg Heart Rate', value: activity.avg_heart_rate, unit: 'bpm' },
                { name: 'Avg Power', value: activity.avg_power, unit: 'W' },
                { name: 'Calories', value: activity.calories, unit: 'kcal' },
                { name: 'Gear Ratio', value: activity.gear_ratio, unit: '' },
                { name: 'Gear Inches', value: activity.gear_inches, unit: '' }
            ];

            const tableBody = document.getElementById('metrics-table').querySelector('tbody');
            tableBody.innerHTML = '';

            metrics.forEach(metric => {
                if (metric.value !== undefined) {
                    const row = document.createElement('tr');
                    row.innerHTML = `<td>${metric.name}</td><td>${metric.value} ${metric.unit}</td>`;
                    tableBody.appendChild(row);
                }
            });
        }

        async function reprocessActivity(activityId) {
            const btn = document.getElementById('reprocess-btn');
            const spinner = document.getElementById('spinner');
            const resultEl = document.getElementById('reprocess-result');

            btn.disabled = true;
            spinner.classList.remove('d-none');
            resultEl.innerHTML = '';
            resultEl.classList.remove('alert-success', 'alert-danger');

            try {
                const response = await fetch(`/api/activities/${activityId}/reprocess`, {
                    method: 'POST'
                });

                if (!response.ok) {
                    const error = await response.text();
                    throw new Error(error);
                }

                resultEl.innerHTML = `<div class="alert alert-success">Activity reprocessed successfully!</div>`;

                // Reload activity data to show updated metrics
                await loadActivity(activityId);
            } catch (error) {
                console.error('Reprocess error:', error);
                resultEl.innerHTML = `<div class="alert alert-danger">${error.message || 'Reprocessing failed'}</div>`;
            } finally {
                spinner.classList.add('d-none');
                btn.disabled = false;
            }
        }
    </script>
</body>
</html>
|
||||
19
examples/GarminSync/garminsync/web/templates/base.html
Normal file
19
examples/GarminSync/garminsync/web/templates/base.html
Normal file
@@ -0,0 +1,19 @@
|
||||
<!DOCTYPE html>
<!-- Base layout: loads the shared stylesheets and scripts; pages fill the
     `content` and `page_scripts` blocks. -->
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>GarminSync</title>
    <link href="/static/style.css" rel="stylesheet">
    <link href="/static/components.css" rel="stylesheet">
    <link href="/static/responsive.css" rel="stylesheet">
</head>
<body>
    {% block content %}{% endblock %}

    <!-- navigation.js fills any `.navigation` container; utils.js exposes window.Utils -->
    <script src="/static/navigation.js"></script>
    <script src="/static/utils.js"></script>

    {% block page_scripts %}{% endblock %}
</body>
</html>
|
||||
151
examples/GarminSync/garminsync/web/templates/config.html
Normal file
151
examples/GarminSync/garminsync/web/templates/config.html
Normal file
@@ -0,0 +1,151 @@
|
||||
{% extends "base.html" %}

{% block content %}
<div class="container">
    <!-- Tab bar is injected here by navigation.js -->
    <div class="navigation"></div>

    <div class="card">
        <div class="card-header">
            <h3>GarminSync Configuration</h3>
        </div>
        <div class="card-body">
            <div class="card mb-4">
                <div class="card-header">Daemon Settings</div>
                <div class="card-body">
                    <form id="daemon-config-form">
                        <div class="form-group">
                            <label for="daemon-enabled">Enable Daemon</label>
                            <input type="checkbox" id="daemon-enabled" {% if config.enabled %}checked{% endif %}>
                        </div>
                        <div class="form-group">
                            <label for="cron-schedule">Synchronization Schedule</label>
                            <input type="text" class="form-control" id="cron-schedule" 
                                   value="{{ config.schedule_cron }}" 
                                   placeholder="0 */6 * * *"
                                   title="Cron expression (every 6 hours by default)">
                            <small class="form-text text-muted">
                                Cron format: minute hour day(month) month day(week)
                            </small>
                        </div>
                        <button type="submit" class="btn btn-primary">Save Settings</button>
                    </form>
                </div>
            </div>

            <div class="card">
                <div class="card-header">Daemon Status</div>
                <div class="card-body">
                    <!-- Initial values come from the template context; refreshed via /api/status -->
                    <div class="stat-item">
                        <label>Current Status:</label>
                        <span id="daemon-status-text">{{ config.status|capitalize }}</span>
                    </div>
                    <div class="stat-item">
                        <label>Last Run:</label>
                        <span id="daemon-last-run">{{ config.last_run or 'Never' }}</span>
                    </div>
                    <div class="stat-item">
                        <label>Next Run:</label>
                        <span id="daemon-next-run">{{ config.next_run or 'Not scheduled' }}</span>
                    </div>

                    <div class="mt-3">
                        <button id="start-daemon-btn" class="btn btn-success">
                            Start Daemon
                        </button>
                        <button id="stop-daemon-btn" class="btn btn-danger">
                            Stop Daemon
                        </button>
                    </div>
                </div>
            </div>
        </div>
    </div>
</div>
{% endblock %}

{% block page_scripts %}
<script>
document.addEventListener('DOMContentLoaded', function() {
    // Form submission handler: POSTs enabled flag + cron string to /api/schedule.
    document.getElementById('daemon-config-form').addEventListener('submit', async function(e) {
        e.preventDefault();
        
        const enabled = document.getElementById('daemon-enabled').checked;
        const cronSchedule = document.getElementById('cron-schedule').value;
        
        try {
            const response = await fetch('/api/schedule', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({
                    enabled: enabled,
                    cron_schedule: cronSchedule
                })
            });
            
            if (response.ok) {
                Utils.showSuccess('Configuration saved successfully');
                updateStatus();
            } else {
                const error = await response.json();
                Utils.showError(`Error: ${error.detail}`);
            }
        } catch (error) {
            Utils.showError('Failed to save configuration: ' + error.message);
        }
    });
    
    // Daemon control buttons
    document.getElementById('start-daemon-btn').addEventListener('click', async function() {
        try {
            const response = await fetch('/api/daemon/start', { method: 'POST' });
            if (response.ok) {
                Utils.showSuccess('Daemon started successfully');
                updateStatus();
            } else {
                const error = await response.json();
                Utils.showError(`Error: ${error.detail}`);
            }
        } catch (error) {
            Utils.showError('Failed to start daemon: ' + error.message);
        }
    });
    
    document.getElementById('stop-daemon-btn').addEventListener('click', async function() {
        try {
            const response = await fetch('/api/daemon/stop', { method: 'POST' });
            if (response.ok) {
                Utils.showSuccess('Daemon stopped successfully');
                updateStatus();
            } else {
                const error = await response.json();
                Utils.showError(`Error: ${error.detail}`);
            }
        } catch (error) {
            Utils.showError('Failed to stop daemon: ' + error.message);
        }
    });
    
    // Initial status update
    updateStatus();
    
    // Refresh the three status fields from GET /api/status.
    async function updateStatus() {
        try {
            const response = await fetch('/api/status');
            const data = await response.json();
            
            // Update status display
            document.getElementById('daemon-status-text').textContent = 
                data.daemon.running ? 'Running' : 'Stopped';
            document.getElementById('daemon-last-run').textContent = 
                data.daemon.last_run || 'Never';
            document.getElementById('daemon-next-run').textContent = 
                data.daemon.next_run || 'Not scheduled';
            
        } catch (error) {
            console.error('Failed to update status:', error);
        }
    }
});
</script>
{% endblock %}
|
||||
54
examples/GarminSync/garminsync/web/templates/dashboard.html
Normal file
54
examples/GarminSync/garminsync/web/templates/dashboard.html
Normal file
@@ -0,0 +1,54 @@
|
||||
{% extends "base.html" %}

{% block content %}
<div class="container">
    <!-- Tab bar is injected here by navigation.js -->
    <div class="navigation"></div>

    <div class="layout-grid">
        <!-- Left Sidebar -->
        <div class="sidebar">
            <div class="card sync-card">
                <button id="sync-now-btn" class="btn btn-primary btn-large">
                    <i class="icon-sync"></i>
                    Sync Now
                </button>
                <div class="sync-status" id="sync-status">
                    Ready to sync
                </div>
            </div>

            <!-- Counters are server-rendered, then kept fresh by home.js -->
            <div class="card statistics-card">
                <h3>Statistics</h3>
                <div class="stat-item">
                    <label>Total Activities:</label>
                    <span id="total-activities">{{stats.total}}</span>
                </div>
                <div class="stat-item">
                    <label>Downloaded:</label>
                    <span id="downloaded-activities">{{stats.downloaded}}</span>
                </div>
                <div class="stat-item">
                    <label>Missing:</label>
                    <span id="missing-activities">{{stats.missing}}</span>
                </div>
            </div>
        </div>

        <!-- Right Content Area -->
        <div class="main-content">
            <div class="card log-display">
                <div class="card-header">
                    <h3>Log Data</h3>
                </div>
                <div class="log-content" id="log-content">
                    <!-- Real-time log updates will appear here -->
                </div>
            </div>
        </div>
    </div>
</div>
{% endblock %}

{% block page_scripts %}
<script src="/static/home.js"></script>
{% endblock %}
|
||||
79
examples/GarminSync/garminsync/web/templates/logs.html
Normal file
79
examples/GarminSync/garminsync/web/templates/logs.html
Normal file
@@ -0,0 +1,79 @@
|
||||
{% extends "base.html" %}

{% block content %}
<div class="container">
    <!-- Tab bar is injected here by navigation.js -->
    <div class="navigation"></div>

    <div class="card">
        <div class="card-header">
            <h3>Sync Logs</h3>
        </div>
        <div class="card-body">
            <!-- Filters -->
            <!-- Buttons call the global shims defined in logs.js -->
            <div class="card mb-4">
                <div class="card-header">Filters</div>
                <div class="card-body">
                    <div class="form-group">
                        <label for="status-filter">Status</label>
                        <select id="status-filter" class="form-control">
                            <option value="">All Statuses</option>
                            <option value="success">Success</option>
                            <option value="error">Error</option>
                            <option value="partial">Partial</option>
                        </select>
                    </div>
                    
                    <div class="form-group">
                        <label for="operation-filter">Operation</label>
                        <select id="operation-filter" class="form-control">
                            <option value="">All Operations</option>
                            <option value="sync">Sync</option>
                            <option value="download">Download</option>
                            <option value="daemon">Daemon</option>
                        </select>
                    </div>
                    
                    <div class="form-group">
                        <label for="date-filter">Date</label>
                        <input type="date" id="date-filter" class="form-control">
                    </div>
                    
                    <button class="btn btn-primary" onclick="applyFilters()">Apply Filters</button>
                    <button class="btn btn-secondary" onclick="refreshLogs()">Refresh</button>
                    <button class="btn btn-warning" onclick="clearLogs()">Clear Logs</button>
                </div>
            </div>

            <!-- Logs Table -->
            <div class="table-container">
                <table class="activities-table" id="logs-table">
                    <thead>
                        <tr>
                            <th>Timestamp</th>
                            <th>Operation</th>
                            <th>Status</th>
                            <th>Message</th>
                            <th>Activities Processed</th>
                            <th>Activities Downloaded</th>
                        </tr>
                    </thead>
                    <tbody id="logs-tbody">
                        <!-- Populated by JavaScript -->
                    </tbody>
                </table>
            </div>

            <!-- Pagination -->
            <div class="pagination-container">
                <div class="pagination" id="pagination">
                    <!-- Populated by JavaScript -->
                </div>
            </div>
        </div>
    </div>
</div>
{% endblock %}

{% block page_scripts %}
<script src="/static/logs.js"></script>
{% endblock %}
|
||||
134
examples/GarminSync/garminsync/web/test_ui.py
Normal file
134
examples/GarminSync/garminsync/web/test_ui.py
Normal file
@@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple test script to verify the new UI is working correctly
|
||||
"""
|
||||
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
# Add the parent directory to the path to import garminsync modules
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||
|
||||
|
||||
def test_ui_endpoints():
    """Probe each web/UI endpoint on the local server and report results.

    Returns True when every endpoint answered with HTTP 200, False otherwise.
    """
    base_url = "http://localhost:8000"

    # Pages and status endpoints exercised by the new UI.
    endpoints = [
        "/",
        "/activities",
        "/config",
        "/logs",
        "/api/status",
        "/api/activities/stats",
        "/api/dashboard/stats",
    ]

    print("Testing UI endpoints...")

    failed_endpoints = []

    for path in endpoints:
        target = base_url + path
        print(f"Testing {target}...")
        try:
            reply = requests.get(target, timeout=10)
        except requests.exceptions.ConnectionError:
            print(f" ✗ {path} - Connection error (server not running?)")
            failed_endpoints.append(path)
        except requests.exceptions.Timeout:
            print(f" ✗ {path} - Timeout")
            failed_endpoints.append(path)
        except Exception as e:
            print(f" ✗ {path} - Error: {e}")
            failed_endpoints.append(path)
        else:
            if reply.status_code == 200:
                print(f" ✓ {path} - OK")
            else:
                print(f" ✗ {path} - Status code: {reply.status_code}")
                failed_endpoints.append(path)

    if failed_endpoints:
        print(f"\nFailed endpoints: {failed_endpoints}")
        return False
    print("\nAll endpoints are working correctly!")
    return True
|
||||
|
||||
|
||||
def test_api_endpoints():
    """Test that the new API endpoints are working correctly.

    Results are printed per endpoint; unlike test_ui_endpoints, failures
    here are informational only and nothing is returned.
    """
    base_url = "http://localhost:8000"

    # Test API endpoints: (path, HTTP method) pairs.
    api_endpoints = [
        ("/api/activities", "GET"),
        (
            "/api/activities/1",
            "GET",
        ),  # This might fail if activity doesn't exist, which is OK
        ("/api/dashboard/stats", "GET"),
    ]

    print("\nTesting API endpoints...")

    for endpoint, method in api_endpoints:
        try:
            url = base_url + endpoint
            print(f"Testing {method} {url}...")

            if method == "GET":
                response = requests.get(url, timeout=10)
            else:
                response = requests.post(url, timeout=10)

            # For activity details, 404 is acceptable if activity doesn't exist
            if endpoint == "/api/activities/1" and response.status_code == 404:
                print(f" ✓ {endpoint} - OK (404 expected if activity doesn't exist)")
                continue

            if response.status_code == 200:
                print(f" ✓ {endpoint} - OK")
                # Try to parse JSON.
                # FIX: was a bare `except:` which also swallowed KeyboardInterrupt
                # and unrelated bugs; response.json() signals a non-JSON body
                # with ValueError (requests' JSONDecodeError subclasses it).
                try:
                    data = response.json()
                    print(
                        f" Response keys: {list(data.keys()) if isinstance(data, dict) else 'Not a dict'}"
                    )
                except ValueError:
                    print(" Response is not JSON")
            else:
                print(f" ✗ {endpoint} - Status code: {response.status_code}")

        except requests.exceptions.ConnectionError:
            print(f" ✗ {endpoint} - Connection error (server not running?)")
        except requests.exceptions.Timeout:
            print(f" ✗ {endpoint} - Timeout")
        except Exception as e:
            print(f" ✗ {endpoint} - Error: {e}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    print("GarminSync UI Test Script")
    print("=" * 30)

    # UI checks determine the exit code; API checks are informational only.
    ui_ok = test_ui_endpoints()
    test_api_endpoints()

    print("\n" + "=" * 30)
    if not ui_ok:
        print("Some UI tests failed!")
        sys.exit(1)
    print("UI tests completed successfully!")
    sys.exit(0)
|
||||
57
examples/GarminSync/justfile
Normal file
57
examples/GarminSync/justfile
Normal file
@@ -0,0 +1,57 @@
|
||||
# GarminSync project tasks

# Build container image
build:
    docker build -t garminsync .

# Run server in development mode with live reload (container-based)
# FIX: uvicorn must listen on the same container port that is published
# (-p 8888:8888); it previously bound 8080, making the mapped port unreachable.
dev:
    just build
    docker run -it --rm --env-file .env -v $(pwd)/garminsync:/app/garminsync -v $(pwd)/data:/app/data -p 8888:8888 --name garminsync-dev garminsync uvicorn garminsync.web.app:app --reload --host 0.0.0.0 --port 8888

# Run database migrations with enhanced logging (container-based)
migrate:
    just build
    docker run --rm --env-file .env -v $(pwd)/data:/app/data --entrypoint "python" garminsync -m garminsync.cli migrate

# Run validation tests (container-based)
test:
    just build
    docker run --rm --env-file .env -v $(pwd)/tests:/app/tests -v $(pwd)/data:/app/data --entrypoint "pytest" garminsync /app/tests

# View logs of running container
logs:
    docker logs garminsync

# Access container shell
shell:
    docker exec -it garminsync /bin/bash

# Run linter (container-based)
lint:
    just build
    docker run --rm -v $(pwd)/garminsync:/app/garminsync --entrypoint "pylint" garminsync garminsync/

# Run formatter (container-based)
format:
    black garminsync/
    isort garminsync/
    just build

# Start production server
# FIX: each recipe line runs in its own shell, so a bare `cd` on one line
# does not affect the next; chain with `&&` on a single line.
run_server:
    cd ~/GarminSync/docker && docker compose up --build

# Stop production server
stop_server:
    docker stop garminsync

# Run server in live mode for debugging
run_server_live:
    just build
    docker run -it --rm --env-file .env -e RUN_MIGRATIONS=1 -v $(pwd)/data:/app/data -p 8888:8888 --name garminsync garminsync daemon --start

# Clean up any existing container
# `|| true` keeps cleanup successful when no such container exists.
cleanup:
    docker stop garminsync || true
    docker rm garminsync || true
|
||||
10
examples/GarminSync/mandates.md
Normal file
10
examples/GarminSync/mandates.md
Normal file
@@ -0,0 +1,10 @@
|
||||
<Mandates>
|
||||
- use the just_run_* tools via the MCP server
|
||||
- all installs should be done in the docker container.
|
||||
- NO installs on the host
|
||||
- database upgrades should be handled during container server start up
|
||||
- always rebuild the container before running tests
|
||||
- if you need clarification return to PLAN mode
|
||||
- force rereading of the mandates on each cycle
|
||||
- always track progress of plans in todo.md
|
||||
</Mandates>
|
||||
37
examples/GarminSync/migrations/alembic.ini
Normal file
37
examples/GarminSync/migrations/alembic.ini
Normal file
@@ -0,0 +1,37 @@
|
||||
[alembic]
|
||||
script_location = migrations/versions
|
||||
sqlalchemy.url = sqlite:///data/garmin.db
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
@@ -0,0 +1,36 @@
|
||||
"""Add power-analysis and gearing-analysis columns.

Revision ID: 20240821150000
Revises:
Create Date: 2024-08-21 15:00:00.000000

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
# FIX: the original script declared no identifiers, so Alembic could not
# place it in the revision chain at all.  '20240821150000' is inferred from
# the next migration's down_revision — confirm against the intended history.
revision = '20240821150000'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    """Add nullable analysis columns (no backfill needed for existing rows)."""
    op.add_column('power_analysis', sa.Column('peak_power_1s', sa.Float(), nullable=True))
    op.add_column('power_analysis', sa.Column('peak_power_5s', sa.Float(), nullable=True))
    op.add_column('power_analysis', sa.Column('peak_power_20s', sa.Float(), nullable=True))
    op.add_column('power_analysis', sa.Column('peak_power_300s', sa.Float(), nullable=True))
    op.add_column('power_analysis', sa.Column('normalized_power', sa.Float(), nullable=True))
    op.add_column('power_analysis', sa.Column('intensity_factor', sa.Float(), nullable=True))
    op.add_column('power_analysis', sa.Column('training_stress_score', sa.Float(), nullable=True))

    op.add_column('gearing_analysis', sa.Column('estimated_chainring_teeth', sa.Integer(), nullable=True))
    op.add_column('gearing_analysis', sa.Column('estimated_cassette_teeth', sa.Integer(), nullable=True))
    op.add_column('gearing_analysis', sa.Column('gear_ratio', sa.Float(), nullable=True))
    op.add_column('gearing_analysis', sa.Column('gear_inches', sa.Float(), nullable=True))
    op.add_column('gearing_analysis', sa.Column('development_meters', sa.Float(), nullable=True))
    op.add_column('gearing_analysis', sa.Column('confidence_score', sa.Float(), nullable=True))
    op.add_column('gearing_analysis', sa.Column('analysis_method', sa.String(), default="singlespeed_estimation"))


def downgrade():
    """Drop every column added by upgrade()."""
    op.drop_column('power_analysis', 'peak_power_1s')
    op.drop_column('power_analysis', 'peak_power_5s')
    op.drop_column('power_analysis', 'peak_power_20s')
    op.drop_column('power_analysis', 'peak_power_300s')
    op.drop_column('power_analysis', 'normalized_power')
    op.drop_column('power_analysis', 'intensity_factor')
    op.drop_column('power_analysis', 'training_stress_score')

    op.drop_column('gearing_analysis', 'estimated_chainring_teeth')
    op.drop_column('gearing_analysis', 'estimated_cassette_teeth')
    op.drop_column('gearing_analysis', 'gear_ratio')
    op.drop_column('gearing_analysis', 'gear_inches')
    op.drop_column('gearing_analysis', 'development_meters')
    op.drop_column('gearing_analysis', 'confidence_score')
    op.drop_column('gearing_analysis', 'analysis_method')
|
||||
@@ -0,0 +1,23 @@
|
||||
"""Add avg_heart_rate and calories columns to activities table
|
||||
|
||||
Revision ID: 20240822165438
|
||||
Revises: 20240821150000
|
||||
Create Date: 2024-08-22 16:54:38.123456
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '20240822165438'
|
||||
down_revision = '20240821150000'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
def upgrade():
|
||||
op.add_column('activities', sa.Column('avg_heart_rate', sa.Integer(), nullable=True))
|
||||
op.add_column('activities', sa.Column('calories', sa.Integer(), nullable=True))
|
||||
|
||||
def downgrade():
|
||||
op.drop_column('activities', 'avg_heart_rate')
|
||||
op.drop_column('activities', 'calories')
|
||||
@@ -0,0 +1,31 @@
|
||||
"""Add reprocessed column
|
||||
|
||||
Revision ID: 20240823000000
|
||||
Revises: 20240822165438_add_hr_and_calories_columns
|
||||
Create Date: 2025-08-23 00:00:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '20240823000000'
|
||||
down_revision = '20240822165438_add_hr_and_calories_columns'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
def upgrade():
|
||||
# Add reprocessed column to activities table
|
||||
op.add_column('activities', sa.Column('reprocessed', sa.Boolean(), nullable=True, server_default='0'))
|
||||
|
||||
# Set default value for existing records
|
||||
op.execute("UPDATE activities SET reprocessed = 0 WHERE reprocessed IS NULL")
|
||||
|
||||
# Make the column NOT NULL after setting default values
|
||||
with op.batch_alter_table('activities') as batch_op:
|
||||
batch_op.alter_column('reprocessed', existing_type=sa.Boolean(), nullable=False)
|
||||
|
||||
def downgrade():
|
||||
# Remove reprocessed column
|
||||
with op.batch_alter_table('activities') as batch_op:
|
||||
batch_op.drop_column('reprocessed')
|
||||
72
examples/GarminSync/migrations/versions/env.py
Normal file
72
examples/GarminSync/migrations/versions/env.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""Alembic migration environment.

Standard offline/online runner: the database URL and logging configuration
are read from the ini file referenced by ``config.config_file_name``.
"""
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig

# this is the Alembic Config object, which provides
# access to the values within the .ini file you've provided
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE(review): while this stays None, `alembic revision --autogenerate`
# cannot diff against the models — point it at the project's SQLAlchemy
# metadata if autogeneration is wanted.
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()

def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # NullPool: migrations need exactly one short-lived connection.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()

# Alembic sets the mode before importing this module; dispatch accordingly.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
80
examples/GarminSync/patches/garth_data_weight.py
Normal file
80
examples/GarminSync/patches/garth_data_weight.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from datetime import date, datetime, timedelta
|
||||
from itertools import chain
|
||||
|
||||
from pydantic import Field, ValidationInfo, field_validator
|
||||
from pydantic.dataclasses import dataclass
|
||||
from typing_extensions import Self
|
||||
|
||||
from .. import http
|
||||
from ..utils import (
|
||||
camel_to_snake_dict,
|
||||
format_end_date,
|
||||
get_localized_datetime,
|
||||
)
|
||||
from ._base import MAX_WORKERS, Data
|
||||
|
||||
|
||||
@dataclass
class WeightData(Data):
    """One weight-scale reading from Garmin Connect's weight service.

    Attribute names correspond to Garmin's camelCase payload after
    conversion through ``camel_to_snake_dict``; two fields carry explicit
    pydantic aliases because their payload keys differ from the attribute
    names.
    """

    sample_pk: int
    calendar_date: date
    # NOTE(review): integer weight — presumably grams as returned by the
    # Garmin API; confirm before converting for display.
    weight: int
    source_type: str
    weight_delta: float
    datetime_utc: datetime = Field(..., alias="timestamp_gmt")
    datetime_local: datetime = Field(..., alias="date")
    bmi: float | None = None
    body_fat: float | None = None
    body_water: float | None = None
    bone_mass: int | None = None
    muscle_mass: int | None = None
    physique_rating: float | None = None
    visceral_fat: float | None = None
    metabolic_age: int | None = None

    @field_validator("datetime_local", mode="before")
    @classmethod
    def to_localized_datetime(cls, v: int, info: ValidationInfo) -> datetime:
        """Convert the raw local-time payload value to a localized datetime.

        Relies on ``datetime_utc`` being declared (and thus validated)
        before this field, so ``info.data`` already contains it.
        """
        return get_localized_datetime(info.data["datetime_utc"].timestamp() * 1000, v)

    @classmethod
    def get(
        cls, day: date | str, *, client: http.Client | None = None
    ) -> Self | None:
        """Fetch the first weight entry recorded on *day*, or None if none exists."""
        client = client or http.client
        path = f"/weight-service/weight/dayview/{day}"
        data = client.connectapi(path)
        day_weight_list = data["dateWeightList"] if data else []

        if not day_weight_list:
            return None

        # Get first (most recent) weight entry for the day
        weight_data = camel_to_snake_dict(day_weight_list[0])
        return cls(**weight_data)

    @classmethod
    def list(
        cls,
        end: date | str | None = None,
        days: int = 1,
        *,
        client: http.Client | None = None,
        max_workers: int = MAX_WORKERS,
    ) -> list[Self]:
        """Return all readings in the *days*-long window ending at *end*.

        Results are sorted ascending by UTC timestamp.  ``max_workers`` is
        accepted for interface parity with other Data subclasses but is not
        used here — the range endpoint is a single request.
        """
        client = client or http.client
        end = format_end_date(end)
        start = end - timedelta(days=days - 1)

        data = client.connectapi(
            f"/weight-service/weight/range/{start}/{end}?includeAll=true"
        )
        weight_summaries = data["dailyWeightSummaries"] if data else []
        # Flatten per-day summaries into a single stream of raw readings.
        weight_metrics = chain.from_iterable(
            summary["allWeightMetrics"] for summary in weight_summaries
        )
        weight_data_list = (
            cls(**camel_to_snake_dict(weight_data))
            for weight_data in weight_metrics
        )
        return sorted(weight_data_list, key=lambda d: d.datetime_utc)
|
||||
1453
examples/GarminSync/plan.md
Normal file
1453
examples/GarminSync/plan.md
Normal file
File diff suppressed because it is too large
Load Diff
177
examples/GarminSync/plan_phase2.md
Normal file
177
examples/GarminSync/plan_phase2.md
Normal file
@@ -0,0 +1,177 @@
|
||||
# Implementation Improvements Needed
|
||||
|
||||
## 1. **Route Handler Completion** - HIGH PRIORITY
|
||||
|
||||
### Missing Import in `internal/web/routes.go`:
|
||||
```go
|
||||
import (
|
||||
"strconv" // ADD THIS - needed for strconv.Atoi
|
||||
// ... other imports
|
||||
)
|
||||
```
|
||||
|
||||
### Missing Route Connections in `main.go`:
|
||||
```go
|
||||
// Current setupRoutes function is incomplete - needs:
|
||||
func (app *App) setupRoutes(webHandler *web.WebHandler) *http.ServeMux {
|
||||
mux := http.NewServeMux()
|
||||
|
||||
// Health check
|
||||
mux.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
w.Write([]byte("OK"))
|
||||
})
|
||||
|
||||
// Web UI routes
|
||||
mux.HandleFunc("/", webHandler.Index)
|
||||
mux.HandleFunc("/activities", webHandler.ActivityList)
|
||||
mux.HandleFunc("/activity", webHandler.ActivityDetail)
|
||||
|
||||
// ADD THESE API ROUTES:
|
||||
mux.HandleFunc("/api/activities", func(w http.ResponseWriter, r *http.Request) {
|
||||
// Implement API endpoint
|
||||
})
|
||||
mux.HandleFunc("/api/stats", func(w http.ResponseWriter, r *http.Request) {
|
||||
stats, _ := app.db.GetStats()
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(stats)
|
||||
})
|
||||
mux.HandleFunc("/api/sync", func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method == "POST" {
|
||||
go app.syncService.Sync(context.Background())
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]string{"status": "started"})
|
||||
}
|
||||
})
|
||||
|
||||
return mux
|
||||
}
|
||||
```
|
||||
|
||||
## 2. **Database Interface Issues** - HIGH PRIORITY
|
||||
|
||||
### Fix SQLiteDB Creation in `main.go`:
|
||||
```go
|
||||
// CURRENT (INCORRECT):
|
||||
app.db = database.NewSQLiteDBFromDB(dbConn)
|
||||
|
||||
// SHOULD BE:
|
||||
sqliteDB, err := database.NewSQLiteDB(dbPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
app.db = sqliteDB
|
||||
```
|
||||
|
||||
### Fix Return Type Mismatch:
|
||||
Your `NewSQLiteDB` returns `*SQLiteDB` but main.go expects `Database` interface.
|
||||
|
||||
## 3. **Template Function Issues** - MEDIUM PRIORITY
|
||||
|
||||
### Missing Template Functions in `activity_detail.html`:
|
||||
```go
|
||||
// Add these template functions to web handler:
|
||||
func (h *WebHandler) LoadTemplates(templateDir string) error {
|
||||
// ... existing code ...
|
||||
|
||||
// Add custom functions
|
||||
funcMap := template.FuncMap{
|
||||
"div": func(a, b float64) float64 { return a / b },
|
||||
"formatDuration": func(seconds int) string {
|
||||
hrs := seconds / 3600
|
||||
mins := (seconds % 3600) / 60
|
||||
return fmt.Sprintf("%dh %dm", hrs, mins)
|
||||
},
|
||||
"formatMeters": func(meters float64) string {
|
||||
return fmt.Sprintf("%.0f", meters)
|
||||
},
|
||||
}
|
||||
|
||||
for _, page := range pages {
|
||||
name := filepath.Base(page)
|
||||
files := append([]string{page}, layouts...)
|
||||
files = append(files, partials...)
|
||||
|
||||
h.templates[name], err = template.New(name).Funcs(funcMap).ParseFiles(files...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
## 4. **Parser Implementation** - MEDIUM PRIORITY
|
||||
|
||||
### Complete TCX/GPX Parsers:
|
||||
The factory references them but they return `nil`. Either:
|
||||
- Implement them fully, or
|
||||
- Remove references and return proper errors
|
||||
|
||||
```go
|
||||
// In factory.go, replace:
|
||||
func NewTCXParser() Parser { return nil }
|
||||
func NewGPXParser() Parser { return nil }
|
||||
|
||||
// With:
|
||||
func NewTCXParser() Parser {
|
||||
return &TCXParser{} // Implement basic TCX parser
|
||||
}
|
||||
func NewGPXParser() Parser {
|
||||
return &GPXParser{} // Or remove if not needed
|
||||
}
|
||||
```
|
||||
|
||||
## 5. **Sync Service Integration** - MEDIUM PRIORITY
|
||||
|
||||
### Missing Sync Service in Main App:
|
||||
```go
|
||||
// In main.go App struct, add:
|
||||
type App struct {
|
||||
db *database.SQLiteDB
|
||||
cron *cron.Cron
|
||||
server *http.Server
|
||||
garmin *garmin.Client
|
||||
syncService *sync.SyncService // ADD THIS
|
||||
shutdown chan os.Signal
|
||||
}
|
||||
|
||||
// In init() method:
|
||||
app.syncService = sync.NewSyncService(app.garmin, app.db, dataDir)
|
||||
```
|
||||
|
||||
## 6. **Build Issues** - LOW PRIORITY
|
||||
|
||||
### Fix Go Module Issues:
|
||||
Your `go.mod` has some unused dependencies and wrong module path:
|
||||
|
||||
```go
|
||||
// Update go.mod:
|
||||
module garminsync // Remove github.com path if local
|
||||
|
||||
go 1.21
|
||||
|
||||
require (
|
||||
github.com/gorilla/mux v1.8.0
|
||||
github.com/mattn/go-sqlite3 v1.14.17
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
golang.org/x/net v0.12.0
|
||||
)
|
||||
|
||||
// Remove unused dependencies like:
|
||||
// - github.com/tormoder/fit (if not actually used)
|
||||
// - Various lint tools (should be in tools.go)
|
||||
```
|
||||
|
||||
## 7. **Docker Configuration** - LOW PRIORITY
|
||||
|
||||
### Health Check Enhancement:
|
||||
```dockerfile
|
||||
# In Dockerfile, improve health check:
|
||||
HEALTHCHECK --interval=30s --timeout=30s --retries=3 \
|
||||
CMD wget --quiet --tries=1 --spider http://localhost:8888/health || exit 1
|
||||
|
||||
# Make sure wget is available or use curl:
|
||||
RUN apk add --no-cache ca-certificates tzdata wget
|
||||
```
|
||||
52
examples/GarminSync/pyproject.toml
Normal file
52
examples/GarminSync/pyproject.toml
Normal file
@@ -0,0 +1,52 @@
|
||||
[project]
|
||||
name = "GarminSync"
|
||||
version = "0.1.0"
|
||||
description = "Sync and analyze Garmin activity data"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"flask==3.0.0",
|
||||
"flask-sqlalchemy==3.1.1",
|
||||
"flask-migrate==4.0.7",
|
||||
"python-dotenv==1.0.0",
|
||||
"uvicorn==0.27.0",
|
||||
"alembic==1.13.1",
|
||||
"flask-paginate==2024.4.12",
|
||||
"pytest==8.1.1",
|
||||
"typer==0.9.0",
|
||||
"apscheduler==3.10.4",
|
||||
"requests==2.32.0",
|
||||
"garminconnect==0.2.28",
|
||||
"garth",
|
||||
"fastapi==0.109.1",
|
||||
"pydantic==2.5.3",
|
||||
"tqdm==4.66.1",
|
||||
"sqlalchemy==2.0.30",
|
||||
"pylint==3.1.0",
|
||||
"pygments==2.18.0",
|
||||
"fitdecode",
|
||||
"numpy==1.26.0",
|
||||
"scipy==1.11.1",
|
||||
"aiosqlite",
|
||||
"asyncpg",
|
||||
"aiohttp"
|
||||
]
|
||||
|
||||
[build-system]
# FIX: "uv" is not a PEP 517 build backend (uv is an installer/resolver),
# so the original declaration made the project unbuildable.  hatchling is a
# standard backend and works fine with uv-driven installs.
requires = ["hatchling"]
build-backend = "hatchling.build"
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 120
|
||||
target-version = "py311"
|
||||
select = ["E", "F", "W", "I", "B", "C", "N", "Q"]
|
||||
ignore = []
|
||||
|
||||
[tool.ruff.per-file-ignores]
|
||||
"__init__.py" = ["F401"]
|
||||
"tests/*.py" = ["S101", "INP001", "F811", "PLR2004", "ANN001", "ANN101", "ANN201"]
|
||||
|
||||
[tool.black]
|
||||
line-length = 120
|
||||
target-version = ["py311"]
|
||||
skip-string-normalization = true
|
||||
25
examples/GarminSync/requirements.txt
Normal file
25
examples/GarminSync/requirements.txt
Normal file
@@ -0,0 +1,25 @@
|
||||
flask==3.0.0
|
||||
flask-sqlalchemy==3.1.1
|
||||
flask-migrate==4.0.7
|
||||
python-dotenv==1.0.0
|
||||
uvicorn==0.27.0
|
||||
alembic==1.13.1
|
||||
flask-paginate==2024.4.12
|
||||
pytest==8.1.1
|
||||
typer==0.9.0
|
||||
apscheduler==3.10.4
|
||||
requests==2.32.0
|
||||
garminconnect==0.2.28
|
||||
garth
|
||||
fastapi==0.109.1
|
||||
pydantic==2.5.3
|
||||
tqdm==4.66.1
|
||||
sqlalchemy==2.0.30
|
||||
pylint==3.1.0
|
||||
pygments==2.18.0
|
||||
fitdecode
|
||||
numpy==1.26.0
|
||||
scipy==1.11.1
|
||||
aiosqlite
|
||||
asyncpg
|
||||
aiohttp
|
||||
114
examples/GarminSync/tests/activity_table_validation.sh
Executable file
114
examples/GarminSync/tests/activity_table_validation.sh
Executable file
@@ -0,0 +1,114 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Activity Table Validation Script
|
||||
# This script tests the activity table implementation
|
||||
|
||||
# Configuration
|
||||
API_URL="http://localhost:8888/api/api/activities" # Changed port to 8888 to match container
|
||||
TIMEOUT=10
|
||||
|
||||
# Function to display test results
|
||||
# Print a single PASS (✅) or FAIL (❌) line for a named test.
#   $1 test name, $2 "PASS"/"FAIL", $3 human-readable detail
display_result() {
    local name="$1"
    local outcome="$2"
    local detail="$3"

    case "$outcome" in
        PASS) echo "✅ $name: $detail" ;;
        *)    echo "❌ $name: $detail" ;;
    esac
}
|
||||
|
||||
# Function to wait for API to be ready
|
||||
# Block until the API answers HTTP on port 8888, or exit 1 after
# max_attempts seconds.
#
# FIX: the original curl'd the page body and grepped for server *log*
# messages ("Uvicorn running on", "Application startup complete", ...),
# which never appear in an HTTP response body — the loop could only end via
# timeout.  Probe the HTTP status code instead: any real status (even 404)
# proves the server is accepting connections; curl reports 000 on failure.
wait_for_api() {
    echo "Waiting for API to start..."
    attempts=0
    max_attempts=60 # Increased timeout to 60 seconds

    while true; do
        status=$(curl -s -o /dev/null -w "%{http_code}" -m 1 "http://localhost:8888")
        if [ -n "$status" ] && [ "$status" != "000" ]; then
            echo "API started successfully"
            break
        fi

        attempts=$((attempts+1))
        if [ $attempts -ge $max_attempts ]; then
            echo "API failed to start within $max_attempts seconds"
            exit 1
        fi

        sleep 1
    done
}
|
||||
|
||||
# Wait for API to be ready
wait_for_api

# Test 1: Basic API response
# NOTE(review): $? below reflects the exit status of jq (last command in the
# pipeline), not curl — a failed curl with empty output is judged by jq alone.
echo "Running basic API response test..."
response=$(curl -s -m $TIMEOUT "$API_URL" | jq '.')
if [ $? -eq 0 ]; then
    # Substring checks for the expected top-level JSON keys.
    if [[ "$response" == *"activities"* ]] && [[ "$response" == *"total_pages"* ]] && [[ "$response" == *"status"* ]]; then
        display_result "Basic API Response" PASS "API returns expected structure"
    else
        display_result "Basic API Response" FAIL "API response doesn't contain expected fields"
    fi
else
    display_result "Basic API Response" FAIL "API request failed"
fi

# Test 2: Pagination test
echo "Running pagination test..."
page1=$(curl -s -m $TIMEOUT "$API_URL?page=1" | jq '.')
page2=$(curl -s -m $TIMEOUT "$API_URL?page=2" | jq '.')

# NOTE(review): $? only reflects the page2 pipeline; a page1 failure is masked.
if [ $? -eq 0 ]; then
    page1_count=$(echo "$page1" | jq '.activities | length')
    page2_count=$(echo "$page2" | jq '.activities | length')

    # NOTE(review): this assumes the data set spans at least two pages;
    # a small/empty database makes this test fail even when paging works.
    if [ "$page1_count" -gt 0 ] && [ "$page2_count" -gt 0 ]; then
        display_result "Pagination Test" PASS "Both pages contain activities"
    else
        display_result "Pagination Test" FAIL "One or more pages are empty"
    fi
else
    display_result "Pagination Test" FAIL "API request failed"
fi

# Test 3: Data consistency test
# Cross-check the first list entry against the detail endpoint for the same id.
echo "Running data consistency test..."
activity_id=$(echo "$page1" | jq -r '.activities[0].id')
activity_name=$(echo "$page1" | jq -r '.activities[0].name')

details_response=$(curl -s -m $TIMEOUT "$API_URL/$activity_id" | jq '.')
if [ $? -eq 0 ]; then
    details_id=$(echo "$details_response" | jq -r '.id')
    details_name=$(echo "$details_response" | jq -r '.name')

    if [ "$activity_id" = "$details_id" ] && [ "$activity_name" = "$details_name" ]; then
        display_result "Data Consistency Test" PASS "Activity details match API response"
    else
        display_result "Data Consistency Test" FAIL "Activity details don't match API response"
    fi
else
    display_result "Data Consistency Test" FAIL "API request failed"
fi

# Test 4: Error handling test
# Request an id that should not exist and expect a {"detail": "... not found"} body.
echo "Running error handling test..."
error_response=$(curl -s -m $TIMEOUT "$API_URL/999999999" | jq '.')
if [ $? -eq 0 ]; then
    if [[ "$error_response" == *"detail"* ]] && [[ "$error_response" == *"not found"* ]]; then
        display_result "Error Handling Test" PASS "API returns expected error for non-existent activity"
    else
        display_result "Error Handling Test" FAIL "API doesn't return expected error for non-existent activity"
    fi
else
    display_result "Error Handling Test" FAIL "API request failed"
fi

echo "All tests completed."
|
||||
110
examples/GarminSync/tests/test_sync.py
Normal file
110
examples/GarminSync/tests/test_sync.py
Normal file
@@ -0,0 +1,110 @@
|
||||
import pytest
|
||||
import sys
|
||||
from unittest.mock import Mock, patch, MagicMock
|
||||
|
||||
# Add the project root to the Python path
|
||||
sys.path.insert(0, '/app')
|
||||
|
||||
from garminsync.database import sync_database, Activity, get_activity_metrics
|
||||
|
||||
def test_sync_database_with_valid_activities():
    """Test sync_database with valid API response"""
    client = Mock()
    client.get_activities.return_value = [
        {"activityId": 12345, "startTimeLocal": "2023-01-01T10:00:00"},
        {"activityId": 67890, "startTimeLocal": "2023-01-02T11:00:00"},
    ]

    # first() -> None means neither activity exists yet, so both are inserted.
    session = MagicMock()
    session.query.return_value.filter_by.return_value.first.return_value = None

    metrics = {
        "activityType": {"typeKey": "running"},
        "summaryDTO": {
            "duration": 3600,
            "distance": 10.0,
            "maxHR": 180,
            "calories": 400,
        },
    }

    with patch('garminsync.database.get_session', return_value=session), \
         patch('garminsync.database.get_activity_metrics', return_value=metrics):
        sync_database(client)

    # Verify both new activities were added and the transaction committed.
    assert session.add.call_count == 2
    assert session.commit.called
|
||||
|
||||
def test_sync_database_with_none_activities():
    """Test sync_database with None response from API"""
    client = Mock()
    client.get_activities.return_value = None

    session = MagicMock()

    # A None API response must not trigger any inserts.
    with patch('garminsync.database.get_session', return_value=session):
        sync_database(client)

    session.add.assert_not_called()
|
||||
|
||||
def test_sync_database_with_missing_fields():
    """Activities lacking activityId or startTimeLocal must be skipped."""
    client = Mock()
    client.get_activities.return_value = [
        {"activityId": 12345},                                           # no start time
        {"startTimeLocal": "2023-01-02T11:00:00"},                       # no id
        {"activityId": 67890, "startTimeLocal": "2023-01-03T12:00:00"}   # complete
    ]

    session = MagicMock()
    # Lookups always miss, so the complete record counts as new.
    session.query.return_value.filter_by.return_value.first.return_value = None

    with patch('garminsync.database.get_session', return_value=session), \
         patch('garminsync.database.get_activity_metrics',
               return_value={"summaryDTO": {"duration": 3600.0}}):
        sync_database(client)

    # Only the fully-populated activity may be inserted.
    assert session.add.call_count == 1
    inserted = session.add.call_args[0][0]
    assert inserted.activity_id == 67890
|
||||
|
||||
def test_sync_database_with_existing_activities():
    """Activities already present in the database are not re-inserted."""
    client = Mock()
    client.get_activities.return_value = [
        {"activityId": 12345, "startTimeLocal": "2023-01-01T10:00:00"}
    ]

    session = MagicMock()
    # Simulate a hit: the existence lookup returns a row.
    session.query.return_value.filter_by.return_value.first.return_value = Mock()

    with patch('garminsync.database.get_session', return_value=session), \
         patch('garminsync.database.get_activity_metrics',
               return_value={"summaryDTO": {"duration": 3600.0}}):
        sync_database(client)

    session.add.assert_not_called()
|
||||
|
||||
def test_sync_database_with_invalid_activity_data():
    """Non-dict entries in the API payload must be ignored, not crash."""
    client = Mock()
    client.get_activities.return_value = [
        "invalid data",  # wrong type
        None,            # wrong type
        {"activityId": 12345, "startTimeLocal": "2023-01-01T10:00:00"}
    ]

    session = MagicMock()
    # Lookups always miss, so the valid record counts as new.
    session.query.return_value.filter_by.return_value.first.return_value = None

    with patch('garminsync.database.get_session', return_value=session), \
         patch('garminsync.database.get_activity_metrics',
               return_value={"summaryDTO": {"duration": 3600.0}}):
        sync_database(client)

    # Only the well-formed activity may be inserted.
    assert session.add.call_count == 1
    inserted = session.add.call_args[0][0]
    assert inserted.activity_id == 12345
|
||||
100
examples/GarminSync/workflows.md
Normal file
100
examples/GarminSync/workflows.md
Normal file
@@ -0,0 +1,100 @@
|
||||
# GarminSync Workflows
|
||||
|
||||
## Migration Workflow
|
||||
|
||||
### Purpose
|
||||
Add new columns to database and populate with activity metrics
|
||||
|
||||
### Trigger
|
||||
`python cli.py migrate`
|
||||
|
||||
### Steps
|
||||
1. Add required columns to activities table:
|
||||
- activity_type (TEXT)
|
||||
- duration (INTEGER)
|
||||
- distance (REAL)
|
||||
- max_heart_rate (INTEGER)
|
||||
- avg_power (REAL)
|
||||
- calories (INTEGER)
|
||||
2. For each activity:
|
||||
- Parse metrics from local FIT/XML files
|
||||
- Fetch from Garmin API if local files missing
|
||||
- Update database fields
|
||||
3. Commit changes
|
||||
4. Report migration status
|
||||
|
||||
### Error Handling
|
||||
- Logs errors per activity
|
||||
- Marks unprocessable activities as "Unknown"
|
||||
- Continues processing other activities on error
|
||||
|
||||
## Sync Workflow
|
||||
|
||||
### Purpose
|
||||
Keep local database synchronized with Garmin Connect
|
||||
|
||||
### Triggers
|
||||
- CLI commands (`list`, `download`)
|
||||
- Scheduled daemon (every 6 hours by default)
|
||||
- Web UI requests
|
||||
|
||||
### Core Components
|
||||
- `sync_database()`: Syncs activity metadata
|
||||
- `download()`: Fetches missing FIT files
|
||||
- Daemon: Background scheduler and web UI
|
||||
|
||||
### Process Flow
|
||||
1. Authenticate with Garmin API
|
||||
2. Fetch latest activities
|
||||
3. For each activity:
|
||||
- Parse metrics from FIT/XML files
|
||||
- Fetch from Garmin API if local files missing
|
||||
- Update database fields
|
||||
4. Download missing activity files
|
||||
5. Update sync timestamps
|
||||
6. Log operations
|
||||
|
||||
### Database Schema
|
||||
```mermaid
|
||||
erDiagram
|
||||
activities {
|
||||
integer activity_id PK
|
||||
string start_time
|
||||
string activity_type
|
||||
integer duration
|
||||
float distance
|
||||
integer max_heart_rate
|
||||
integer avg_heart_rate
|
||||
float avg_power
|
||||
integer calories
|
||||
string filename
|
||||
boolean downloaded
|
||||
string created_at
|
||||
string last_sync
|
||||
}
|
||||
|
||||
daemon_config {
|
||||
integer id PK
|
||||
boolean enabled
|
||||
string schedule_cron
|
||||
string last_run
|
||||
string next_run
|
||||
string status
|
||||
}
|
||||
|
||||
sync_logs {
|
||||
integer id PK
|
||||
string timestamp
|
||||
string operation
|
||||
string status
|
||||
string message
|
||||
integer activities_processed
|
||||
integer activities_downloaded
|
||||
}
|
||||
```
|
||||
|
||||
### Key Notes
|
||||
- Data directory: `data/` (configurable via DATA_DIR)
|
||||
- Web UI port: 8080 (default)
|
||||
- Downloaded files: `activity_{id}_{timestamp}.fit`
|
||||
- Metrics include: heart rate, power, calories, distance
|
||||
1
examples/Garmin_Analyser
Submodule
1
examples/Garmin_Analyser
Submodule
Submodule examples/Garmin_Analyser added at afba5973d2
1217
examples/project_map.txt
Normal file
1217
examples/project_map.txt
Normal file
File diff suppressed because it is too large
Load Diff
129
examples/python_project_mapper.py
Executable file
129
examples/python_project_mapper.py
Executable file
@@ -0,0 +1,129 @@
|
||||
import os
|
||||
import ast
|
||||
from pathlib import Path
|
||||
|
||||
def get_docstring(node):
    """Return *node*'s docstring, or an empty string when it has none."""
    doc = ast.get_docstring(node)
    return doc if doc is not None else ""
|
||||
|
||||
def analyze_file(filepath):
    """Analyze a Python file and extract its structure.

    Parses *filepath* with :mod:`ast` and returns a dict with:

    - ``'classes'``:   ``[{name, docstring, methods: [{name, docstring}]}]``
    - ``'functions'``: top-level (module-scope) functions only
    - ``'imports'``:   module names imported anywhere in the file

    On a read/parse failure returns ``{'error': <message>}`` instead of
    raising, so callers can report the file and keep going.
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            tree = ast.parse(f.read())
    except Exception as e:
        return {'error': str(e)}

    result = {
        'classes': [],
        'functions': [],
        'imports': []
    }

    # Count both plain and async defs as functions/methods; the original
    # version missed `async def` entirely.
    func_types = (ast.FunctionDef, ast.AsyncFunctionDef)

    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            methods = [
                {
                    'name': item.name,
                    'docstring': ast.get_docstring(item) or ""
                }
                for item in node.body
                if isinstance(item, func_types)
            ]
            result['classes'].append({
                'name': node.name,
                'docstring': ast.get_docstring(node) or "",
                'methods': methods
            })

        elif isinstance(node, func_types) and node.col_offset == 0:
            # col_offset == 0 keeps only module-level functions; methods
            # and nested functions are indented and therefore excluded.
            result['functions'].append({
                'name': node.name,
                'docstring': ast.get_docstring(node) or ""
            })

        elif isinstance(node, ast.Import):
            # Fix: 'imports' was declared in the result but never filled.
            result['imports'].extend(alias.name for alias in node.names)

        elif isinstance(node, ast.ImportFrom):
            # node.module is None for relative `from . import x`.
            result['imports'].append(node.module or "")

    return result
|
||||
|
||||
def generate_project_map(root_dir, output_file='project_map.txt', exclude_dirs=None):
    """Generate a project map for LLM consumption.

    Walks *root_dir*, analyzes every ``.py`` file with :func:`analyze_file`,
    and writes a human-readable outline of classes and functions to
    *output_file*.  Returns the generated text.
    """
    if exclude_dirs is None:
        exclude_dirs = {'__pycache__', '.git', 'venv', 'env', '.venv',
                        'node_modules', '.pytest_cache'}

    root_path = Path(root_dir)

    lines = [
        "=" * 80,
        f"PROJECT MAP: {root_path.name}",
        "=" * 80,
        "",
    ]

    # Gather every .py file, pruning excluded directories in place so
    # os.walk never descends into them.
    py_files = []
    for dirpath, dirnames, filenames in os.walk(root_path):
        dirnames[:] = [d for d in dirnames if d not in exclude_dirs]
        py_files.extend(
            Path(dirpath) / name
            for name in sorted(filenames)
            if name.endswith('.py')
        )

    # Emit one section per file, in deterministic path order.
    for filepath in sorted(py_files):
        rel_path = filepath.relative_to(root_path)
        lines.append(f"\n{'─' * 80}")
        lines.append(f"FILE: {rel_path}")
        lines.append('─' * 80)

        analysis = analyze_file(filepath)

        if 'error' in analysis:
            lines.append(f"  ⚠ Error parsing file: {analysis['error']}")
            continue

        if analysis['classes']:
            lines.append("\n  CLASSES:")
            for cls in analysis['classes']:
                lines.append(f"    • {cls['name']}")
                if cls['docstring']:
                    doc_preview = cls['docstring'].split('\n')[0][:60]
                    lines.append(f"      └─ {doc_preview}")
                if cls['methods']:
                    lines.append(f"      Methods:")
                    for method in cls['methods']:
                        lines.append(f"        - {method['name']}()")

        if analysis['functions']:
            lines.append("\n  FUNCTIONS:")
            for func in analysis['functions']:
                lines.append(f"    • {func['name']}()")
                if func['docstring']:
                    doc_preview = func['docstring'].split('\n')[0][:60]
                    lines.append(f"      └─ {doc_preview}")

        if not analysis['classes'] and not analysis['functions']:
            lines.append("  (No classes or functions found)")

    output = '\n'.join(lines)
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write(output)

    print(f"Project map generated: {output_file}")
    print(f"Total files analyzed: {len(py_files)}")
    return output
|
||||
|
||||
# Usage
|
||||
# Usage
if __name__ == "__main__":
    # Point this at the project you want mapped.
    project_dir = "."

    # Write the map to disk, then echo the generated file to the console.
    generate_project_map(project_dir, output_file="project_map.txt")

    with open("project_map.txt", 'r') as f:
        contents = f.read()
    print(contents)
|
||||
Reference in New Issue
Block a user