This commit is contained in:
2025-11-17 06:26:37 -08:00
parent afba5973d2
commit 4f9221f5d4
6 changed files with 504 additions and 57 deletions

View File

@@ -26,6 +26,15 @@ A comprehensive Python application for analyzing Garmin workout data from FIT, T
pip install -r requirements.txt
```
### Database Setup (New Feature)
The application now uses SQLite with Alembic for database migrations to track downloaded activities. To initialize the database:
```bash
# Run database migrations
alembic upgrade head
```
### Optional Dependencies
For PDF report generation:
@@ -59,11 +68,26 @@ Download all cycling activities from Garmin Connect:
python main.py download --all --limit 100 --output-dir data/garmin_downloads
```
Download only missing activities (not already in database or filesystem):
```bash
python main.py download --missing --output-dir data/garmin_downloads
```
Dry-run to see what would be downloaded without actually downloading:
```bash
python main.py download --missing --dry-run --output-dir data/garmin_downloads
```
Re-analyze previously downloaded workouts:
```bash
python main.py reanalyze --input-dir data/garmin_downloads --output-dir reports/reanalysis --charts --report
```
Force re-download of specific activity (bypasses database tracking):
```bash
python main.py download --workout-id 123456789 --force
```
Show current configuration:
```bash
python main.py config --show
@@ -111,17 +135,20 @@ options:
--file FILE, -f FILE Path to workout file (FIT, TCX, or GPX)
--garmin-connect Download and analyze latest workout from Garmin Connect
--workout-id WORKOUT_ID
Analyze specific workout by ID from Garmin Connect
Analyze specific workout by ID from Garmin Connect
--ftp FTP Functional Threshold Power (W)
--max-hr MAX_HR Maximum heart rate (bpm)
--zones ZONES Path to zones configuration file
--cog COG Cog size (teeth) for power calculations. Auto-detected if not provided
--output-dir OUTPUT_DIR
Output directory for reports and charts
Output directory for reports and charts
--format {html,pdf,markdown}
Report format
Report format
--charts Generate charts
--report Generate comprehensive report
--force Force download even if activity already exists in database
--missing Download only activities not already in database or filesystem
--dry-run Show what would be downloaded without actually downloading
```
### Configuration:
@@ -171,6 +198,40 @@ Note on app passwords:
Parity and unaffected behavior:
- Authentication and download parity is maintained. Original ZIP downloads and FIT extraction workflows are unchanged in [clients/garmin_client.py](clients/garmin_client.py).
- Alternate format downloads (FIT, TCX, GPX) are unaffected by this credentials change.
## Database Tracking
The application now tracks downloaded activities in a SQLite database (`garmin_analyser.db`) to avoid redundant downloads and provide download history.
### Database Schema
The database tracks:
- Activity ID and metadata
- Download status and timestamps
- File checksums and sizes
- Error information for failed downloads
### Database Location
By default, the database is stored at:
- `garmin_analyser.db` in the project root directory
### Migration Commands
```bash
# Initialize database schema
alembic upgrade head
# Create new migration (for developers)
alembic revision --autogenerate -m "description"
# Check migration status
alembic current
# Downgrade database
alembic downgrade -1
```
## Configuration
### Basic Configuration
@@ -306,6 +367,15 @@ python main.py --garmin-connect --report --charts --summary
# Download specific period
python main.py --garmin-connect --report --output-dir reports/january/
# Download only missing activities (smart sync)
python main.py download --missing --output-dir data/garmin_downloads
# Preview what would be downloaded (dry-run)
python main.py download --missing --dry-run --output-dir data/garmin_downloads
# Force re-download of all activities (bypass database)
python main.py download --all --force --output-dir data/garmin_downloads
```
## Output Structure
@@ -324,6 +394,9 @@ output/
│ └── summary_report_20240115_143022.html
└── logs/
└── garmin_analyser.log
garmin_analyser.db # SQLite database for download tracking
alembic/ # Database migration scripts
```
## Analysis Features
@@ -442,6 +515,10 @@ def generate_custom_chart(self, workout: WorkoutData, analysis: dict) -> str:
- For large datasets, use batch processing
- Consider using `--summary` flag for multiple files
**Database Issues**
- If database becomes corrupted, delete `garmin_analyser.db` and run `alembic upgrade head`
- Check database integrity: `sqlite3 garmin_analyser.db "PRAGMA integrity_check;"`
### Debug Mode
Enable verbose logging for troubleshooting:

View File

@@ -6,21 +6,104 @@ import zipfile
from pathlib import Path
from typing import Optional, Dict, Any, List
import logging
import hashlib
from datetime import datetime
import time
from sqlalchemy.orm import Session
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
try:
from garminconnect import Garmin
except ImportError:
raise ImportError("garminconnect package required. Install with: pip install garminconnect")
from config.settings import get_garmin_credentials, DATA_DIR
from config.settings import get_garmin_credentials, DATA_DIR, DATABASE_URL
from db.models import ActivityDownload
from db.session import SessionLocal
logger = logging.getLogger(__name__)
def calculate_sha256(file_path: Path) -> str:
    """Return the hex-encoded SHA256 digest of the file at *file_path*.

    The file is streamed in 8 KB chunks so arbitrarily large downloads can
    be hashed without loading them fully into memory.

    Args:
        file_path: Path of the file to hash.

    Returns:
        Lowercase hexadecimal SHA256 digest string.
    """
    digest = hashlib.sha256()
    with open(file_path, 'rb') as stream:
        # iter() with a b'' sentinel yields successive 8 KB chunks until EOF.
        for chunk in iter(lambda: stream.read(8192), b''):
            digest.update(chunk)
    return digest.hexdigest()
def upsert_activity_download(
    activity_id: int,
    source: str,
    file_path: Path,
    file_format: str,
    status: str,
    http_status: Optional[int] = None,
    etag: Optional[str] = None,
    last_modified: Optional[datetime] = None,
    size_bytes: Optional[int] = None,
    checksum_sha256: Optional[str] = None,
    error_message: Optional[str] = None,
    db_session: Optional[Session] = None,
):
    """Insert or update the ActivityDownload row for *activity_id*.

    If a record for the activity already exists it is updated in place;
    otherwise a new record is created. Timestamps are stored as naive UTC
    (``datetime.utcnow()``) to stay consistent with the existing rows.

    Args:
        activity_id: Garmin activity ID (unique lookup key).
        source: Origin of the download (e.g. "garmin-connect").
        file_path: Path where the file was (or would have been) stored.
        file_format: Format of the downloaded file (e.g. "fit").
        status: Download status, e.g. "success" or "failed".
        http_status: HTTP status code from the last attempt, if known.
        etag: ETag header reported by the server, if any.
        last_modified: Last-Modified header from the server, if any.
        size_bytes: Size of the downloaded file in bytes.
        checksum_sha256: SHA256 checksum of the downloaded file.
        error_message: Error details for failed downloads, if any.
        db_session: Optional SQLAlchemy session to reuse. When omitted a
            temporary session is created and closed before returning.

    Returns:
        The persisted ActivityDownload record. NOTE(review): when the
        session is owned by this function it is closed before returning,
        so the record may be detached — access eagerly-loaded columns only.
    """
    # Reuse the caller's session when given; otherwise own the lifecycle.
    close_session = db_session is None
    db = SessionLocal() if db_session is None else db_session
    try:
        record = db.query(ActivityDownload).filter_by(activity_id=activity_id).first()
        if record:
            record.source = source
            record.file_path = str(file_path)
            record.file_format = file_format
            record.status = status
            record.http_status = http_status
            record.etag = etag
            record.last_modified = last_modified
            record.size_bytes = size_bytes
            record.checksum_sha256 = checksum_sha256
            record.updated_at = datetime.utcnow()
            record.error_message = error_message
        else:
            record = ActivityDownload(
                activity_id=activity_id,
                source=source,
                file_path=str(file_path),
                file_format=file_format,
                status=status,
                http_status=http_status,
                etag=etag,
                last_modified=last_modified,
                size_bytes=size_bytes,
                checksum_sha256=checksum_sha256,
                downloaded_at=datetime.utcnow(),
                updated_at=datetime.utcnow(),
                error_message=error_message,
            )
            db.add(record)
        db.commit()
        db.refresh(record)
    except Exception:
        # Fix: roll back on failure so a caller-provided session is not
        # left in a dirty/failed state (and our own session closes clean).
        db.rollback()
        raise
    finally:
        if close_session:
            db.close()
    return record
class GarminClient:
"""Client for interacting with Garmin Connect API."""
def __init__(self, email: Optional[str] = None, password: Optional[str] = None):
def __init__(self, email: Optional[str] = None, password: Optional[str] = None, db_session: Optional[Session] = None):
"""Initialize Garmin client.
Args:
@@ -32,6 +115,8 @@ class GarminClient:
self.password = password
else:
self.email, self.password = get_garmin_credentials()
self.db_session = db_session if db_session else SessionLocal()
self.client = None
self._authenticated = False
@@ -117,13 +202,16 @@ class GarminClient:
logger.error(f"Failed to get activity {activity_id}: {e}")
return None
def download_activity_file(self, activity_id: str, file_format: str = "fit") -> Optional[Path]:
def download_activity_file(
self, activity_id: str, file_format: str = "fit", force_download: bool = False
) -> Optional[Path]:
"""Download activity file in specified format.
Args:
activity_id: Garmin activity ID
file_format: File format to download (fit, tcx, gpx, csv, original)
force_download: If True, bypasses database checks and forces a re-download.
Returns:
Path to downloaded file or None if download failed
"""
@@ -155,7 +243,9 @@ class GarminClient:
# FIT is not a direct dl_fmt in some client versions; use ORIGINAL to obtain ZIP and extract .fit
if fmt_upper in {"FIT", "ORIGINAL"} or file_format.lower() == "fit":
fit_path = self.download_activity_original(activity_id)
fit_path = self.download_activity_original(
activity_id, force_download=force_download
)
return fit_path
logger.error(f"Unsupported download format '{file_format}'. Valid: GPX, TCX, ORIGINAL, CSV")
@@ -165,11 +255,13 @@ class GarminClient:
logger.error(f"Failed to download activity {activity_id}: {e}")
return None
def download_activity_original(self, activity_id: str) -> Optional[Path]:
def download_activity_original(self, activity_id: str, force_download: bool = False, db_session: Optional[Session] = None) -> Optional[Path]:
"""Download original activity file (usually FIT format).
Args:
activity_id: Garmin activity ID
force_download: If True, bypasses database checks and forces a re-download.
db_session: Optional SQLAlchemy session to use for database operations.
Returns:
Path to downloaded file or None if download failed
@@ -178,6 +270,33 @@ class GarminClient:
if not self.authenticate():
return None
db = db_session if db_session else self.db_session
if not db:
db = SessionLocal()
close_session = True
else:
close_session = False
try:
# Check database for existing record unless force_download is True
if not force_download:
record = db.query(ActivityDownload).filter_by(activity_id=int(activity_id)).first()
if record and record.status == "success" and Path(record.file_path).exists():
current_checksum = calculate_sha256(Path(record.file_path))
if current_checksum == record.checksum_sha256:
logger.info(f"Activity {activity_id} already downloaded and verified; skipping.")
return Path(record.file_path)
else:
logger.warning(f"Checksum mismatch for activity {activity_id}. Re-downloading.")
finally:
if close_session:
db.close()
download_status = "failed"
error_message = None
http_status = None
downloaded_path = None
try:
# Create data directory if it doesn't exist
DATA_DIR.mkdir(exist_ok=True)
@@ -249,7 +368,7 @@ class GarminClient:
logger.debug(f"{method_name}(activity_id, '{fmt}') succeeded, got data type: {type(file_data).__name__}")
break
except Exception as e:
logger.debug(f"{method_name}(activity_id, '{fmt}') failed: {e} (type={type(e).__name__})")
logger.debug(f"Attempting {method_name}(activity_id, '{fmt}') failed: {e} (type={type(e).__name__})")
file_data = None
if file_data is not None:
break
@@ -265,7 +384,7 @@ class GarminClient:
logger.debug(f"{method_name}(activity_id) succeeded, got data type: {type(file_data).__name__}")
break
except Exception as e:
logger.debug(f"{method_name}(activity_id) failed: {e} (type={type(e).__name__})")
logger.debug(f"Attempting {method_name}(activity_id) failed: {e} (type={type(e).__name__})")
file_data = None
if file_data is None:
@@ -298,17 +417,30 @@ class GarminClient:
content_type = getattr(resp, "headers", {}).get("Content-Type", "")
logger.debug(f"HTTP fallback succeeded: status={status}, content-type='{content_type}', bytes={len(content)}")
file_data = content
http_status = status
break
else:
logger.debug(f"HTTP fallback GET {url} returned status={status} or empty content")
http_status = status
except Exception as e:
logger.debug(f"HTTP fallback GET {url} failed: {e} (type={type(e).__name__})")
error_message = str(e)
if file_data is None:
logger.error(
f"Failed to obtain original/FIT data for activity {activity_id}. "
f"Attempts: {attempts}"
)
upsert_activity_download(
activity_id=int(activity_id),
source="garmin-connect",
file_path=DATA_DIR / f"activity_{activity_id}.fit", # Placeholder path
file_format="fit", # Assuming fit as target format
status="failed",
http_status=http_status,
error_message=error_message or f"All download attempts failed: {attempts}",
db_session=db
)
return None
# Normalize to raw bytes if response-like object returned
@@ -326,6 +458,16 @@ class GarminClient:
if not isinstance(file_data, (bytes, bytearray)):
logger.error(f"Downloaded data for activity {activity_id} is not bytes (type={type(file_data).__name__}); aborting")
logger.debug(f"Data content: {repr(file_data)[:200]}")
upsert_activity_download(
activity_id=int(activity_id),
source="garmin-connect",
file_path=DATA_DIR / f"activity_{activity_id}.fit", # Placeholder path
file_format="fit", # Assuming fit as target format
status="failed",
http_status=http_status,
error_message=f"Downloaded data is not bytes: {type(file_data).__name__}",
db_session=db
)
return None
# Save to temporary file first
@@ -334,6 +476,9 @@ class GarminClient:
tmp_path = Path(tmp_file.name)
# Determine if the response is a ZIP archive (original) or a direct FIT file
file_format_detected = "fit" # Default to fit
extracted_path = DATA_DIR / f"activity_{activity_id}.fit" # Default path
if zipfile.is_zipfile(tmp_path):
# Extract zip file
with zipfile.ZipFile(tmp_path, 'r') as zip_ref:
@@ -343,7 +488,6 @@ class GarminClient:
if fit_files:
# Extract the first FIT file
fit_filename = fit_files[0]
extracted_path = DATA_DIR / f"activity_{activity_id}.fit"
with zip_ref.open(fit_filename) as source, open(extracted_path, 'wb') as target:
target.write(source.read())
@@ -352,27 +496,60 @@ class GarminClient:
tmp_path.unlink()
logger.info(f"Downloaded original activity file: {extracted_path}")
return extracted_path
downloaded_path = extracted_path
download_status = "success"
else:
logger.warning("No FIT file found in downloaded archive")
tmp_path.unlink()
return None
error_message = "No FIT file found in downloaded archive"
else:
# Treat data as direct FIT bytes
extracted_path = DATA_DIR / f"activity_{activity_id}.fit"
try:
tmp_path.rename(extracted_path)
downloaded_path = extracted_path
download_status = "success" # Consider copy as success if file is there
except Exception as move_err:
logger.debug(f"Rename temp FIT to destination failed ({move_err}); falling back to copy")
with open(extracted_path, 'wb') as target, open(tmp_path, 'rb') as source:
target.write(source.read())
tmp_path.unlink()
downloaded_path = extracted_path
download_status = "success" # Consider copy as success if file is there
logger.info(f"Downloaded original activity file: {extracted_path}")
return extracted_path
except Exception as e:
logger.error(f"Failed to download original activity {activity_id}: {e} (type={type(e).__name__})")
return None
error_message = str(e)
finally:
if downloaded_path:
file_size = os.path.getsize(downloaded_path)
file_checksum = calculate_sha256(downloaded_path)
upsert_activity_download(
activity_id=int(activity_id),
source="garmin-connect",
file_path=downloaded_path,
file_format=file_format_detected,
status=download_status,
http_status=http_status,
size_bytes=file_size,
checksum_sha256=file_checksum,
error_message=error_message,
db_session=db
)
else:
upsert_activity_download(
activity_id=int(activity_id),
source="garmin-connect",
file_path=DATA_DIR / f"activity_{activity_id}.fit", # Placeholder path
file_format="fit", # Assuming fit as target format
status="failed",
http_status=http_status,
error_message=error_message or "Unknown error during download",
db_session=db
)
if close_session:
db.close()
return downloaded_path
def get_activity_summary(self, activity_id: str) -> Optional[Dict[str, Any]]:
"""Get detailed activity summary.
@@ -403,6 +580,44 @@ class GarminClient:
logger.error(f"Failed to get activity summary for {activity_id}: {e}")
return None
def get_all_activities(self, limit: int = 1000) -> List[Dict[str, Any]]:
    """Fetch up to *limit* activities from Garmin Connect.

    Activities are pulled in batches of 100 until the limit is reached,
    the server returns an empty batch, or a short batch signals that the
    end of the activity history has been reached.

    Args:
        limit: Maximum number of activities to retrieve.

    Returns:
        List of activity dictionaries (empty on auth or API failure).
    """
    # Guard clause: bail out early if we cannot (re)authenticate.
    if not self.is_authenticated() and not self.authenticate():
        return []
    try:
        collected: List[Dict[str, Any]] = []
        batch_size = 100
        offset = 0
        while offset < limit:
            remaining = limit - offset
            batch = self.client.get_activities(offset, min(batch_size, remaining))
            if not batch:
                break
            collected.extend(batch)
            offset += len(batch)
            if len(batch) < batch_size:
                # A short batch means no more history is available.
                break
        logger.info(f"Found {len(collected)} activities")
        return collected
    except Exception as e:
        logger.error(f"Failed to get activities: {e}")
        return []
def get_all_cycling_workouts(self, limit: int = 1000) -> List[Dict[str, Any]]:
"""Get all cycling activities from Garmin Connect.
@@ -481,15 +696,18 @@ class GarminClient:
downloaded_path.rename(file_path)
return True
return False
def download_all_workouts(self, limit: int = 50, output_dir: Path = DATA_DIR) -> List[Dict[str, Path]]:
"""Download up to 'limit' cycling workouts and save FIT files to output_dir.
def download_all_workouts(
self, limit: int = 50, output_dir: Path = DATA_DIR, force_download: bool = False
) -> List[Dict[str, Path]]:
"""Download up to 'limit' activities and save FIT files to output_dir.
Uses get_all_cycling_workouts() to list activities, then downloads each original
Uses get_all_activities() to list activities, then downloads each original
activity archive and extracts the FIT file via download_activity_original().
Args:
limit: Maximum number of cycling activities to download
limit: Maximum number of activities to download
output_dir: Directory to save downloaded FIT files
force_download: If True, bypasses database checks and forces a re-download.
Returns:
List of dicts with 'file_path' pointing to downloaded FIT paths
@@ -501,9 +719,9 @@ class GarminClient:
try:
output_dir.mkdir(parents=True, exist_ok=True)
activities = self.get_all_cycling_workouts(limit=limit)
activities = self.get_all_activities(limit=limit) # Changed from get_all_cycling_workouts
total = min(limit, len(activities))
logger.info(f"Preparing to download up to {total} cycling activities into {output_dir}")
logger.info(f"Preparing to download up to {total} activities into {output_dir}") # Changed from cycling activities
results: List[Dict[str, Path]] = []
for idx, activity in enumerate(activities[:limit], start=1):
@@ -516,25 +734,48 @@ class GarminClient:
logger.warning("Skipping activity with missing ID key (activityId/activity_id/id)")
continue
logger.debug(f"Downloading activity ID {activity_id} ({idx}/{total})")
src_path = self.download_activity_original(str(activity_id))
if src_path and src_path.exists():
dest_path = output_dir / src_path.name
try:
if src_path.resolve() != dest_path.resolve():
if dest_path.exists():
# Overwrite existing destination to keep most recent download
dest_path.unlink()
src_path.rename(dest_path)
else:
# Already in the desired location
pass
except Exception as move_err:
logger.error(f"Failed to move {src_path} to {dest_path}: {move_err}")
dest_path = src_path # fall back to original location
dest_path = output_dir / f"activity_{activity_id}.fit"
data_dir_path = DATA_DIR / f"activity_{activity_id}.fit"
logger.info(f"Saved activity {activity_id} to {dest_path}")
if dest_path.exists():
logger.info(f"Activity {activity_id} already exists in {output_dir}; skipping download.")
results.append({"file_path": dest_path})
continue
elif data_dir_path.exists():
logger.info(f"Activity {activity_id} found in {DATA_DIR}; moving to {output_dir} and skipping download.")
try:
data_dir_path.rename(dest_path)
results.append({"file_path": dest_path})
continue
except Exception as move_err:
logger.error(f"Failed to move {data_dir_path} to {dest_path}: {move_err}")
# Fall through to download if move fails
logger.debug(f"Downloading activity ID {activity_id} ({idx}/{total})")
# Add rate limiting
import time
time.sleep(1.0)
src_path = self.download_activity_original(
str(activity_id), force_download=force_download, db_session=self.db_session
)
if src_path and src_path.exists():
# Check if the downloaded file is already the desired destination
if src_path.resolve() == dest_path.resolve():
logger.info(f"Saved activity {activity_id} to {dest_path}")
results.append({"file_path": dest_path})
else:
try:
# If not, move it to the desired location
if dest_path.exists():
dest_path.unlink() # Overwrite existing destination to keep most recent download
src_path.rename(dest_path)
logger.info(f"Saved activity {activity_id} to {dest_path}")
results.append({"file_path": dest_path})
except Exception as move_err:
logger.error(f"Failed to move {src_path} to {dest_path}: {move_err}")
results.append({"file_path": src_path}) # Fall back to original location
else:
logger.warning(f"Download returned no file for activity {activity_id}")
@@ -545,7 +786,9 @@ class GarminClient:
logger.error(f"Failed during batch download: {e}")
return []
def download_latest_workout(self, output_dir: Path = DATA_DIR) -> Optional[Path]:
def download_latest_workout(
self, output_dir: Path = DATA_DIR, force_download: bool = False
) -> Optional[Path]:
"""Download the latest cycling workout and save FIT file to output_dir.
Uses get_latest_activity('cycling') to find the most recent cycling activity,
@@ -553,6 +796,7 @@ class GarminClient:
Args:
output_dir: Directory to save the downloaded FIT file
force_download: If True, bypasses database checks and forces a re-download.
Returns:
Path to the downloaded FIT file or None if download failed
@@ -578,7 +822,9 @@ class GarminClient:
return None
logger.info(f"Downloading latest cycling activity ID {activity_id}")
src_path = self.download_activity_original(str(activity_id))
src_path = self.download_activity_original(
str(activity_id), force_download=force_download, db_session=self.db_session
)
if src_path and src_path.exists():
output_dir.mkdir(parents=True, exist_ok=True)
dest_path = output_dir / src_path.name

View File

@@ -17,6 +17,10 @@ BASE_DIR = Path(__file__).parent.parent
DATA_DIR = BASE_DIR / "data"
REPORTS_DIR = BASE_DIR / "reports"
# Database settings
DB_PATH = BASE_DIR / "garmin_analyser.db"
DATABASE_URL = f"sqlite:///{DB_PATH}"
# Create directories if they don't exist
DATA_DIR.mkdir(exist_ok=True)
REPORTS_DIR.mkdir(exist_ok=True)

101
main.py
View File

@@ -133,17 +133,27 @@ def parse_args() -> argparse.Namespace:
# Download command
download_parser = subparsers.add_parser('download', help='Download activities from Garmin Connect')
download_parser.add_argument(
'--all', action='store_true', help='Download all cycling activities'
'--all', action='store_true', help='Download all activities'
)
download_parser.add_argument(
'--missing', action='store_true', help='Download only missing activities (not already downloaded)'
)
download_parser.add_argument(
'--workout-id', type=int, help='Download specific workout by ID'
)
download_parser.add_argument(
'--limit', type=int, default=50, help='Maximum number of activities to download (with --all)'
'--limit', type=int, default=50, help='Maximum number of activities to download (with --all or --missing)'
)
download_parser.add_argument(
'--output-dir', type=str, default='data', help='Directory to save downloaded files'
)
download_parser.add_argument(
'--force', action='store_true', help='Force re-download even if activity already tracked'
)
download_parser.add_argument(
'--dry-run', action='store_true', help='Show what would be downloaded without actually downloading'
)
# TODO: Add argument for --format {fit, tcx, gpx, csv, original} here in the future
# Reanalyze command
reanalyze_parser = subparsers.add_parser('reanalyze', help='Re-analyze all downloaded activities')
@@ -280,15 +290,75 @@ class GarminAnalyser:
download_output_dir = Path(getattr(args, 'output_dir', 'data'))
download_output_dir.mkdir(parents=True, exist_ok=True)
logging.debug(f"download_workouts: all={getattr(args, 'all', False)}, workout_id={getattr(args, 'workout_id', None)}, limit={getattr(args, 'limit', 50)}, output_dir={download_output_dir}")
logging.debug(f"download_workouts: all={getattr(args, 'all', False)}, missing={getattr(args, 'missing', False)}, workout_id={getattr(args, 'workout_id', None)}, limit={getattr(args, 'limit', 50)}, output_dir={download_output_dir}, dry_run={getattr(args, 'dry_run', False)}")
downloaded_activities = []
if getattr(args, 'all', False):
logging.info(f"Downloading up to {getattr(args, 'limit', 50)} cycling activities...")
downloaded_activities = client.download_all_workouts(limit=getattr(args, 'limit', 50), output_dir=download_output_dir)
elif getattr(args, 'workout_id', None):
if getattr(args, 'missing', False):
logging.info(f"Finding and downloading missing activities...")
# Get all activities from Garmin Connect
all_activities = client.get_all_activities(limit=getattr(args, "limit", 50))
# Get already downloaded activities
downloaded_ids = client.get_downloaded_activity_ids(download_output_dir)
# Find missing activities (those not in downloaded_ids)
missing_activities = [activity for activity in all_activities
if str(activity['activityId']) not in downloaded_ids]
if getattr(args, 'dry_run', False):
logging.info(f"DRY RUN: Would download {len(missing_activities)} missing activities:")
for activity in missing_activities:
activity_id = activity['activityId']
activity_name = activity.get('activityName', 'Unknown')
activity_date = activity.get('startTimeLocal', 'Unknown date')
logging.info(f" ID: {activity_id}, Name: {activity_name}, Date: {activity_date}")
return []
logging.info(f"Downloading {len(missing_activities)} missing activities...")
for activity in missing_activities:
activity_id = activity['activityId']
try:
activity_path = client.download_activity_original(
str(activity_id), force_download=getattr(args, "force", False)
)
if activity_path:
dest_path = download_output_dir / activity_path.name
try:
if activity_path.resolve() != dest_path.resolve():
if dest_path.exists():
dest_path.unlink()
activity_path.rename(dest_path)
except Exception as move_err:
logging.error(
f"Failed to move {activity_path} to {dest_path}: {move_err}"
)
dest_path = activity_path
downloaded_activities.append({"file_path": dest_path})
logging.info(f"Downloaded activity {activity_id} to {dest_path}")
except Exception as e:
logging.error(f"Error downloading activity {activity_id}: {e}")
elif getattr(args, 'all', False):
if getattr(args, 'dry_run', False):
logging.info(f"DRY RUN: Would download up to {getattr(args, 'limit', 50)} activities")
return []
logging.info(f"Downloading up to {getattr(args, 'limit', 50)} activities...")
downloaded_activities = client.download_all_workouts(
limit=getattr(args, "limit", 50),
output_dir=download_output_dir,
force_download=getattr(args, "force", False),
)
elif getattr(args, "workout_id", None):
if getattr(args, 'dry_run', False):
logging.info(f"DRY RUN: Would download workout {args.workout_id}")
return []
logging.info(f"Downloading workout {args.workout_id}...")
activity_path = client.download_activity_original(str(args.workout_id))
activity_path = client.download_activity_original(
str(args.workout_id), force_download=getattr(args, "force", False)
)
if activity_path:
dest_path = download_output_dir / activity_path.name
try:
@@ -297,12 +367,21 @@ class GarminAnalyser:
dest_path.unlink()
activity_path.rename(dest_path)
except Exception as move_err:
logging.error(f"Failed to move {activity_path} to {dest_path}: {move_err}")
logging.error(
f"Failed to move {activity_path} to {dest_path}: {move_err}"
)
dest_path = activity_path
downloaded_activities.append({'file_path': dest_path})
downloaded_activities.append({"file_path": dest_path})
else:
if getattr(args, 'dry_run', False):
logging.info("DRY RUN: Would download latest cycling activity")
return []
logging.info("Downloading latest cycling activity...")
activity_path = client.download_latest_workout(output_dir=download_output_dir)
activity_path = client.download_latest_workout(
output_dir=download_output_dir,
force_download=getattr(args, "force", False),
)
if activity_path:
downloaded_activities.append({'file_path': activity_path})

View File

@@ -1,13 +1,53 @@
alembic==1.8.1
annotated-types==0.7.0
Brotli==1.1.0
certifi==2025.10.5
cffi==2.0.0
charset-normalizer==3.4.3
contourpy==1.3.3
cssselect2==0.8.0
cycler==0.12.1
fitparse==1.2.0
fonttools==4.60.1
garminconnect==0.2.30
garth==0.5.17
greenlet==3.2.4
idna==3.10
Jinja2==3.1.6
kiwisolver==1.4.9
Mako==1.3.10
Markdown==3.9
MarkupSafe==3.0.3
matplotlib==3.10.6
narwhals==2.7.0
numpy==2.3.3
oauthlib==3.3.1
packaging==25.0
pandas==2.3.2
pillow==11.3.0
plotly==6.3.0
pycparser==2.23
pydantic==2.11.10
pydantic_core==2.33.2
pydyf==0.11.0
pyparsing==3.2.5
pyphen==0.17.2
python-dateutil==2.9.0.post0
python-dotenv==1.1.1
python-magic==0.4.27
pytz==2025.2
requests==2.32.5
requests-oauthlib==2.0.0
seaborn==0.13.2
setuptools==80.9.0
six==1.17.0
SQLAlchemy==1.4.52
tinycss2==1.4.0
tinyhtml5==2.0.0
typing-inspection==0.4.2
typing_extensions==4.15.0
tzdata==2025.2
urllib3==2.5.0
weasyprint==66.0
webencodings==0.5.1
zopfli==0.2.3.post1

View File

@@ -53,5 +53,6 @@ setup(
include_package_data=True,
package_data={
"garmin_analyser": ["config/*.yaml", "visualizers/templates/*.html", "visualizers/templates/*.md"],
"alembic": ["alembic.ini", "alembic/env.py", "alembic/script.py.mako", "alembic/versions/*.py"],
},
)