added activity view

This commit is contained in:
2026-01-09 09:59:36 -08:00
parent c45e41b6a9
commit 55e37fbca8
168 changed files with 8799 additions and 2426 deletions

Binary file not shown.

View File

@@ -118,29 +118,55 @@ docker-compose up --build
backend/
├── main.py
├── src/
│ ├── models/
│ │ ├── __init__.py
│ │ ├── config.py
│ │ ├── weight_record.py
│ │ ├── activity.py
│ │ ├── health_metric.py
│ │ ├── sync_log.py
│ │ └── api_token.py
│ ├── services/
│ │ ├── __init__.py
│ │ ├── fitbit_client.py
│ │ ├── garmin_client.py
│ │ ├── postgresql_manager.py
│ │ └── sync_app.py
│ ├── api/
│ │ ├── __init__.py
│ │ ├── auth.py
│ │ ├── sync.py
│ │ ├── setup.py
│ │ ├── metrics.py
│ │ ├── activities.py
│ │ ├── auth.py # Refactored from setup.py
│ │ ├── config_routes.py # Refactored from setup.py
│ │ ├── logs.py
│ │ ├── metrics.py
│ │ ├── scheduling.py
│ │ ├── status.py
│ │ └── sync.py
│ ├── models/
│ │ ├── __init__.py
│ │ ├── activity.py
│ │ ├── activity_state.py
│ │ ├── api_token.py
│ │ ├── auth_status.py
│ │ ├── base.py
│ │ ├── config.py
│ │ ├── health_metric.py
│ │ ├── health_state.py
│ │ ├── job.py
│ │ ├── scheduled_job.py
│ │ ├── sync_log.py
│ │ └── weight_record.py
│ ├── routers/
│ │ ├── __init__.py
│ │ └── web.py
│ ├── services/
│ │ ├── garmin/
│ │ │ ├── auth.py
│ │ │ ├── client.py
│ │ │ └── data.py
│ │ ├── sync/
│ │ │ ├── activity.py
│ │ │ ├── health.py
│ │ │ ├── utils.py
│ │ │ └── weight.py
│ │ ├── __init__.py
│ │ ├── fitbit_client.py
│ │ ├── garth_helper.py
│ │ ├── job_manager.py
│ │ ├── postgresql_manager.py
│ │ ├── scheduler.py
│ │ └── sync_app.py
│ └── utils/
│ ├── __init__.py
│ ├── helpers.py
│ ├── config.py
│ ├── helpers.py
│ └── logging_config.py
├── templates/
│ ├── index.html
│ └── setup.html

View File

@@ -0,0 +1,46 @@
"""create_jobs_table
Revision ID: 1e157f880117
Revises: bd21a0528865
Create Date: 2026-01-03 18:45:18.109625
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '1e157f880117'
down_revision: Union[str, None] = 'bd21a0528865'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the ``jobs`` table used to persist background job state."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('jobs',
    sa.Column('id', sa.String(), nullable=False),
    sa.Column('operation', sa.String(), nullable=False),
    sa.Column('status', sa.String(), nullable=False),
    sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
    sa.Column('progress', sa.Integer(), nullable=True),
    sa.Column('message', sa.Text(), nullable=True),
    sa.Column('result', sa.JSON(), nullable=True),
    sa.Column('cancel_requested', sa.Boolean(), nullable=True),
    sa.Column('paused', sa.Boolean(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_jobs_id'), 'jobs', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the ``jobs`` table and its id index (reverse of :func:`upgrade`)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_jobs_id'), table_name='jobs')
    op.drop_table('jobs')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,32 @@
"""add bike setup to activity
Revision ID: 73e349ef1d88
Revises: 95af0e911216
Create Date: 2026-01-07 13:47:24.670293
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '73e349ef1d88'
down_revision: Union[str, None] = '95af0e911216'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add ``activities.bike_setup_id`` referencing ``bike_setups.id``.

    The foreign key is given an explicit name instead of the autogenerated
    ``None``: an unnamed constraint cannot be dropped by name in
    ``downgrade()``.  The chosen name matches PostgreSQL's default
    auto-generated constraint name, so databases that already ran the
    unnamed version remain compatible.
    """
    op.add_column('activities', sa.Column('bike_setup_id', sa.Integer(), nullable=True))
    op.create_foreign_key(
        'activities_bike_setup_id_fkey',  # explicit name; equals PG's default
        'activities', 'bike_setups',
        ['bike_setup_id'], ['id'],
    )
def downgrade() -> None:
    """Remove the bike-setup foreign key and column from ``activities``.

    Drops the constraint by name: the autogenerated
    ``op.drop_constraint(None, ...)`` raises at downgrade time because
    Alembic requires a constraint name.  The name used here is PostgreSQL's
    default auto-generated name for this foreign key.
    """
    op.drop_constraint('activities_bike_setup_id_fkey', 'activities', type_='foreignkey')
    op.drop_column('activities', 'bike_setup_id')

View File

@@ -0,0 +1,53 @@
"""Add state tables
Revision ID: 85c60ed462bf
Revises: b5a6d7ef97a5
Create Date: 2026-01-01 17:01:04.348349
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '85c60ed462bf'
down_revision: Union[str, None] = 'b5a6d7ef97a5'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the scan-state tables ``garmin_activity_state`` and ``health_sync_state``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('garmin_activity_state',
    sa.Column('garmin_activity_id', sa.String(), nullable=False),
    sa.Column('activity_name', sa.String(), nullable=True),
    sa.Column('activity_type', sa.String(), nullable=True),
    sa.Column('start_time', sa.DateTime(), nullable=True),
    sa.Column('sync_status', sa.String(), nullable=True),
    sa.Column('last_seen', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('garmin_activity_id')
    )
    op.create_index(op.f('ix_garmin_activity_state_garmin_activity_id'), 'garmin_activity_state', ['garmin_activity_id'], unique=False)
    op.create_table('health_sync_state',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('date', sa.Date(), nullable=False),
    sa.Column('metric_type', sa.String(), nullable=False),
    sa.Column('source', sa.String(), nullable=False),
    sa.Column('sync_status', sa.String(), nullable=True),
    sa.Column('last_seen', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    # One row per (date, metric, source) combination.
    sa.UniqueConstraint('date', 'metric_type', 'source', name='uq_health_state')
    )
    op.create_index(op.f('ix_health_sync_state_id'), 'health_sync_state', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop both state tables and their indexes (reverse order of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_health_sync_state_id'), table_name='health_sync_state')
    op.drop_table('health_sync_state')
    op.drop_index(op.f('ix_garmin_activity_state_garmin_activity_id'), table_name='garmin_activity_state')
    op.drop_table('garmin_activity_state')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,41 @@
"""add bike setups table
Revision ID: 95af0e911216
Revises: 1e157f880117
Create Date: 2026-01-07 11:46:19.649500
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '95af0e911216'
down_revision: Union[str, None] = '1e157f880117'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the ``bike_setups`` table (frame + gearing combinations)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('bike_setups',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('frame', sa.String(), nullable=False),
    sa.Column('chainring', sa.Integer(), nullable=False),
    sa.Column('rear_cog', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
    sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_bike_setups_id'), 'bike_setups', ['id'], unique=False)
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the ``bike_setups`` table and its id index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_bike_setups_id'), table_name='bike_setups')
    op.drop_table('bike_setups')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,64 @@
"""expand_activity_schema_metrics
Revision ID: bd21a0528865
Revises: 85c60ed462bf
Create Date: 2026-01-01 22:53:14.358635
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'bd21a0528865'
down_revision: Union[str, None] = '85c60ed462bf'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Add summary-metric columns to ``activities`` (all nullable for backfill)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('activities', sa.Column('distance', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('calories', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('avg_hr', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('max_hr', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('avg_speed', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('max_speed', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('elevation_gain', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('elevation_loss', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('avg_cadence', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('max_cadence', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('steps', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('aerobic_te', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('anaerobic_te', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('avg_power', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('max_power', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('norm_power', sa.Integer(), nullable=True))
    op.add_column('activities', sa.Column('tss', sa.Float(), nullable=True))
    op.add_column('activities', sa.Column('vo2_max', sa.Float(), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the summary-metric columns added by :func:`upgrade` (reverse order)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('activities', 'vo2_max')
    op.drop_column('activities', 'tss')
    op.drop_column('activities', 'norm_power')
    op.drop_column('activities', 'max_power')
    op.drop_column('activities', 'avg_power')
    op.drop_column('activities', 'anaerobic_te')
    op.drop_column('activities', 'aerobic_te')
    op.drop_column('activities', 'steps')
    op.drop_column('activities', 'max_cadence')
    op.drop_column('activities', 'avg_cadence')
    op.drop_column('activities', 'elevation_loss')
    op.drop_column('activities', 'elevation_gain')
    op.drop_column('activities', 'max_speed')
    op.drop_column('activities', 'avg_speed')
    op.drop_column('activities', 'max_hr')
    op.drop_column('activities', 'avg_hr')
    op.drop_column('activities', 'calories')
    op.drop_column('activities', 'distance')
    # ### end Alembic commands ###

View File

@@ -18,7 +18,7 @@ async def lifespan(app: FastAPI):
alembic_cfg = Config("alembic.ini")
database_url = os.getenv("DATABASE_URL")
if database_url:
if database_url and not os.getenv("TESTING"):
alembic_cfg.set_main_option("sqlalchemy.url", database_url)
try:
command.upgrade(alembic_cfg, "head")
@@ -28,9 +28,22 @@ async def lifespan(app: FastAPI):
else:
logger.warning("DATABASE_URL not set, skipping migrations.")
# Start Scheduler
try:
from src.services.scheduler import scheduler
scheduler.start()
logger.info("Scheduler started.")
except Exception as e:
logger.error(f"Failed to start scheduler: {e}")
yield
logger.info("--- Application Shutting Down ---")
try:
from src.services.scheduler import scheduler
scheduler.stop()
except:
pass
app = FastAPI(lifespan=lifespan)
@@ -50,25 +63,27 @@ async def log_requests(request: Request, call_next):
app.mount("/static", StaticFiles(directory="../static"), name="static")
templates = Jinja2Templates(directory="templates")
from src.api import status, sync, setup, logs, metrics, activities
from src.api import status, sync, auth, logs, metrics, activities, scheduling, config_routes
app.include_router(status.router, prefix="/api")
app.include_router(sync.router, prefix="/api")
app.include_router(setup.router, prefix="/api")
app.include_router(auth.router, prefix="/api")
app.include_router(config_routes.router, prefix="/api")
app.include_router(logs.router, prefix="/api")
app.include_router(metrics.router, prefix="/api")
app.include_router(activities.router, prefix="/api")
app.include_router(activities.router, prefix="/api")
app.include_router(scheduling.router, prefix="/api")
from src.api import bike_setups
app.include_router(bike_setups.router)
@app.get("/")
async def read_root(request: Request):
return templates.TemplateResponse("index.html", {"request": request})
@app.get("/activities")
async def activities_page(request: Request):
return templates.TemplateResponse("activities.html", {"request": request})
from src.routers import web
app.include_router(web.router)
@app.get("/setup")
async def setup_page(request: Request):
return templates.TemplateResponse("setup.html", {"request": request})

View File

@@ -14,4 +14,4 @@ httpx==0.25.2
aiofiles==23.2.1
pytest==7.4.3
pytest-asyncio==0.21.1
alembic==1.13.1
alembic==1.13.1

View File

@@ -1,4 +1,4 @@
from fastapi import APIRouter, Query, Response, HTTPException, Depends
from fastapi import APIRouter, Query, Response, HTTPException, Depends, BackgroundTasks
from pydantic import BaseModel
from typing import List, Optional, Dict, Any
from sqlalchemy import func
@@ -8,6 +8,15 @@ from ..services.postgresql_manager import PostgreSQLManager
from sqlalchemy.orm import Session
from ..utils.config import config
# New Sync Imports
from ..services.job_manager import job_manager
from ..models.activity_state import GarminActivityState
import fitdecode
import io
import xml.etree.ElementTree as ET
from datetime import datetime
router = APIRouter()
logger = logging.getLogger(__name__)
@@ -17,6 +26,13 @@ def get_db():
with db_manager.get_db_session() as session:
yield session
class BikeSetupInfo(BaseModel):
id: int
frame: str
chainring: int
rear_cog: int
name: Optional[str] = None
class ActivityResponse(BaseModel):
id: Optional[int] = None
garmin_activity_id: Optional[str] = None
@@ -28,6 +44,28 @@ class ActivityResponse(BaseModel):
file_type: Optional[str] = None
download_status: Optional[str] = None
downloaded_at: Optional[str] = None
bike_setup: Optional[BikeSetupInfo] = None
class ActivityDetailResponse(ActivityResponse):
distance: Optional[float] = None
calories: Optional[float] = None
avg_hr: Optional[int] = None
max_hr: Optional[int] = None
avg_speed: Optional[float] = None
max_speed: Optional[float] = None
elevation_gain: Optional[float] = None
elevation_loss: Optional[float] = None
avg_cadence: Optional[int] = None
max_cadence: Optional[int] = None
steps: Optional[int] = None
aerobic_te: Optional[float] = None
anaerobic_te: Optional[float] = None
avg_power: Optional[int] = None
max_power: Optional[int] = None
norm_power: Optional[int] = None
tss: Optional[float] = None
vo2_max: Optional[float] = None
@router.get("/activities/list", response_model=List[ActivityResponse])
async def list_activities(
@@ -36,28 +74,60 @@ async def list_activities(
db: Session = Depends(get_db)
):
"""
Return metadata for all downloaded/available activities.
Return metadata for all scanned activities, indicating download status.
"""
try:
logger.info(f"Listing activities with limit={limit}, offset={offset}")
# Query the database for activities
activities = db.query(Activity).offset(offset).limit(limit).all()
# Query GarminActivityState (all known activities)
# Left join with Activity to get file status
results = (
db.query(GarminActivityState, Activity)
.outerjoin(Activity, GarminActivityState.garmin_activity_id == Activity.garmin_activity_id)
.order_by(GarminActivityState.start_time.desc())
.offset(offset)
.limit(limit)
.all()
)
# Convert SQLAlchemy objects to Pydantic models
activity_responses = []
for activity in activities:
for state, activity in results:
# Determine logic
# If activity exists in 'Activity' table, use its details?
# Or prefer GarminActivityState metadata?
# State metadata is from scan (Garth). Activity is from file parse (db import).
# Usually Activity data is richer IF downloaded.
is_downloaded = (
activity is not None and
activity.download_status == 'downloaded' and
activity.file_content is not None
)
download_status = 'downloaded' if is_downloaded else 'pending'
# Or use state.sync_status? state.sync_status is 'new', 'synced'.
# 'synced' usually means downloaded.
# Construct response
activity_responses.append(
ActivityResponse(
id=activity.id,
garmin_activity_id=activity.garmin_activity_id,
activity_name=activity.activity_name,
activity_type=activity.activity_type,
start_time=activity.start_time.isoformat() if activity.start_time else None,
duration=activity.duration,
file_type=activity.file_type,
download_status=activity.download_status,
downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None
id=activity.id if activity else None,
garmin_activity_id=state.garmin_activity_id,
activity_name=state.activity_name,
activity_type=state.activity_type,
start_time=state.start_time.isoformat() if state.start_time else None,
duration=activity.duration if activity else None, # Duration might only be in file parse? Or scan could get it? Scan currently doesn't fetch duration.
file_type=activity.file_type if activity else None,
download_status=download_status,
downloaded_at=activity.downloaded_at.isoformat() if (activity and activity.downloaded_at) else None,
bike_setup=BikeSetupInfo(
id=activity.bike_setup.id,
frame=activity.bike_setup.frame,
chainring=activity.bike_setup.chainring,
rear_cog=activity.bike_setup.rear_cog,
name=activity.bike_setup.name
) if (activity and activity.bike_setup) else None
)
)
@@ -117,7 +187,14 @@ async def query_activities(
duration=activity.duration,
file_type=activity.file_type,
download_status=activity.download_status,
downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None
downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None,
bike_setup=BikeSetupInfo(
id=activity.bike_setup.id,
frame=activity.bike_setup.frame,
chainring=activity.bike_setup.chainring,
rear_cog=activity.bike_setup.rear_cog,
name=activity.bike_setup.name
) if activity.bike_setup else None
)
)
@@ -172,6 +249,78 @@ async def download_activity(activity_id: str, db: Session = Depends(get_db)):
except Exception as e:
raise HTTPException(status_code=500, detail=f"Error downloading activity: {str(e)}")
@router.get("/activities/{activity_id}/details", response_model=ActivityDetailResponse)
async def get_activity_details(activity_id: str, db: Session = Depends(get_db)):
    """
    Get full details for a specific activity.

    The activity is looked up by its Garmin activity id.  Extended metrics
    come from the DB columns when present; when key columns are missing and
    the FIT file is stored, a summary is extracted from the file as a
    fallback (TCX fallback is not implemented yet).

    Raises:
        HTTPException 404: no activity with this Garmin id.
        HTTPException 500: unexpected failure while building the response.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity:
            raise HTTPException(status_code=404, detail="Activity not found")
        # Fallback: Extraction from file if DB fields are missing
        overrides = {}
        if activity.file_content and (activity.distance is None or activity.elevation_gain is None or activity.avg_hr is None):
            try:
                if activity.file_type == 'fit':
                    overrides = _extract_summary_from_fit(activity.file_content)
                elif activity.file_type == 'tcx':
                    # overrides = _extract_summary_from_tcx(activity.file_content) # Optional TODO
                    pass
            except Exception as e:
                # Best effort: fall through with whatever overrides we have.
                logger.warning(f"Failed to extract summary from file: {e}")
        # Helper to merge DB value or Override
        def val(attr, key):
            # Prefer the stored DB column; fall back to the file-derived value.
            v = getattr(activity, attr)
            if v is not None: return v
            return overrides.get(key)
        return ActivityDetailResponse(
            id=activity.id,
            garmin_activity_id=activity.garmin_activity_id,
            activity_name=activity.activity_name,
            activity_type=activity.activity_type,
            start_time=activity.start_time.isoformat() if activity.start_time else None,
            duration=val('duration', 'total_timer_time'),
            file_type=activity.file_type,
            download_status=activity.download_status,
            downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None,
            # Extended metrics
            distance=val('distance', 'total_distance'),
            calories=val('calories', 'total_calories'),
            avg_hr=val('avg_hr', 'avg_heart_rate'),
            max_hr=val('max_hr', 'max_heart_rate'),
            avg_speed=val('avg_speed', 'enhanced_avg_speed'), # fallback to avg_speed handled in extractor
            max_speed=val('max_speed', 'enhanced_max_speed'),
            elevation_gain=val('elevation_gain', 'total_ascent'),
            elevation_loss=val('elevation_loss', 'total_descent'),
            avg_cadence=val('avg_cadence', 'avg_cadence'),
            max_cadence=val('max_cadence', 'max_cadence'),
            steps=activity.steps, # No session step count usually
            aerobic_te=val('aerobic_te', 'total_training_effect'),
            anaerobic_te=val('anaerobic_te', 'total_anaerobic_training_effect'),
            avg_power=val('avg_power', 'avg_power'),
            max_power=val('max_power', 'max_power'),
            norm_power=val('norm_power', 'normalized_power'),
            tss=val('tss', 'training_stress_score'),
            vo2_max=activity.vo2_max, # Usually not in simple session msg directly but maybe
            bike_setup=BikeSetupInfo(
                id=activity.bike_setup.id,
                frame=activity.bike_setup.frame,
                chainring=activity.bike_setup.chainring,
                rear_cog=activity.bike_setup.rear_cog,
                name=activity.bike_setup.name
            ) if activity.bike_setup else None
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting activity details: {e}")
        raise HTTPException(status_code=500, detail=str(e))
# Import necessary auth dependencies
from ..models.api_token import APIToken
import garth
@@ -238,4 +387,419 @@ async def redownload_activity_endpoint(activity_id: str, db: Session = Depends(g
raise
except Exception as e:
logger.error(f"Error in redownload_activity_endpoint: {e}")
raise HTTPException(status_code=500, detail=f"Error processing redownload: {str(e)}")
raise HTTPException(status_code=500, detail=f"Error processing redownload: {str(e)}")
# New Sync Endpoints
def run_scan_job(job_id: str, days_back: int, db_session_factory):
    """Background task wrapper for an activity-metadata scan.

    Marks the job as failed via ``job_manager`` on any error so the UI
    never sees a job stuck in a non-terminal state.
    """
    # Import inside the task so an import failure fails the job instead of
    # breaking module import for the whole router.
    try:
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp
    except Exception as e:
        logger.error(f"Import error in background job: {e}")
        job_manager.fail_job(job_id, f"Import error: {str(e)}")
        return
    try:
        # db_session_factory is a context-manager factory; the session is
        # scoped to this job's lifetime.
        with db_session_factory() as db:
            garmin_client = GarminClient()
            sync_app = SyncApp(db, garmin_client)
            job_manager.update_job(job_id, status="running", progress=0)
            sync_app.scan_activities(days_back=days_back)
            job_manager.complete_job(job_id)
    except Exception as e:
        logger.error(f"Scan job failed: {e}")
        job_manager.fail_job(job_id, str(e))
def run_sync_job(job_id: str, limit: Optional[int], db_session_factory):
    """Background task wrapper for syncing pending activities.

    ``limit`` may be None (sync everything pending); the caller passes the
    value straight from an optional query parameter.
    """
    # Import inside the task so an import failure fails the job instead of
    # breaking module import for the whole router.
    try:
        from ..services.garmin.client import GarminClient
        from ..services.sync_app import SyncApp
    except Exception as e:
        logger.error(f"Import error in background job: {e}")
        job_manager.fail_job(job_id, f"Import error: {str(e)}")
        return
    with db_session_factory() as db:
        try:
            garmin_client = GarminClient()
            sync_app = SyncApp(db, garmin_client)
            # sync_pending_activities handles job updates
            sync_app.sync_pending_activities(limit=limit, job_id=job_id)
        except Exception as e:
            logger.error(f"Sync job failed: {e}")
            job_manager.fail_job(job_id, str(e))
@router.post("/activities/sync/scan")
async def scan_activities_trigger(
    background_tasks: BackgroundTasks,
    days_back: int = Query(30, description="Number of days to scan back for new activities")
):
    """Trigger a background scan of activity metadata.

    Returns ``{"job_id": ..., "status": "started"}``; poll job status
    through the job manager to track progress.
    """
    job_id = job_manager.create_job("scan_activities")
    # We need a new session for the background task
    db_manager = PostgreSQLManager(config.DATABASE_URL)
    # Pass the session *factory*; the wrapper opens/closes the session itself.
    background_tasks.add_task(run_scan_job, job_id, days_back, db_manager.get_db_session)
    return {"job_id": job_id, "status": "started"}
@router.post("/activities/sync/pending")
async def sync_pending_trigger(
    background_tasks: BackgroundTasks,
    limit: Optional[int] = Query(None, description="Limit number of activities to sync")
):
    """Trigger a background sync of pending activities.

    Returns ``{"job_id": ..., "status": "started"}``.  ``limit=None`` syncs
    all pending activities.
    """
    job_id = job_manager.create_job("sync_pending_activities")
    # New manager/session factory for the background task; the request-scoped
    # session would be closed before the task runs.
    db_manager = PostgreSQLManager(config.DATABASE_URL)
    background_tasks.add_task(run_sync_job, job_id, limit, db_manager.get_db_session)
    return {"job_id": job_id, "status": "started"}
@router.get("/activities/sync/status")
async def get_sync_status_summary(db: Session = Depends(get_db)):
    """Get counts of activities grouped by sync status.

    Returns a mapping such as ``{"new": 12, "synced": 340}``.  On query
    failure the error is logged and an empty dict is returned so the UI
    degrades gracefully.
    """
    try:
        # ``func`` is already imported at module level from sqlalchemy;
        # the redundant local import has been removed.
        stats = db.query(
            GarminActivityState.sync_status,
            func.count(GarminActivityState.garmin_activity_id)
        ).group_by(GarminActivityState.sync_status).all()
        return {status: count for status, count in stats}
    except Exception as e:
        logger.error(f"Error getting sync status: {e}")
        return {}
def _extract_points_from_fit(file_content: bytes) -> List[List[float]]:
    """
    Extract [lon, lat] points from a FIT file content.
    Returns a list of [lon, lat] (GeoJSON coordinate order).
    Parse errors are logged; whatever points were collected so far are
    returned (possibly an empty list).
    """
    points = []
    try:
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'record':
                        # Check for position_lat and position_long
                        # Garmin stores lat/long as semicircles. Convert to degrees: semicircle * (180 / 2^31)
                        if frame.has_field('position_lat') and frame.has_field('position_long'):
                            lat_sc = frame.get_value('position_lat')
                            lon_sc = frame.get_value('position_long')
                            if lat_sc is not None and lon_sc is not None:
                                lat = lat_sc * (180.0 / 2**31)
                                lon = lon_sc * (180.0 / 2**31)
                                points.append([lon, lat])
    except Exception as e:
        logger.error(f"Error parsing FIT file: {e}")
        # Return what we have or empty
    return points
def _extract_points_from_tcx(file_content: bytes) -> List[List[float]]:
"""
Extract [lon, lat] points from a TCX file content.
"""
points = []
try:
# TCX is XML
# Namespace usually exists
root = ET.fromstring(file_content)
# Namespaces are annoying in ElementTree, usually {http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2}
# We can just iterate and ignore namespace or handle it.
# Let's try ignoring namespace by using local-name() in xpath if lxml, but this is stdlib ET.
# Just strip namespace for simplicity
for trkpt in root.iter():
if trkpt.tag.endswith('Trackpoint'):
lat = None
lon = None
for child in trkpt.iter():
if child.tag.endswith('LatitudeDegrees'):
try: lat = float(child.text)
except: pass
elif child.tag.endswith('LongitudeDegrees'):
try: lon = float(child.text)
except: pass
if lat is not None and lon is not None:
points.append([lon, lat])
except Exception as e:
logger.error(f"Error parsing TCX file: {e}")
return points
@router.get("/activities/{activity_id}/geojson")
async def get_activity_geojson(activity_id: str, db: Session = Depends(get_db)):
    """
    Return a GeoJSON FeatureCollection with one LineString for the
    activity's GPS track ([lon, lat] pairs from the stored FIT/TCX file).
    Returns an empty FeatureCollection when the file has no position data.

    Raises:
        HTTPException 404: activity missing or no file content stored.
        HTTPException 500: unexpected parsing failure.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity or not activity.file_content:
            raise HTTPException(status_code=404, detail="Activity or file content not found")
        points = []
        if activity.file_type == 'fit':
            points = _extract_points_from_fit(activity.file_content)
        elif activity.file_type == 'tcx':
            points = _extract_points_from_tcx(activity.file_content)
        else:
            # Unknown file type: respond with an empty collection below.
            logger.warning(f"Unsupported file type for map: {activity.file_type}")
        if not points:
            return {"type": "FeatureCollection", "features": []}
        return {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "properties": {
                    "color": "red"
                },
                "geometry": {
                    "type": "LineString",
                    "coordinates": points
                }
            }]
        }
    except HTTPException:
        # Re-raise so the 404 above is not converted to a 500 by the generic
        # handler (matches get_activity_details' error handling).
        raise
    except Exception as e:
        logger.error(f"Error generating GeoJSON: {e}")
        raise HTTPException(status_code=500, detail=str(e))
def _extract_streams_from_fit(file_content: bytes) -> Dict[str, List[Any]]:
    """
    Extract per-record time series from FIT file bytes for charting.

    Returns parallel lists keyed by ``time`` (seconds since the first
    record), ``heart_rate``, ``power``, ``altitude``, ``speed`` (m/s) and
    ``cadence``; missing samples are ``None``.  Parse errors are logged and
    the partial streams collected so far are returned.
    """
    streams = {
        "time": [],
        "heart_rate": [],
        "power": [],
        "altitude": [],
        "speed": [],
        "cadence": []
    }
    try:
        start_time = None
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'record':
                        timestamp = frame.get_value('timestamp')
                        if not start_time and timestamp:
                            start_time = timestamp
                        # Records without a timestamp are skipped entirely.
                        if timestamp and start_time:
                            # Relative time in seconds
                            t = (timestamp - start_time).total_seconds()
                            # Helper to safely get value with fallback
                            # (prefers "enhanced_" fields where listed first).
                            def get_val(frame, keys):
                                for k in keys:
                                    if frame.has_field(k):
                                        return frame.get_value(k)
                                return None
                            streams["time"].append(t)
                            streams["heart_rate"].append(get_val(frame, ['heart_rate']))
                            streams["power"].append(get_val(frame, ['power']))
                            streams["altitude"].append(get_val(frame, ['enhanced_altitude', 'altitude']))
                            streams["speed"].append(get_val(frame, ['enhanced_speed', 'speed'])) # m/s (enhanced is also m/s)
                            streams["cadence"].append(get_val(frame, ['cadence']))
    except Exception as e:
        logger.error(f"Error extracting streams from FIT: {e}")
    return streams
def _extract_summary_from_fit(file_content: bytes) -> Dict[str, Any]:
    """
    Extract session-level summary metrics from FIT file bytes.

    Reads the first ``session`` message and returns a dict keyed by FIT
    field names (``total_distance``, ``avg_heart_rate``, ...).  Fields
    absent from the message map to ``None``; if no session message exists
    or parsing fails, an empty (or partial) dict is returned and the error
    is logged.
    """
    summary = {}
    try:
        with io.BytesIO(file_content) as f:
            with fitdecode.FitReader(f) as fit:
                for frame in fit:
                    if frame.frame_type == fitdecode.FIT_FRAME_DATA and frame.name == 'session':
                        # Return the first present field; "enhanced" variants
                        # are preferred where listed first.
                        def get(keys):
                            for k in keys:
                                if frame.has_field(k): return frame.get_value(k)
                            return None
                        summary['total_distance'] = get(['total_distance'])
                        summary['total_timer_time'] = get(['total_timer_time', 'total_elapsed_time'])
                        summary['total_calories'] = get(['total_calories'])
                        summary['avg_heart_rate'] = get(['avg_heart_rate'])
                        summary['max_heart_rate'] = get(['max_heart_rate'])
                        summary['avg_cadence'] = get(['avg_cadence'])
                        summary['max_cadence'] = get(['max_cadence'])
                        summary['avg_power'] = get(['avg_power'])
                        summary['max_power'] = get(['max_power'])
                        summary['total_ascent'] = get(['total_ascent'])
                        summary['total_descent'] = get(['total_descent'])
                        summary['enhanced_avg_speed'] = get(['enhanced_avg_speed', 'avg_speed'])
                        summary['enhanced_max_speed'] = get(['enhanced_max_speed', 'max_speed'])
                        summary['normalized_power'] = get(['normalized_power'])
                        summary['training_stress_score'] = get(['training_stress_score'])
                        summary['total_training_effect'] = get(['total_training_effect'])
                        summary['total_anaerobic_training_effect'] = get(['total_anaerobic_training_effect'])
                        # Stop after the first session message. FIT files can
                        # contain multiple sessions (multisport); we take the
                        # first for now.
                        break
    except Exception as e:
        # Fixed log wording ("Error extraction summary" -> "Error extracting summary").
        logger.error(f"Error extracting summary from FIT: {e}")
    return summary
def _extract_streams_from_tcx(file_content: bytes) -> Dict[str, List[Any]]:
    """
    Extract per-trackpoint time series from TCX file bytes for charting.

    Returns parallel lists keyed by ``time`` (seconds since the first
    trackpoint), ``heart_rate``, ``power``, ``altitude``, ``speed`` and
    ``cadence``; missing samples are ``None``.  Trackpoints without a Time
    element are skipped.  Parse errors are logged and the partial streams
    collected so far are returned.
    """
    streams = {
        "time": [],
        "heart_rate": [],
        "power": [],
        "altitude": [],
        "speed": [],
        "cadence": []
    }
    try:
        root = ET.fromstring(file_content)
        # Namespace strip hack: match elements by local tag suffix so the
        # TrainingCenterDatabase namespace URI is never hard-coded.
        start_time = None
        for trkpt in root.iter():
            if trkpt.tag.endswith('Trackpoint'):
                timestamp_str = None
                hr = None
                pwr = None
                alt = None
                cad = None
                spd = None
                for child in trkpt.iter():
                    if child.tag.endswith('Time'):
                        timestamp_str = child.text
                    elif child.tag.endswith('AltitudeMeters'):
                        try: alt = float(child.text)
                        except: pass
                    elif child.tag.endswith('HeartRateBpm'):
                        # HR is nested: <HeartRateBpm><Value>…</Value></HeartRateBpm>
                        for val in child:
                            if val.tag.endswith('Value'):
                                try: hr = int(val.text)
                                except: pass
                    elif child.tag.endswith('Cadence'): # Standard TCX cadence
                        try: cad = int(child.text)
                        except: pass
                    elif child.tag.endswith('Extensions'):
                        # TPX extensions for speed/power
                        for ext in child.iter():
                            if ext.tag.endswith('Speed'):
                                try: spd = float(ext.text)
                                except: pass
                            elif ext.tag.endswith('Watts'):
                                try: pwr = int(ext.text)
                                except: pass
                if timestamp_str:
                    try:
                        # TCX time format is ISO8601 usually; normalize the
                        # trailing 'Z' for datetime.fromisoformat.
                        ts = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
                        if not start_time:
                            start_time = ts
                        streams["time"].append((ts - start_time).total_seconds())
                        streams["heart_rate"].append(hr)
                        streams["power"].append(pwr)
                        streams["altitude"].append(alt)
                        streams["speed"].append(spd)
                        streams["cadence"].append(cad)
                    except: pass
    except Exception as e:
        logger.error(f"Error extracting streams from TCX: {e}")
    return streams
@router.get("/activities/{activity_id}/streams")
async def get_activity_streams(activity_id: str, db: Session = Depends(get_db)):
    """
    Return time series data for charts, extracted from the stored FIT/TCX
    file content.  Unsupported file types yield an empty dict.

    Raises:
        HTTPException 404: activity missing or no file content stored.
        HTTPException 500: unexpected extraction failure.
    """
    try:
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity or not activity.file_content:
            raise HTTPException(status_code=404, detail="Activity or file content not found")
        streams = {}
        if activity.file_type == 'fit':
            streams = _extract_streams_from_fit(activity.file_content)
        elif activity.file_type == 'tcx':
            streams = _extract_streams_from_tcx(activity.file_content)
        else:
            logger.warning(f"Unsupported file type for streams: {activity.file_type}")
        return streams
    except HTTPException:
        # Re-raise so the 404 above is not converted to a 500 by the generic
        # handler (matches get_activity_details' error handling).
        raise
    except Exception as e:
        logger.error(f"Error getting streams: {e}")
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/activities/{activity_id}/navigation")
async def get_activity_navigation(activity_id: str, db: Session = Depends(get_db)):
    """
    Return next/prev activity IDs.

    Provides four neighbors relative to the current activity's start time:
    global previous/next (any type) and previous/next restricted to the
    same activity type. Missing neighbors are returned as None.

    Raises:
        HTTPException: 404 if the activity does not exist, 500 on errors.
    """
    try:
        current = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not current:
            raise HTTPException(status_code=404, detail="Activity not found")

        def _neighbor(older: bool, same_type: bool):
            # "older" = strictly earlier start time; always pick the
            # chronologically closest match.
            q = db.query(Activity)
            if older:
                q = q.filter(Activity.start_time < current.start_time)
                q = q.order_by(Activity.start_time.desc())
            else:
                q = q.filter(Activity.start_time > current.start_time)
                q = q.order_by(Activity.start_time.asc())
            if same_type:
                q = q.filter(Activity.activity_type == current.activity_type)
            act = q.first()
            return act.garmin_activity_id if act else None

        return {
            "prev_id": _neighbor(older=True, same_type=False),
            "next_id": _neighbor(older=False, same_type=False),
            "prev_type_id": _neighbor(older=True, same_type=True),
            "next_type_id": _neighbor(older=False, same_type=True)
        }
    except HTTPException:
        # Preserve the 404 above instead of converting it into a 500.
        raise
    except Exception as e:
        logger.error(f"Error getting navigation: {e}")
        raise HTTPException(status_code=500, detail=str(e))

View File

@@ -5,14 +5,17 @@ from typing import Optional
from sqlalchemy.orm import Session
import logging
import traceback
import requests
import base64
import json
from datetime import datetime, timedelta
from ..services.garmin.client import GarminClient
from ..services.fitbit_client import FitbitClient
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
from ..models.api_token import APIToken
from ..models.config import Configuration
from garth.exc import GarthException
import garth
router = APIRouter()
logger = logging.getLogger(__name__)
@@ -39,11 +42,6 @@ class FitbitCallback(BaseModel):
class GarminMFARequest(BaseModel):
verification_code: str
from datetime import datetime, timedelta
from ..models.api_token import APIToken
from ..models.config import Configuration
import json
class GarminAuthStatus(BaseModel):
token_stored: bool
authenticated: bool
@@ -80,8 +78,8 @@ def get_auth_status(db: Session = Depends(get_db)):
authenticated=has_oauth1 and has_oauth2,
garth_oauth1_token_exists=has_oauth1,
garth_oauth2_token_exists=has_oauth2,
mfa_state_exists=False, # We don't store persistent MFA state in DB other than tokens
last_used=garmin_token.expires_at, # Using expires_at as proxy or null
mfa_state_exists=False,
last_used=garmin_token.expires_at,
updated_at=garmin_token.updated_at
)
else:
@@ -96,7 +94,7 @@ def get_auth_status(db: Session = Depends(get_db)):
if fitbit_token:
response.fitbit = FitbitAuthStatus(
authenticated=True,
client_id="Stored", # We don't store client_id in APIToken explicitly but could parse from file if needed
client_id="Stored",
token_expires_at=fitbit_token.expires_at,
last_login=fitbit_token.updated_at
)
@@ -119,7 +117,6 @@ def clear_garmin_credentials(db: Session = Depends(get_db)):
@router.post("/setup/garmin")
def save_garmin_credentials(credentials: GarminCredentials, db: Session = Depends(get_db)):
# Re-acquire logger to ensure correct config after startup
logger = logging.getLogger(__name__)
logger.info(f"Received Garmin credentials for user: {credentials.username}")
@@ -129,7 +126,7 @@ def save_garmin_credentials(credentials: GarminCredentials, db: Session = Depend
status = garmin_client.login(db)
if status == "mfa_required":
return JSONResponse(status_code=202, content={"status": "mfa_required", "message": "MFA code required.", "session_id": "session"}) # Added dummy session_id for frontend compat
return JSONResponse(status_code=202, content={"status": "mfa_required", "message": "MFA code required.", "session_id": "session"})
elif status == "error":
logger.error("Garmin login returned 'error' status.")
raise HTTPException(status_code=401, detail="Login failed. Check username/password.")
@@ -146,11 +143,6 @@ def complete_garmin_mfa(mfa_request: GarminMFARequest, db: Session = Depends(get
logger.info(f"Received MFA verification code: {'*' * len(mfa_request.verification_code)}")
try:
# We need to reuse the client that was just used for login.
# In a real clustered app this would need shared state (Redis).
# For this single-instance app, we rely on Global Garth state or re-instantiation logic.
# But wait, handle_mfa logic in auth.py was loading from file/global.
# Let's ensure we are instantiating correctly.
garmin_client = GarminClient()
success = garmin_client.handle_mfa(db, mfa_request.verification_code)
@@ -161,8 +153,6 @@ def complete_garmin_mfa(mfa_request: GarminMFARequest, db: Session = Depends(get
except Exception as e:
logger.error(f"MFA verification failed with exception: {e}", exc_info=True)
print("DEBUG: MFA verification failed. Traceback below:", flush=True)
traceback.print_exc()
raise HTTPException(status_code=500, detail=f"MFA verification failed: {str(e)}")
@router.post("/setup/garmin/test-token")
@@ -177,180 +167,43 @@ def test_garmin_token(db: Session = Depends(get_db)):
logger.warning("Test Token: No 'garmin' token record found in database.")
return JSONResponse(status_code=400, content={"status": "error", "message": "No valid tokens found. Please login first."})
logger.debug(f"Test Token: Token record found. ID: {token.id}, Updated: {token.updated_at}")
if not token.garth_oauth1_token:
logger.warning("Test Token: garth_oauth1_token is empty or None.")
return JSONResponse(status_code=400, content={"status": "error", "message": "No valid tokens found. Please login first."})
logger.debug(f"Test Token: OAuth1 Token length: {len(token.garth_oauth1_token)}")
logger.debug(f"Test Token: OAuth2 Token length: {len(token.garth_oauth2_token) if token.garth_oauth2_token else 'None'}")
import garth
# Manually load tokens into garth global state
try:
oauth1_data = json.loads(token.garth_oauth1_token) if token.garth_oauth1_token else None
oauth2_data = json.loads(token.garth_oauth2_token) if token.garth_oauth2_token else None
if not isinstance(oauth1_data, dict) or not isinstance(oauth2_data, dict):
logger.error(f"Test Token: Parsed tokens are not dictionaries. OAuth1: {type(oauth1_data)}, OAuth2: {type(oauth2_data)}")
return JSONResponse(status_code=500, content={"status": "error", "message": "Stored tokens are invalid (not dictionaries)."})
logger.debug(f"Test Token: Parsed tokens. OAuth1 keys: {list(oauth1_data.keys())}, OAuth2 keys: {list(oauth2_data.keys())}")
# Instantiate objects using the garth classes
from garth.auth_tokens import OAuth1Token, OAuth2Token
garth.client.oauth1_token = OAuth1Token(**oauth1_data)
garth.client.oauth2_token = OAuth2Token(**oauth2_data)
logger.debug("Test Token: Tokens loaded into garth.client.")
except json.JSONDecodeError as e:
logger.error(f"Test Token: Failed to decode JSON tokens: {e}")
return JSONResponse(status_code=500, content={"status": "error", "message": "Stored tokens are corrupted."})
except Exception as e:
logger.error(f"Test Token: Failed to decode/load tokens: {e}")
return JSONResponse(status_code=500, content={"status": "error", "message": "Stored tokens are invalid."})
# Now test connection
try:
logger.debug(f"Test Token: garth.client type: {type(garth.client)}")
logger.debug("Test Token: Attempting to fetch UserProfile...")
# Using direct connectapi call as it was proven to work in debug script
# and avoids potential issues with UserProfile.get default args in this context
profile = garth.client.connectapi("/userprofile-service/socialProfile")
# success = True
display_name = profile.get('fullName') or profile.get('displayName')
logger.info(f"Test Token: Success! Connected as {display_name}")
return {"status": "success", "message": f"Token valid! Connected as: {display_name}"}
except GarthException as e:
logger.warning(f"Test Token: GarthException during profile fetch: {e}")
logger.warning(f"Test Token: GarthException: {e}")
return JSONResponse(status_code=401, content={"status": "error", "message": "Token expired or invalid."})
except Exception as e:
# Capture missing token errors that might be wrapped
logger.warning(f"Test Token: Exception during profile fetch: {e}")
logger.warning(f"Test Token: Exception: {e}")
if "OAuth1 token is required" in str(e):
return JSONResponse(status_code=400, content={"status": "error", "message": "No valid tokens found. Please login first."})
return JSONResponse(status_code=400, content={"status": "error", "message": "No valid tokens found."})
return JSONResponse(status_code=500, content={"status": "error", "message": f"Connection test failed: {str(e)}"})
except Exception as e:
logger.error(f"Test token failed with unexpected error: {e}", exc_info=True)
return JSONResponse(status_code=500, content={"status": "error", "message": str(e)})
@router.post("/setup/load-consul-config")
def load_consul_config(db: Session = Depends(get_db)):
    """Fetch Garmin/Fitbit credentials from Consul KV and return them.

    Reads every key under the app prefix, decodes the base64 Consul values,
    optionally merges a JSON blob stored under the 'config' key, and
    resolves credentials from either flat keys or nested 'garmin'/'fitbit'
    objects. Nothing is persisted here; the frontend receives the values.

    Raises:
        HTTPException: 404 when no keys exist under the prefix, 400 when
            Garmin credentials are missing, 502 when Consul is unreachable,
            500 for any other failure.
    """
    logger = logging.getLogger(__name__)
    logger.info("Attempting to load configuration from Consul...")
    try:
        # User defined Consul URL (hard-coded service discovery address)
        consul_host = "consul.service.dc1.consul"
        consul_port = "8500"
        app_prefix = "fitbit-garmin-sync/"
        # recurse=true returns every key under the prefix in one call
        consul_url = f"http://{consul_host}:{consul_port}/v1/kv/{app_prefix}?recurse=true"
        logger.debug(f"Connecting to Consul at: {consul_url}")
        response = requests.get(consul_url, timeout=5)
        if response.status_code == 404:
            logger.warning(f"No configuration found in Consul under '{app_prefix}'")
            raise HTTPException(status_code=404, detail="No configuration found in Consul")
        response.raise_for_status()
        data = response.json()
        config_map = {}
        # Helper to decode Consul values (the KV API base64-encodes them)
        def decode_consul_value(val):
            if not val: return None
            try:
                return base64.b64decode(val).decode('utf-8')
            except Exception as e:
                logger.warning(f"Failed to decode value: {e}")
                return None
        # Pass 1: Load all raw keys, stripped of the app prefix
        for item in data:
            key = item['Key'].replace(app_prefix, '')
            value = decode_consul_value(item.get('Value'))
            if value:
                config_map[key] = value
        # Pass 2: Check for special 'config' key (single JSON blob pattern)
        if 'config' in config_map:
            try:
                json_config = json.loads(config_map['config'])
                logger.debug("Found 'config' key with JSON content, merging...")
                # On key collision the JSON blob overrides the flat keys
                config_map.update(json_config)
            except json.JSONDecodeError:
                logger.warning("'config' key found but is not valid JSON, ignoring as blob.")
        logger.debug(f"Resolved configuration keys: {list(config_map.keys())}")
        # Look for standard flat keys first (both naming conventions)
        username = config_map.get('garmin_username') or config_map.get('USERNAME')
        password = config_map.get('garmin_password') or config_map.get('PASSWORD')
        is_china = str(config_map.get('is_china', 'false')).lower() == 'true'
        # If missing, try nested 'garmin' object (common config.json structure)
        if not username and isinstance(config_map.get('garmin'), dict):
            logger.debug("Found nested 'garmin' config object.")
            garmin_conf = config_map['garmin']
            username = garmin_conf.get('username')
            password = garmin_conf.get('password')
            if 'is_china' in garmin_conf:
                is_china = str(garmin_conf.get('is_china')).lower() == 'true'
        if not username or not password:
            logger.error("Consul config resolved but missing 'garmin_username' or 'garmin_password'")
            raise HTTPException(status_code=400, detail="Consul config missing credentials")
        # Extract Fitbit credentials (flat keys, then nested 'fitbit' object)
        fitbit_client_id = config_map.get('fitbit_client_id')
        fitbit_client_secret = config_map.get('fitbit_client_secret')
        fitbit_redirect_uri = config_map.get('fitbit_redirect_uri')
        if isinstance(config_map.get('fitbit'), dict):
            logger.debug("Found nested 'fitbit' config object.")
            fitbit_conf = config_map['fitbit']
            fitbit_client_id = fitbit_conf.get('client_id')
            fitbit_client_secret = fitbit_conf.get('client_secret')
        logger.info("Consul config loaded successfully. Returning to frontend.")
        return {
            "status": "success",
            "message": "Configuration loaded from Consul",
            "garmin": {
                "username": username,
                "password": password,
                "is_china": is_china
            },
            "fitbit": {
                "client_id": fitbit_client_id,
                "client_secret": fitbit_client_secret,
                "redirect_uri": fitbit_redirect_uri
            }
        }
    except requests.exceptions.RequestException as e:
        logger.error(f"Failed to connect to Consul: {e}")
        raise HTTPException(status_code=502, detail=f"Failed to connect to Consul: {str(e)}")
    except HTTPException:
        # Re-raise deliberate HTTP errors (404/400 above) unchanged
        raise
    except Exception as e:
        logger.error(f"Error loading from Consul: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Internal error loading config: {str(e)}")
@router.post("/setup/fitbit")
def save_fitbit_credentials(credentials: FitbitCredentials, db: Session = Depends(get_db)):
"""
Saves Fitbit credentials to the Configuration table and returns the authorization URL.
"""
logger = logging.getLogger(__name__)
logger.info("Received Fitbit credentials to save.")
try:
# Check if config exists
config_entry = db.query(Configuration).first()
if not config_entry:
config_entry = Configuration()
@@ -361,54 +214,27 @@ def save_fitbit_credentials(credentials: FitbitCredentials, db: Session = Depend
config_entry.fitbit_redirect_uri = credentials.redirect_uri
db.commit()
# Generate Auth URL
redirect_uri = credentials.redirect_uri
if not redirect_uri:
redirect_uri = None
redirect_uri = credentials.redirect_uri or None
fitbit_client = FitbitClient(credentials.client_id, credentials.client_secret, redirect_uri=redirect_uri)
auth_url = fitbit_client.get_authorization_url(redirect_uri)
return {
"status": "success",
"message": "Credentials saved.",
"auth_url": auth_url
}
return {"status": "success", "message": "Credentials saved.", "auth_url": auth_url}
except Exception as e:
logger.error(f"Error saving Fitbit credentials: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=f"Failed to save credentials: {str(e)}")
@router.post("/setup/fitbit/callback")
def fitbit_callback(callback_data: FitbitCallback, db: Session = Depends(get_db)):
"""
Exchanges the authorization code for tokens and saves them.
"""
logger = logging.getLogger(__name__)
logger.info("Received Fitbit callback code.")
try:
# Retrieve credentials
config_entry = db.query(Configuration).first()
if not config_entry or not config_entry.fitbit_client_id or not config_entry.fitbit_client_secret:
raise HTTPException(status_code=400, detail="Configuration not found or missing Fitbit credentials. Please save them first.")
if not config_entry or not config_entry.fitbit_client_id:
raise HTTPException(status_code=400, detail="Configuration missing Fitbit credentials.")
client_id = config_entry.fitbit_client_id
client_secret = config_entry.fitbit_client_secret
# Must match the one used in get_authorization_url
redirect_uri = config_entry.fitbit_redirect_uri
if not redirect_uri:
redirect_uri = None
fitbit_client = FitbitClient(client_id, client_secret, redirect_uri=redirect_uri)
redirect_uri = config_entry.fitbit_redirect_uri or None
fitbit_client = FitbitClient(config_entry.fitbit_client_id, config_entry.fitbit_client_secret, redirect_uri=redirect_uri)
token_data = fitbit_client.exchange_code_for_token(callback_data.code, redirect_uri)
# Save to APIToken
# Check if exists
token_entry = db.query(APIToken).filter_by(token_type='fitbit').first()
if not token_entry:
token_entry = APIToken(token_type='fitbit')
@@ -416,62 +242,36 @@ def fitbit_callback(callback_data: FitbitCallback, db: Session = Depends(get_db)
token_entry.access_token = token_data.get('access_token')
token_entry.refresh_token = token_data.get('refresh_token')
if token_data.get('expires_in'):
token_entry.expires_at = datetime.now() + timedelta(seconds=token_data.get('expires_in'))
# Handle expires_in (seconds) -> expires_at (datetime)
expires_in = token_data.get('expires_in')
if expires_in:
token_entry.expires_at = datetime.now() + timedelta(seconds=expires_in)
# Save other metadata if available (user_id, scope)
if 'scope' in token_data:
token_entry.scopes = str(token_data['scope']) # JSON or string list
db.commit()
return {
"status": "success",
"message": "Fitbit authentication successful. Tokens saved.",
"user_id": token_data.get('user_id')
}
return {"status": "success", "message": "Fitbit authentication successful.", "user_id": token_data.get('user_id')}
except HTTPException:
raise
except Exception as e:
logger.error(f"Error in Fitbit callback: {e}", exc_info=True)
# Often oauth errors are concise, return detail
raise HTTPException(status_code=500, detail=f"Authentication failed: {str(e)}")
@router.post("/setup/fitbit/test-token")
def test_fitbit_token(db: Session = Depends(get_db)):
"""Tests if the stored Fitbit token is valid by fetching user profile."""
logger = logging.getLogger(__name__)
logger.info("Received request to test Fitbit token.")
try:
# Retrieve tokens and credentials
token = db.query(APIToken).filter_by(token_type='fitbit').first()
config_entry = db.query(Configuration).first()
if not token or not token.access_token:
return JSONResponse(status_code=400, content={"status": "error", "message": "No Fitbit token found. Please authenticate first."})
return JSONResponse(status_code=400, content={"status": "error", "message": "No Fitbit token found."})
if not config_entry or not config_entry.fitbit_client_id or not config_entry.fitbit_client_secret:
return JSONResponse(status_code=400, content={"status": "error", "message": "Fitbit credentials missing."})
# Instantiate client with tokens
# Note: fitbit library handles token refresh automatically if refresh_token is provided and valid
fitbit_client = FitbitClient(
config_entry.fitbit_client_id,
config_entry.fitbit_client_secret,
access_token=token.access_token,
refresh_token=token.refresh_token,
redirect_uri=config_entry.fitbit_redirect_uri # Optional but good practice
redirect_uri=config_entry.fitbit_redirect_uri
)
# Test call
if not fitbit_client.fitbit:
return JSONResponse(status_code=500, content={"status": "error", "message": "Failed to initialize Fitbit client."})
profile = fitbit_client.fitbit.user_profile_get()
user = profile.get('user', {})
display_name = user.get('displayName') or user.get('fullName')
@@ -479,13 +279,9 @@ def test_fitbit_token(db: Session = Depends(get_db)):
return {
"status": "success",
"message": f"Token valid! Connected as: {display_name}",
"user": {
"displayName": display_name,
"avatar": user.get('avatar')
}
"user": {"displayName": display_name, "avatar": user.get('avatar')}
}
except Exception as e:
logger.error(f"Test Fitbit token failed: {e}", exc_info=True)
# Check for specific token errors if possible, but generic catch is okay for now
return JSONResponse(status_code=401, content={"status": "error", "message": f"Token invalid or expired: {str(e)}"})

View File

@@ -0,0 +1,110 @@
from fastapi import APIRouter, Depends, HTTPException, status
from sqlalchemy.orm import Session
from pydantic import BaseModel
from typing import List, Optional
from datetime import datetime
import logging
from ..models.bike_setup import BikeSetup
from ..models.base import Base
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
logger = logging.getLogger(__name__)
# Reusing get_db logic (it should ideally be in a shared common module, but for now reproducing it to avoid circular imports or refactoring)
def get_db():
    """FastAPI dependency: yield a DB session scoped to one request."""
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as db_session:
        yield db_session
class BikeSetupCreate(BaseModel):
    """Request payload for creating a bike setup (frame plus gearing)."""
    frame: str  # frame identifier/description
    chainring: int  # front chainring tooth count
    rear_cog: int  # rear cog tooth count
    name: Optional[str] = None  # optional display name
class BikeSetupUpdate(BaseModel):
    """Partial-update payload; fields left as None are not changed."""
    frame: Optional[str] = None
    chainring: Optional[int] = None
    rear_cog: Optional[int] = None
    name: Optional[str] = None
class BikeSetupRead(BaseModel):
    """Response model for a persisted bike setup row."""
    id: int
    frame: str
    chainring: int
    rear_cog: int
    name: Optional[str] = None
    created_at: Optional[datetime]
    updated_at: Optional[datetime]
    class Config:
        # Allow construction directly from SQLAlchemy ORM instances.
        from_attributes = True
router = APIRouter(prefix="/api/bike-setups", tags=["bike-setups"])
@router.get("/", response_model=List[BikeSetupRead])
def get_bike_setups(db: Session = Depends(get_db)):
    """Return every stored bike setup."""
    all_setups = db.query(BikeSetup).all()
    return all_setups
@router.post("/", response_model=BikeSetupRead, status_code=status.HTTP_201_CREATED)
def create_bike_setup(setup: BikeSetupCreate, db: Session = Depends(get_db)):
    """Persist a new bike setup and return it with its generated fields."""
    record = BikeSetup(
        frame=setup.frame,
        chainring=setup.chainring,
        rear_cog=setup.rear_cog,
        name=setup.name,
    )
    db.add(record)
    db.commit()
    db.refresh(record)  # pick up the DB-generated id and timestamps
    return record
@router.get("/{setup_id}", response_model=BikeSetupRead)
def get_bike_setup(setup_id: int, db: Session = Depends(get_db)):
    """Fetch one bike setup by id; 404 if it does not exist."""
    record = db.query(BikeSetup).filter(BikeSetup.id == setup_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Bike setup not found")
    return record
@router.put("/{setup_id}", response_model=BikeSetupRead)
def update_bike_setup(setup_id: int, setup_data: BikeSetupUpdate, db: Session = Depends(get_db)):
    """Apply a partial update; fields sent as None are left untouched."""
    record = db.query(BikeSetup).filter(BikeSetup.id == setup_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Bike setup not found")
    # Copy only the fields the caller actually provided (non-None).
    for field_name in ("frame", "chainring", "rear_cog", "name"):
        new_value = getattr(setup_data, field_name)
        if new_value is not None:
            setattr(record, field_name, new_value)
    db.commit()
    db.refresh(record)
    return record
@router.delete("/{setup_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_bike_setup(setup_id: int, db: Session = Depends(get_db)):
    """Delete one bike setup; 404 if it does not exist."""
    record = db.query(BikeSetup).filter(BikeSetup.id == setup_id).first()
    if record is None:
        raise HTTPException(status_code=404, detail="Bike setup not found")
    db.delete(record)
    db.commit()
@router.post("/match-all", status_code=status.HTTP_200_OK)
def trigger_matching(db: Session = Depends(get_db)):
    """Run bike-setup matching across all applicable activities."""
    # Local import — presumably to avoid an import cycle at module load;
    # confirm before hoisting to the top of the file.
    from ..services.bike_matching import run_matching_for_all
    run_matching_for_all(db)
    return {"status": "success", "message": "Matching process completed."}

View File

@@ -0,0 +1,121 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
import logging
import requests
import base64
import json
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
router = APIRouter()
logger = logging.getLogger(__name__)
def get_db():
    """FastAPI dependency: yield a DB session scoped to one request."""
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as db_session:
        yield db_session
@router.post("/setup/load-consul-config")
def load_consul_config(db: Session = Depends(get_db)):
    """Fetch Garmin/Fitbit credentials from Consul KV and return them.

    Reads every key under the app prefix, base64-decodes the Consul values,
    optionally merges a JSON blob stored under the 'config' key, and
    resolves credentials from either flat keys or nested 'garmin'/'fitbit'
    objects. Nothing is persisted; values are returned to the frontend.

    NOTE(review): a near-identical endpoint exists in the setup/auth routes
    module — consider keeping only one copy.

    Raises:
        HTTPException: 404 when no keys exist under the prefix, 400 when
            Garmin credentials are missing, 502 when Consul is unreachable,
            500 for any other failure.
    """
    logger = logging.getLogger(__name__)
    logger.info("Attempting to load configuration from Consul...")
    try:
        # User defined Consul URL (hard-coded service discovery address)
        consul_host = "consul.service.dc1.consul"
        consul_port = "8500"
        app_prefix = "fitbit-garmin-sync/"
        # recurse=true fetches every key under the prefix in one request
        consul_url = f"http://{consul_host}:{consul_port}/v1/kv/{app_prefix}?recurse=true"
        logger.debug(f"Connecting to Consul at: {consul_url}")
        response = requests.get(consul_url, timeout=5)
        if response.status_code == 404:
            logger.warning(f"No configuration found in Consul under '{app_prefix}'")
            raise HTTPException(status_code=404, detail="No configuration found in Consul")
        response.raise_for_status()
        data = response.json()
        config_map = {}
        # Helper to decode Consul values (the KV API base64-encodes them)
        def decode_consul_value(val):
            if not val: return None
            try:
                return base64.b64decode(val).decode('utf-8')
            except Exception as e:
                logger.warning(f"Failed to decode value: {e}")
                return None
        # Pass 1: Load all raw keys, stripped of the app prefix
        for item in data:
            key = item['Key'].replace(app_prefix, '')
            value = decode_consul_value(item.get('Value'))
            if value:
                config_map[key] = value
        # Pass 2: Check for special 'config' key (single JSON blob pattern);
        # on key collision the JSON blob overrides the flat keys
        if 'config' in config_map:
            try:
                json_config = json.loads(config_map['config'])
                logger.debug("Found 'config' key with JSON content, merging...")
                config_map.update(json_config)
            except json.JSONDecodeError:
                logger.warning("'config' key found but is not valid JSON, ignoring as blob.")
        logger.debug(f"Resolved configuration keys: {list(config_map.keys())}")
        # Look for standard flat keys first (both naming conventions)
        username = config_map.get('garmin_username') or config_map.get('USERNAME')
        password = config_map.get('garmin_password') or config_map.get('PASSWORD')
        is_china = str(config_map.get('is_china', 'false')).lower() == 'true'
        # Fall back to a nested 'garmin' object (config.json structure)
        if not username and isinstance(config_map.get('garmin'), dict):
            logger.debug("Found nested 'garmin' config object.")
            garmin_conf = config_map['garmin']
            username = garmin_conf.get('username')
            password = garmin_conf.get('password')
            if 'is_china' in garmin_conf:
                is_china = str(garmin_conf.get('is_china')).lower() == 'true'
        if not username or not password:
            logger.error("Consul config resolved but missing 'garmin_username' or 'garmin_password'")
            raise HTTPException(status_code=400, detail="Consul config missing credentials")
        # Extract Fitbit credentials (flat keys, then nested 'fitbit' object)
        fitbit_client_id = config_map.get('fitbit_client_id')
        fitbit_client_secret = config_map.get('fitbit_client_secret')
        fitbit_redirect_uri = config_map.get('fitbit_redirect_uri')
        if isinstance(config_map.get('fitbit'), dict):
            logger.debug("Found nested 'fitbit' config object.")
            fitbit_conf = config_map['fitbit']
            fitbit_client_id = fitbit_conf.get('client_id')
            fitbit_client_secret = fitbit_conf.get('client_secret')
        logger.info("Consul config loaded successfully. Returning to frontend.")
        return {
            "status": "success",
            "message": "Configuration loaded from Consul",
            "garmin": {
                "username": username,
                "password": password,
                "is_china": is_china
            },
            "fitbit": {
                "client_id": fitbit_client_id,
                "client_secret": fitbit_client_secret,
                "redirect_uri": fitbit_redirect_uri
            }
        }
    except requests.exceptions.RequestException as e:
        logger.error(f"Failed to connect to Consul: {e}")
        raise HTTPException(status_code=502, detail=f"Failed to connect to Consul: {str(e)}")
    except HTTPException:
        # Re-raise deliberate HTTP errors (404/400 above) unchanged
        raise
    except Exception as e:
        logger.error(f"Error loading from Consul: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Internal error loading config: {str(e)}")

View File

@@ -1,9 +1,11 @@
from fastapi import APIRouter, Query, HTTPException, Depends
from fastapi import APIRouter, Query, HTTPException, Depends, BackgroundTasks
from pydantic import BaseModel
from typing import List, Optional, Dict, Any
from sqlalchemy import func
from ..models.health_metric import HealthMetric
from ..models.weight_record import WeightRecord
import logging
import json
from ..services.postgresql_manager import PostgreSQLManager
from sqlalchemy.orm import Session
from ..utils.config import config
@@ -79,21 +81,62 @@ async def query_metrics(
metric_type: Optional[str] = Query(None),
start_date: Optional[str] = Query(None),
end_date: Optional[str] = Query(None),
limit: int = Query(100, ge=1, le=1000),
source: Optional[str] = Query(None),
limit: int = Query(100, ge=1, le=10000),
db: Session = Depends(get_db)
):
"""
Query health metrics with filters.
"""
try:
logger.info(f"Querying metrics - type: {metric_type}, start: {start_date}, end: {end_date}, limit: {limit}")
logger.info(f"Querying metrics - type: {metric_type}, source: {source}, start: {start_date}, end: {end_date}, limit: {limit}")
# Start building the query
# Special handling for Fitbit Weight queries -> Use WeightRecord table
if source == 'fitbit' and metric_type == 'weight':
query = db.query(WeightRecord)
if start_date:
from datetime import datetime
start_dt = datetime.fromisoformat(start_date)
query = query.filter(WeightRecord.date >= start_dt)
if end_date:
from datetime import datetime
end_dt = datetime.fromisoformat(end_date)
query = query.filter(WeightRecord.date <= end_dt)
query = query.order_by(WeightRecord.date.desc())
query = query.limit(limit)
weight_records = query.all()
metric_responses = []
for wr in weight_records:
metric_responses.append(
HealthMetricResponse(
id=wr.id,
metric_type='weight',
metric_value=wr.weight,
unit=wr.unit,
timestamp=wr.timestamp.isoformat() if wr.timestamp else "",
date=wr.date.isoformat() if wr.date else "",
source='fitbit',
detailed_data={'fitbit_id': wr.fitbit_id, 'bmi': wr.bmi}
)
)
logger.info(f"Returning {len(metric_responses)} Fitbit weight records from WeightRecord table")
return metric_responses
# Default: Start building the query on HealthMetric
query = db.query(HealthMetric)
# Apply filters based on parameters
if metric_type:
query = query.filter(HealthMetric.metric_type == metric_type)
if source:
query = query.filter(HealthMetric.source == source)
if start_date:
from datetime import datetime
@@ -105,6 +148,9 @@ async def query_metrics(
end_dt = datetime.fromisoformat(end_date)
query = query.filter(HealthMetric.date <= end_dt.date())
# Sort by Date Descending
query = query.order_by(HealthMetric.date.desc())
# Apply limit
query = query.limit(limit)
@@ -123,7 +169,7 @@ async def query_metrics(
timestamp=metric.timestamp.isoformat() if metric.timestamp else "",
date=metric.date.isoformat() if metric.date else "",
source=metric.source,
detailed_data=metric.detailed_data
detailed_data=json.loads(metric.detailed_data) if metric.detailed_data else None
)
)
@@ -133,6 +179,24 @@ async def query_metrics(
logger.error(f"Error in query_metrics: {str(e)}")
raise HTTPException(status_code=500, detail=f"Error querying metrics: {str(e)}")
# run_fitbit_sync_job moved to tasks.definitions
# ...
@router.post("/metrics/sync/fitbit")
async def sync_fitbit_trigger(
    background_tasks: BackgroundTasks,
    days_back: int = Query(30, description="Number of days to sync back")
):
    """Kick off a background Fitbit metrics sync and return its job id."""
    new_job_id = job_manager.create_job("sync_fitbit_metrics")
    manager = PostgreSQLManager(config.DATABASE_URL)
    background_tasks.add_task(run_fitbit_sync_job, new_job_id, days_back, manager.get_db_session)
    return {"job_id": new_job_id, "status": "started"}
@router.get("/health-data/summary", response_model=HealthDataSummary)
async def get_health_summary(
start_date: Optional[str] = Query(None),
@@ -220,9 +284,57 @@ async def get_health_summary(
total_sleep_hours=round(total_sleep_hours, 2),
avg_calories=round(avg_calories, 2)
)
logger.info(f"Returning health summary: steps={total_steps}, avg_hr={avg_heart_rate}, sleep_hours={total_sleep_hours}, avg_calories={avg_calories}")
return summary
except Exception as e:
logger.error(f"Error in get_health_summary: {str(e)}")
raise HTTPException(status_code=500, detail=f"Error getting health summary: {str(e)}")
raise HTTPException(status_code=500, detail=f"Error getting health summary: {str(e)}")
# New Sync Endpoints
from ..services.job_manager import job_manager
from ..models.health_state import HealthSyncState
from ..utils.config import config
from ..services.postgresql_manager import PostgreSQLManager
from ..tasks.definitions import run_health_scan_job, run_health_sync_job, run_fitbit_sync_job
# Removed inline run_health_scan_job and run_health_sync_job
# Definitions moved to tasks/definitions.py
@router.post("/metrics/sync/scan")
async def scan_health_trigger(background_tasks: BackgroundTasks):
    """Kick off a background scan for health-data gaps; returns the job id."""
    new_job_id = job_manager.create_job("scan_health_metrics")
    manager = PostgreSQLManager(config.DATABASE_URL)
    background_tasks.add_task(run_health_scan_job, new_job_id, manager.get_db_session)
    return {"job_id": new_job_id, "status": "started"}
@router.post("/metrics/sync/pending")
async def sync_pending_health_trigger(
    background_tasks: BackgroundTasks,
    limit: Optional[int] = Query(None, description="Limit number of days/metrics to sync")
):
    """Kick off a background sync of pending health metrics."""
    new_job_id = job_manager.create_job("sync_pending_health_metrics")
    session_factory = PostgreSQLManager(config.DATABASE_URL).get_db_session
    background_tasks.add_task(run_health_sync_job, new_job_id, limit, session_factory)
    return {"status": "started", "job_id": new_job_id}
@router.get("/metrics/sync/status")
async def get_health_sync_status_summary(db: Session = Depends(get_db)):
    """Return a mapping of sync_status -> row count for health metrics."""
    try:
        rows = (
            db.query(HealthSyncState.sync_status, func.count(HealthSyncState.id))
            .group_by(HealthSyncState.sync_status)
            .all()
        )
        return {status: count for status, count in rows}
    except Exception as e:
        # Best-effort endpoint: an empty summary beats a 500 on the dashboard.
        logger.error(f"Error getting health sync status: {e}")
        return {}

View File

@@ -0,0 +1,131 @@
from fastapi import APIRouter, HTTPException, Depends
from pydantic import BaseModel
from sqlalchemy.orm import Session
from typing import List, Optional
from datetime import datetime, timedelta
import json
import logging
from ..models.scheduled_job import ScheduledJob
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
from ..services.scheduler import scheduler
router = APIRouter()
logger = logging.getLogger(__name__)
def get_db():
    """FastAPI dependency that yields a database session and closes it after the request."""
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as session:
        yield session
class ScheduledJobResponse(BaseModel):
    """Serialized view of a ScheduledJob row returned by the scheduling API."""
    id: int
    job_type: str  # scheduler task key, e.g. 'fitbit_weight_sync'
    name: str
    interval_minutes: int
    enabled: bool
    last_run: Optional[datetime]
    next_run: Optional[datetime]
    params: Optional[str]  # JSON-encoded task parameters

    class Config:
        # Allow construction directly from SQLAlchemy ORM objects.
        from_attributes = True


class JobUpdateRequest(BaseModel):
    """Partial-update payload for a scheduled job; None fields are left unchanged."""
    interval_minutes: Optional[int] = None
    enabled: Optional[bool] = None
    params: Optional[dict] = None
@router.get("/scheduling/jobs", response_model=List[ScheduledJobResponse])
def list_scheduled_jobs(db: Session = Depends(get_db)):
    """List all scheduled jobs, ordered by id."""
    return db.query(ScheduledJob).order_by(ScheduledJob.id).all()
@router.put("/scheduling/jobs/{job_id}", response_model=ScheduledJobResponse)
def update_scheduled_job(job_id: int, request: JobUpdateRequest, db: Session = Depends(get_db)):
    """Update a scheduled job's interval, enabled flag, or parameters.

    Raises 404 when the job does not exist and 400 for a sub-minute interval.
    """
    job = db.query(ScheduledJob).filter(ScheduledJob.id == job_id).first()
    if job is None:
        raise HTTPException(status_code=404, detail="Job not found")

    if request.interval_minutes is not None:
        if request.interval_minutes < 1:
            raise HTTPException(status_code=400, detail="Interval must be at least 1 minute")
        job.interval_minutes = request.interval_minutes
        # Re-anchor next_run to the last run under the new cadence. A job
        # that has never run keeps its current next_run: a null next_run is
        # picked up immediately by the scheduler once the job is enabled.
        if job.last_run:
            job.next_run = job.last_run + timedelta(minutes=job.interval_minutes)

    if request.enabled is not None:
        job.enabled = request.enabled
        # Re-enabling a job with no scheduled run makes it due right away.
        if job.enabled and job.next_run is None:
            job.next_run = datetime.now()

    if request.params is not None:
        job.params = json.dumps(request.params)

    db.commit()
    db.refresh(job)
    return job
class JobCreateRequest(BaseModel):
    """Payload for creating a new scheduled job."""
    job_type: str  # must be a key of scheduler.TASK_MAP (validated in the endpoint)
    name: str
    interval_minutes: int
    # NOTE(review): mutable default is safe here only because Pydantic
    # deep-copies field defaults per instance.
    params: Optional[dict] = {}
    enabled: Optional[bool] = True
@router.post("/scheduling/jobs", response_model=ScheduledJobResponse)
def create_scheduled_job(request: JobCreateRequest, db: Session = Depends(get_db)):
    """Create a new scheduled job.

    Validates the job type against the scheduler's task registry and the
    interval (at least 1 minute, matching the update endpoint). An enabled
    job is made due immediately (next_run = now).

    Raises:
        HTTPException: 400 for an unknown job_type, an interval below one
            minute, or a database failure on insert.
    """
    # scheduler is already imported at module scope; the original re-imported
    # it locally, which was redundant.
    if request.job_type not in scheduler.TASK_MAP:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid job_type. Must be one of: {list(scheduler.TASK_MAP.keys())}"
        )
    # Consistency fix: update_scheduled_job rejects sub-minute intervals, so
    # creation must too — otherwise an invalid job can be created but never edited.
    if request.interval_minutes < 1:
        raise HTTPException(status_code=400, detail="Interval must be at least 1 minute")

    new_job = ScheduledJob(
        job_type=request.job_type,
        name=request.name,
        interval_minutes=request.interval_minutes,
        params=json.dumps(request.params) if request.params else "{}",
        enabled=request.enabled,
        # NOTE(review): naive datetime written to a timezone-aware column — confirm.
        next_run=datetime.now() if request.enabled else None
    )
    try:
        db.add(new_job)
        db.commit()
        db.refresh(new_job)
        return new_job
    except Exception as e:
        db.rollback()
        logger.error(f"Failed to create job: {e}")
        raise HTTPException(status_code=400, detail=f"Failed to create job: {str(e)}")
@router.delete("/scheduling/jobs/{job_id}", status_code=204)
def delete_scheduled_job(job_id: int, db: Session = Depends(get_db)):
    """Delete a scheduled job; 404 if it does not exist."""
    target = db.query(ScheduledJob).filter(ScheduledJob.id == job_id).first()
    if target is None:
        raise HTTPException(status_code=404, detail="Job not found")
    db.delete(target)
    db.commit()
    return None

View File

@@ -1,4 +1,4 @@
from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, BackgroundTasks
from pydantic import BaseModel
from typing import List, Optional, Dict, Any
from sqlalchemy.orm import Session
@@ -6,6 +6,7 @@ from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
from ..models.activity import Activity
from ..models.sync_log import SyncLog
from ..services.job_manager import job_manager
from datetime import datetime
import json
@@ -30,11 +31,28 @@ class SyncLogResponse(BaseModel):
class Config:
from_attributes = True
class Config:
from_attributes = True
class JobStatusResponse(BaseModel):
    """Snapshot of one background job as tracked by job_manager."""
    id: str
    operation: str  # human-readable job name, e.g. "Activity Sync"
    status: str  # e.g. 'running', 'cancelled', 'failed'
    message: Optional[str] = None
    start_time: datetime
    progress: int = 0  # percent complete, 0-100
    cancel_requested: bool = False
    paused: bool = False
    completed_at: Optional[datetime] = None
    duration_s: Optional[float] = None  # presumably wall-clock seconds — confirm in job_manager
    result: Optional[Dict[str, Any]] = None


class StatusResponse(BaseModel):
    """Aggregate system status returned by GET /status."""
    total_activities: int
    downloaded_activities: int
    recent_logs: List[SyncLogResponse]
    last_sync_stats: Optional[List[Dict[str, Any]]] = None
    # NOTE(review): mutable default — safe only because Pydantic copies defaults per instance.
    active_jobs: List[Dict[str, Any]] = []
@router.get("/status", response_model=StatusResponse)
def get_status(db: Session = Depends(get_db)):
@@ -79,5 +97,65 @@ def get_status(db: Session = Depends(get_db)):
total_activities=total_activities,
downloaded_activities=downloaded_activities,
recent_logs=recent_logs,
last_sync_stats=last_sync_stats if last_sync_stats else []
)
last_sync_stats=last_sync_stats if last_sync_stats else [],
active_jobs=job_manager.get_active_jobs()
)
@router.get("/jobs/history", response_model=Dict[str, Any])
def get_job_history(page: int = 1, limit: int = 10):
    """Get history of completed jobs with pagination.

    Clamps malformed inputs: page and limit both floor at 1, so a bad query
    string cannot produce a negative offset or a non-positive page size
    (the original passed limit through unvalidated).
    """
    page = max(page, 1)
    limit = max(limit, 1)
    offset = (page - 1) * limit
    return job_manager.get_job_history(limit=limit, offset=offset)
@router.post("/jobs/{job_id}/pause")
def pause_job(job_id: str):
    """Request that a running job pause at its next checkpoint."""
    if not job_manager.request_pause(job_id):
        raise HTTPException(status_code=404, detail="Job not found or cannot be paused")
    return {"status": "paused", "message": f"Pause requested for job {job_id}"}


@router.post("/jobs/{job_id}/resume")
def resume_job(job_id: str):
    """Resume a paused job."""
    if not job_manager.resume_job(job_id):
        raise HTTPException(status_code=404, detail="Job not found or cannot be resumed")
    return {"status": "resumed", "message": f"Job {job_id} resumed"}


@router.post("/jobs/{job_id}/cancel")
def cancel_job(job_id: str):
    """Request cancellation of a job; the worker cancels at its next checkpoint."""
    if not job_manager.request_cancel(job_id):
        raise HTTPException(status_code=404, detail="Job not found")
    return {"status": "cancelling", "message": f"Cancellation requested for job {job_id}"}
import time
def run_test_job(job_id: str):
    """Simulate a long-running job that honors pause and cancel requests."""
    try:
        total_steps = 20
        step = 0
        while step < total_steps:
            if job_manager.should_cancel(job_id):
                job_manager.update_job(job_id, status="cancelled", message="Cancelled by user")
                return
            if job_manager.should_pause(job_id):
                # Paused: idle without advancing or reporting progress.
                time.sleep(1)
                continue
            done = step + 1
            job_manager.update_job(
                job_id,
                status="running",
                progress=int((done / total_steps) * 100),
                message=f"Processing... {done}/{total_steps}",
            )
            time.sleep(1)
            step = done
        job_manager.complete_job(job_id)
    except Exception as e:
        job_manager.fail_job(job_id, str(e))
@router.post("/status/test-job")
def trigger_test_job(background_tasks: BackgroundTasks):
    """Trigger a test job for queue verification."""
    new_job_id = job_manager.create_job("Test Job (5s)")
    # run_serialized enforces the global one-job-at-a-time lock.
    background_tasks.add_task(job_manager.run_serialized, new_job_id, run_test_job)
    return {"job_id": new_job_id, "status": "started", "message": "Test job started"}

View File

@@ -1,4 +1,4 @@
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks, Query
from pydantic import BaseModel
from typing import Optional, List, Dict, Any
from datetime import datetime, timedelta
@@ -15,6 +15,7 @@ import garth
import time
from garth.auth_tokens import OAuth1Token, OAuth2Token
from ..services.fitbit_client import FitbitClient
from fitbit import exceptions
from ..models.weight_record import WeightRecord
from ..models.config import Configuration
from enum import Enum
@@ -28,11 +29,21 @@ class SyncActivityRequest(BaseModel):
class SyncMetricsRequest(BaseModel):
    """Request body for a health-metrics sync."""
    days_back: int = 30  # how many days of history to pull


class UploadWeightRequest(BaseModel):
    """Request body for uploading weight records to Garmin."""
    limit: int = 50  # max number of records per run


class SyncResponse(BaseModel):
    """Generic response for endpoints that start a background sync."""
    status: str
    message: str
    job_id: Optional[str] = None  # set when a background job was created


class WeightComparisonResponse(BaseModel):
    """Result of comparing Fitbit vs Garmin weight coverage by day."""
    fitbit_total: int  # unique days with a Fitbit weight record
    garmin_total: int  # unique days present on the Garmin side
    missing_in_garmin: int  # count of Fitbit days absent from Garmin
    missing_dates: List[str]  # ISO dates missing in Garmin, newest first
    message: str


class FitbitSyncScope(str, Enum):
    """How far back a Fitbit sync should reach."""
    LAST_30_DAYS = "30d"
    ALL_HISTORY = "all"
@@ -53,66 +64,27 @@ def get_db():
with db_manager.get_db_session() as session:
yield session
def _load_and_verify_garth_session(db: Session):
    """Load the stored Garmin token from the DB and verify the session.

    Reads the OAuth1/OAuth2 token JSON for the 'garmin' token type, installs
    it on the global garth client (restoring the saved domain first, if any),
    then proves the tokens still work by fetching the user profile.

    Raises:
        HTTPException: 401 when no usable token is stored, or when
            verification against Garmin fails for any reason.
    """
    logger.info("Loading and verifying Garmin session...")
    token_record = db.query(APIToken).filter_by(token_type='garmin').first()
    # Both token halves are required for garth to authenticate.
    if not (token_record and token_record.garth_oauth1_token and token_record.garth_oauth2_token):
        raise HTTPException(status_code=401, detail="Garmin token not found.")
    try:
        oauth1_dict = json.loads(token_record.garth_oauth1_token)
        oauth2_dict = json.loads(token_record.garth_oauth2_token)
        # Restore the regional domain (if one was saved) before installing tokens.
        domain = oauth1_dict.get('domain')
        if domain:
            garth.configure(domain=domain)
        # Mutates the GLOBAL garth client — callers rely on this side effect.
        garth.client.oauth1_token = OAuth1Token(**oauth1_dict)
        garth.client.oauth2_token = OAuth2Token(**oauth2_dict)
        # Cheap authenticated call that fails fast on stale/revoked tokens.
        garth.UserProfile.get()
        logger.info("Garth session verified.")
    except Exception as e:
        logger.error(f"Garth session verification failed: {e}", exc_info=True)
        raise HTTPException(status_code=401, detail=f"Failed to authenticate with Garmin: {e}")
def run_activity_sync_task(job_id: str, days_back: int):
    """Background task: verify Garmin auth, then sync recent activities."""
    logger.info(f"Starting background activity sync task {job_id}")
    db_manager = PostgreSQLManager(config.DATABASE_URL)
    with db_manager.get_db_session() as session:
        try:
            _load_and_verify_garth_session(session)
            app = SyncApp(db_session=session, garmin_client=GarminClient())
            app.sync_activities(days_back=days_back, job_id=job_id)
        except Exception as e:
            # Surface the failure on the job record instead of raising.
            logger.error(f"Background task failed: {e}")
            job_manager.update_job(job_id, status="failed", message=str(e))
def run_metrics_sync_task(job_id: str, days_back: int):
    """Background task: verify Garmin auth, then sync health metrics."""
    logger.info(f"Starting background metrics sync task {job_id}")
    db_manager = PostgreSQLManager(config.DATABASE_URL)
    with db_manager.get_db_session() as session:
        try:
            _load_and_verify_garth_session(session)
            app = SyncApp(db_session=session, garmin_client=GarminClient())
            app.sync_health_metrics(days_back=days_back, job_id=job_id)
        except Exception as e:
            # Surface the failure on the job record instead of raising.
            logger.error(f"Background task failed: {e}")
            job_manager.update_job(job_id, status="failed", message=str(e))
from ..services.garth_helper import load_and_verify_garth_session
from ..tasks.definitions import (
run_activity_sync_task,
run_metrics_sync_task,
run_health_scan_job,
run_fitbit_sync_job,
run_garmin_upload_job,
run_health_sync_job
)
@router.post("/sync/activities", response_model=SyncResponse)
def sync_activities(request: SyncActivityRequest, background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
# Verify auth first before starting task
try:
_load_and_verify_garth_session(db)
load_and_verify_garth_session(db)
except Exception as e:
raise HTTPException(status_code=401, detail=f"Garmin auth failed: {str(e)}")
job_id = job_manager.create_job("Activity Sync")
background_tasks.add_task(run_activity_sync_task, job_id, request.days_back)
db_manager = PostgreSQLManager(config.DATABASE_URL)
background_tasks.add_task(run_activity_sync_task, job_id, request.days_back, db_manager.get_db_session)
return SyncResponse(
status="started",
@@ -123,12 +95,13 @@ def sync_activities(request: SyncActivityRequest, background_tasks: BackgroundTa
@router.post("/sync/metrics", response_model=SyncResponse)
def sync_metrics(request: SyncMetricsRequest, background_tasks: BackgroundTasks, db: Session = Depends(get_db)):
try:
_load_and_verify_garth_session(db)
load_and_verify_garth_session(db)
except Exception as e:
raise HTTPException(status_code=401, detail=f"Garmin auth failed: {str(e)}")
job_id = job_manager.create_job("Health Metrics Sync")
background_tasks.add_task(run_metrics_sync_task, job_id, request.days_back)
db_manager = PostgreSQLManager(config.DATABASE_URL)
background_tasks.add_task(run_metrics_sync_task, job_id, request.days_back, db_manager.get_db_session)
return SyncResponse(
status="started",
@@ -136,6 +109,22 @@ def sync_metrics(request: SyncMetricsRequest, background_tasks: BackgroundTasks,
job_id=job_id
)
@router.post("/metrics/sync/scan", response_model=SyncResponse)
async def scan_health_trigger(
    background_tasks: BackgroundTasks,
    days_back: int = Query(30, description="Number of days to scan back")
):
    """Trigger background scan of health gaps."""
    new_job_id = job_manager.create_job("scan_health_metrics")
    session_factory = PostgreSQLManager(config.DATABASE_URL).get_db_session
    background_tasks.add_task(run_health_scan_job, new_job_id, days_back, session_factory)
    return SyncResponse(
        status="started",
        message="Health metrics scan started in background",
        job_id=new_job_id,
    )
@router.post("/sync/fitbit/weight", response_model=SyncResponse)
def sync_fitbit_weight(request: WeightSyncRequest, db: Session = Depends(get_db)):
# Keep functionality for now, ideally also background
@@ -161,13 +150,37 @@ def sync_fitbit_weight_impl(request: WeightSyncRequest, db: Session):
raise HTTPException(status_code=400, detail="Fitbit credentials missing.")
# 2. Init Client
# Define callback to save new token
def refresh_cb(token_dict):
logger.info("Fitbit token refreshed via callback")
try:
# Re-query to avoid stale object errors if session closed?
# We have 'db' session from argument.
# We can use it.
# Convert token_dict to model fields
# The token_dict from fitbit library usually has access_token, refresh_token, expires_in/at
# token is the APIToken object from line 197. Use it if attached, or query.
# It's better to query by ID or token_type again to be safe?
# Or just use the 'token' variable if it's still attached to session.
token.access_token = token_dict.get('access_token')
token.refresh_token = token_dict.get('refresh_token')
token.expires_at = datetime.fromtimestamp(token_dict.get('expires_at')) if token_dict.get('expires_at') else None
# scopes?
db.commit()
logger.info("New Fitbit token saved to DB")
except Exception as e:
logger.error(f"Failed to save refreshed token: {e}")
try:
fitbit_client = FitbitClient(
config_entry.fitbit_client_id,
config_entry.fitbit_client_secret,
access_token=token.access_token,
refresh_token=token.refresh_token,
redirect_uri=config_entry.fitbit_redirect_uri
redirect_uri=config_entry.fitbit_redirect_uri,
refresh_cb=refresh_cb
)
except Exception as e:
logger.error(f"Failed to initialize Fitbit client: {e}")
@@ -245,6 +258,7 @@ def sync_fitbit_weight_impl(request: WeightSyncRequest, db: Session):
# Structure: {'bmi': 23.5, 'date': '2023-01-01', 'logId': 12345, 'time': '23:59:59', 'weight': 70.5, 'source': 'API'}
fitbit_id = str(log.get('logId'))
weight_val = log.get('weight')
bmi_val = log.get('bmi')
date_str = log.get('date')
time_str = log.get('time')
@@ -252,11 +266,15 @@ def sync_fitbit_weight_impl(request: WeightSyncRequest, db: Session):
dt_str = f"{date_str} {time_str}"
timestamp = datetime.strptime(dt_str, '%Y-%m-%d %H:%M:%S')
# Check exist
# Check exist
existing = db.query(WeightRecord).filter_by(fitbit_id=fitbit_id).first()
if existing:
if abs(existing.weight - weight_val) > 0.01: # Check for update
# Check for update (weight changed or BMI missing)
if abs(existing.weight - weight_val) > 0.01 or existing.bmi is None:
existing.weight = weight_val
existing.bmi = bmi_val
existing.unit = 'kg' # Force unit update too
existing.date = timestamp
existing.timestamp = timestamp
existing.sync_status = 'unsynced' # Mark for Garmin sync if we implement that direction
@@ -265,6 +283,7 @@ def sync_fitbit_weight_impl(request: WeightSyncRequest, db: Session):
new_record = WeightRecord(
fitbit_id=fitbit_id,
weight=weight_val,
bmi=bmi_val,
unit='kg',
date=timestamp,
timestamp=timestamp,
@@ -291,11 +310,7 @@ def sync_fitbit_weight_impl(request: WeightSyncRequest, db: Session):
job_id=f"fitbit-weight-sync-{datetime.now().strftime('%Y%m%d%H%M%S')}"
)
class WeightComparisonResponse(BaseModel):
fitbit_total: int
garmin_total: int
missing_in_garmin: int
message: str
@router.post("/sync/compare-weight", response_model=WeightComparisonResponse)
def compare_weight_records(db: Session = Depends(get_db)):
@@ -318,15 +333,24 @@ def compare_weight_records(db: Session = Depends(get_db)):
garmin_date_set = {d[0].date() for d in garmin_dates if d[0]}
# 3. Compare
missing_dates = fitbit_date_set - garmin_date_set
missing_dates_set = fitbit_date_set - garmin_date_set
missing_dates_list = sorted([d.isoformat() for d in missing_dates_set], reverse=True)
return WeightComparisonResponse(
fitbit_total=len(fitbit_date_set),
garmin_total=len(garmin_date_set),
missing_in_garmin=len(missing_dates),
message=f"Comparison Complete. Fitbit has {len(fitbit_date_set)} unique days, Garmin has {len(garmin_date_set)}. {len(missing_dates)} days from Fitbit are missing in Garmin."
missing_in_garmin=len(missing_dates_set),
missing_dates=missing_dates_list,
message=f"Comparison Complete. Fitbit has {len(fitbit_date_set)} unique days, Garmin has {len(garmin_date_set)}. {len(missing_dates_set)} days from Fitbit are missing in Garmin."
)
limit = request.limit
job_id = job_manager.create_job("garmin_weight_upload")
db_manager = PostgreSQLManager(config.DATABASE_URL)
background_tasks.add_task(run_garmin_upload_job, job_id, limit, db_manager.get_db_session)
return {"job_id": job_id, "status": "started"}
@router.get("/jobs/active", response_model=List[JobStatusResponse])
def get_active_jobs():
    """Return status snapshots for all currently active background jobs."""
    return job_manager.get_active_jobs()
@@ -336,3 +360,11 @@ def stop_job(job_id: str):
if job_manager.request_cancel(job_id):
return {"status": "cancelled", "message": f"Cancellation requested for job {job_id}"}
raise HTTPException(status_code=404, detail="Job not found")
@router.get("/jobs/{job_id}", response_model=JobStatusResponse)
def get_job_status(job_id: str):
    """Return the status of a single job; 404 when unknown."""
    found = job_manager.get_job(job_id)
    if not found:
        raise HTTPException(status_code=404, detail="Job not found")
    return found

View File

@@ -6,5 +6,10 @@ from .api_token import APIToken
from .auth_status import AuthStatus
from .weight_record import WeightRecord
from .activity import Activity
from .job import Job
from .health_metric import HealthMetric
from .sync_log import SyncLog
from .sync_log import SyncLog
from .activity_state import GarminActivityState
from .health_state import HealthSyncState
from .scheduled_job import ScheduledJob
from .bike_setup import BikeSetup

View File

@@ -1,4 +1,5 @@
from sqlalchemy import Column, Integer, String, DateTime, Text, LargeBinary
from sqlalchemy import Column, Integer, String, DateTime, Text, LargeBinary, Float, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from ..models import Base
@@ -11,9 +12,34 @@ class Activity(Base):
activity_type = Column(String, nullable=True) # Type of activity (e.g., 'running', 'cycling')
start_time = Column(DateTime, nullable=True) # Start time of the activity
duration = Column(Integer, nullable=True) # Duration in seconds
duration = Column(Integer, nullable=True) # Duration in seconds
# Extended Metrics
distance = Column(Float, nullable=True) # meters
calories = Column(Float, nullable=True) # kcal
avg_hr = Column(Integer, nullable=True) # bpm
max_hr = Column(Integer, nullable=True) # bpm
avg_speed = Column(Float, nullable=True) # m/s
max_speed = Column(Float, nullable=True) # m/s
elevation_gain = Column(Float, nullable=True) # meters
elevation_loss = Column(Float, nullable=True) # meters
avg_cadence = Column(Integer, nullable=True) # rpm/spm
max_cadence = Column(Integer, nullable=True) # rpm/spm
steps = Column(Integer, nullable=True)
aerobic_te = Column(Float, nullable=True) # 0-5
anaerobic_te = Column(Float, nullable=True) # 0-5
avg_power = Column(Integer, nullable=True) # watts
max_power = Column(Integer, nullable=True) # watts
norm_power = Column(Integer, nullable=True) # watts
tss = Column(Float, nullable=True) # Training Stress Score
vo2_max = Column(Float, nullable=True) # ml/kg/min
file_content = Column(LargeBinary, nullable=True) # Activity file content stored in database (base64 encoded)
file_type = Column(String, nullable=True) # File type (.fit, .gpx, .tcx, etc.)
download_status = Column(String, default='pending') # 'pending', 'downloaded', 'failed'
downloaded_at = Column(DateTime, nullable=True) # When downloaded
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
bike_setup_id = Column(Integer, ForeignKey("bike_setups.id"), nullable=True)
bike_setup = relationship("BikeSetup")

View File

@@ -0,0 +1,12 @@
from sqlalchemy import Column, Integer, String, DateTime, func
from ..models import Base
class GarminActivityState(Base):
    """Per-activity sync bookkeeping: which Garmin activities have been seen
    and whether they still need syncing."""
    __tablename__ = "garmin_activity_state"

    garmin_activity_id = Column(String, primary_key=True, index=True)  # Garmin's own activity id
    activity_name = Column(String, nullable=True)
    activity_type = Column(String, nullable=True)
    start_time = Column(DateTime, nullable=True)
    sync_status = Column(String, default='new')  # 'new', 'updated', 'synced'
    # Refreshed automatically on every insert/update of the row.
    last_seen = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

View File

@@ -0,0 +1,15 @@
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.sql import func
from .base import Base
class BikeSetup(Base):
    """A bike configuration (frame plus gear combination) used to match
    cycling activities by observed gear ratio."""
    __tablename__ = "bike_setups"

    id = Column(Integer, primary_key=True, index=True)
    frame = Column(String, nullable=False)
    # chainring/rear_cog are used as the mechanical gear ratio
    # (chainring / rear_cog) — presumably tooth counts; confirm with UI.
    chainring = Column(Integer, nullable=False)
    rear_cog = Column(Integer, nullable=False)
    name = Column(String, nullable=True)  # Optional, can be derived or user-set
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

View File

@@ -0,0 +1,16 @@
from sqlalchemy import Column, Integer, String, DateTime, Date, func, UniqueConstraint
from ..models import Base
class HealthSyncState(Base):
    """Per-(date, metric, source) sync bookkeeping for health metrics."""
    __tablename__ = "health_sync_state"

    id = Column(Integer, primary_key=True, index=True)
    date = Column(Date, nullable=False)
    metric_type = Column(String, nullable=False)  # 'steps', 'weight', 'sleep', etc.
    source = Column(String, nullable=False)  # 'garmin', 'fitbit'
    sync_status = Column(String, default='new')  # 'new', 'updated', 'synced'
    # Refreshed automatically on every insert/update of the row.
    last_seen = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

    # At most one state row per (date, metric_type, source) combination.
    __table_args__ = (
        UniqueConstraint('date', 'metric_type', 'source', name='uq_health_state'),
    )

View File

@@ -0,0 +1,19 @@
from sqlalchemy import Column, Integer, String, DateTime, Text, Boolean, JSON, func
from .base import Base
class Job(Base):
    """Persisted record of a background job (durable counterpart to the
    in-memory job_manager state)."""
    __tablename__ = 'jobs'

    id = Column(String, primary_key=True, index=True)
    operation = Column(String, nullable=False)  # human-readable job name
    status = Column(String, nullable=False, default='running')
    start_time = Column(DateTime(timezone=True), nullable=False)
    end_time = Column(DateTime(timezone=True), nullable=True)
    progress = Column(Integer, default=0)  # percent complete, 0-100
    message = Column(Text, nullable=True)
    result = Column(JSON, nullable=True)  # structured job output, shape depends on the task
    cancel_requested = Column(Boolean, default=False)
    paused = Column(Boolean, default=False)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

View File

@@ -0,0 +1,20 @@
from sqlalchemy import Column, Integer, String, DateTime, Boolean, Text
from sqlalchemy.sql import func
from .base import Base
class ScheduledJob(Base):
    """A recurring job definition driven by the scheduler service."""
    __tablename__ = 'scheduled_jobs'

    id = Column(Integer, primary_key=True, index=True)
    job_type = Column(String, nullable=False)  # e.g. 'fitbit_weight_sync'
    name = Column(String, nullable=False)
    interval_minutes = Column(Integer, nullable=False, default=60)
    params = Column(Text, nullable=True)  # JSON string
    enabled = Column(Boolean, default=True)
    last_run = Column(DateTime(timezone=True), nullable=True)
    # A null next_run on an enabled job means "due immediately".
    next_run = Column(DateTime(timezone=True), nullable=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

View File

@@ -8,6 +8,7 @@ class WeightRecord(Base):
id = Column(Integer, primary_key=True, index=True)
fitbit_id = Column(String, unique=True, nullable=False) # Original Fitbit ID to prevent duplicates
weight = Column(Float, nullable=False) # Weight value
bmi = Column(Float, nullable=True) # BMI value
unit = Column(String, nullable=False) # Unit (e.g., 'kg', 'lbs')
date = Column(DateTime, nullable=False) # Date of measurement
timestamp = Column(DateTime, nullable=False) # Exact timestamp

View File

@@ -0,0 +1,34 @@
from fastapi import APIRouter, Request
from fastapi.templating import Jinja2Templates
router = APIRouter()
templates = Jinja2Templates(directory="templates")
# HTML page routes. Each handler simply renders its Jinja2 template; all
# data is loaded client-side via the JSON API.

@router.get("/")
async def read_root(request: Request):
    """Render the main dashboard page."""
    return templates.TemplateResponse("index.html", {"request": request})


@router.get("/activities")
async def activities_page(request: Request):
    """Render the activities list page."""
    return templates.TemplateResponse("activities.html", {"request": request})


@router.get("/setup")
async def setup_page(request: Request):
    """Render the setup/configuration page."""
    return templates.TemplateResponse("setup.html", {"request": request})


@router.get("/garmin-health")
async def garmin_health_page(request: Request):
    """Render the Garmin health metrics page."""
    return templates.TemplateResponse("garmin_health.html", {"request": request})


@router.get("/fitbit-health")
async def fitbit_health_page(request: Request):
    """Render the Fitbit health metrics page."""
    return templates.TemplateResponse("fitbit_health.html", {"request": request})


@router.get("/bike-setups")
async def bike_setups_page(request: Request):
    """Render the bike setups management page."""
    return templates.TemplateResponse("bike_setups.html", {"request": request})


@router.get("/activity/{activity_id}")
async def activity_view_page(request: Request, activity_id: str):
    """Render the single-activity detail page for the given activity id."""
    return templates.TemplateResponse("activity_view.html", {"request": request, "activity_id": activity_id})

View File

@@ -0,0 +1,129 @@
import logging
from typing import List, Optional
from sqlalchemy.orm import Session
from ..models.activity import Activity
from ..models.bike_setup import BikeSetup
logger = logging.getLogger(__name__)
# Constants
WHEEL_CIRCUMFERENCE_M = 2.1  # Approx 700x23c/28c generic wheel rollout, metres
TOLERANCE_PERCENT = 0.15  # max relative deviation between observed and mechanical ratio


def calculate_observed_ratio(speed_mps: float, cadence_rpm: float) -> float:
    """Derive the gear ratio implied by average speed and cadence.

    Speed = (Cadence * Ratio * Circumference) / 60, hence
    Ratio = (Speed * 60) / (Cadence * Circumference).

    Args:
        speed_mps: average speed in metres per second.
        cadence_rpm: average cadence in revolutions per minute.

    Returns:
        The observed ratio, or 0.0 when cadence is missing/zero
        (the ratio is undefined without pedal revolutions).
    """
    # `not cadence_rpm` already covers 0/0.0/None; the original's extra
    # `or cadence_rpm == 0` was redundant.
    if not cadence_rpm:
        return 0.0
    return (speed_mps * 60) / (cadence_rpm * WHEEL_CIRCUMFERENCE_M)
def match_activity_to_bike(db: Session, activity: Activity) -> Optional[BikeSetup]:
    """Return the BikeSetup whose mechanical gear ratio best matches the
    activity's observed speed/cadence ratio.

    Returns None when the activity is not outdoor cycling, lacks speed or
    cadence data, or no configured setup falls within tolerance.
    """
    if not activity.activity_type:
        return None
    kind = activity.activity_type.lower()

    # Inclusive keyword match: covers 'cycling', 'gravel_cycling',
    # 'road_biking', 'mountain_biking', 'mtb', 'cyclocross', etc.
    cycling_keywords = ('cycling', 'road_biking', 'mountain_biking', 'mtb', 'cyclocross')
    if not any(word in kind for word in cycling_keywords):
        return None
    # Indoor rides have no meaningful speed-to-cadence relationship.
    if 'indoor' in kind:
        return None

    if not activity.avg_speed or not activity.avg_cadence:
        return None
    observed = calculate_observed_ratio(activity.avg_speed, activity.avg_cadence)
    if observed == 0:
        return None

    candidates = db.query(BikeSetup).all()
    if not candidates:
        return None

    best = None
    best_diff = float('inf')
    for candidate in candidates:
        if not candidate.chainring or not candidate.rear_cog:
            continue
        mechanical = candidate.chainring / candidate.rear_cog
        diff = abs(observed - mechanical)
        # Accept only setups within the relative tolerance; keep the closest.
        if diff / mechanical <= TOLERANCE_PERCENT and diff < best_diff:
            best_diff = diff
            best = candidate
    return best
def process_activity_matching(db: Session, activity_id: int):
    """Match one activity to a bike setup and persist the result.

    Falls back to the setup named "GenericBike" when no ratio match is
    found; leaves bike_setup_id as None when no generic setup exists either.
    Commits the session before returning.
    """
    activity = db.query(Activity).filter(Activity.id == activity_id).first()
    if not activity:
        return
    match = match_activity_to_bike(db, activity)
    if match:
        activity.bike_setup_id = match.id
        # Compute the ratio once for the log line (the original recomputed
        # it inline inside the f-string).
        ratio = calculate_observed_ratio(activity.avg_speed, activity.avg_cadence)
        logger.info(f"Matched Activity {activity.id} to Setup {match.frame} (Found Ratio: {ratio:.2f})")
    else:
        # Explicit default per product requirement: unmatched rides go to
        # the "GenericBike" setup when one is configured.
        generic = db.query(BikeSetup).filter(BikeSetup.name == "GenericBike").first()
        activity.bike_setup_id = generic.id if generic else None
    db.commit()
def run_matching_for_all(db: Session):
    """Run bike matching for every outdoor cycling activity that has no setup yet."""
    from sqlalchemy import or_

    cycling_filter = or_(
        Activity.activity_type.ilike('%cycling%'),
        Activity.activity_type.ilike('%road_biking%'),
        Activity.activity_type.ilike('%mountain%'),  # catches mountain_biking
        Activity.activity_type.ilike('%mtb%'),
        Activity.activity_type.ilike('%cyclocross%'),
    )
    pending = db.query(Activity).filter(
        Activity.bike_setup_id == None,
        cycling_filter,
        Activity.activity_type.notilike('%indoor%'),
    ).all()

    for act in pending:
        process_activity_matching(db, act.id)
    logger.info(f"Ran matching for {len(pending)} activities.")

View File

@@ -9,7 +9,7 @@ from ..utils.helpers import setup_logger
logger = setup_logger(__name__)
class FitbitClient:
def __init__(self, client_id: str, client_secret: str, access_token: str = None, refresh_token: str = None, redirect_uri: str = None):
def __init__(self, client_id: str, client_secret: str, access_token: str = None, refresh_token: str = None, redirect_uri: str = None, refresh_cb = None):
self.client_id = client_id
self.client_secret = client_secret
self.access_token = access_token
@@ -26,7 +26,9 @@ class FitbitClient:
access_token=access_token,
refresh_token=refresh_token,
redirect_uri=redirect_uri,
timeout=10
refresh_cb=refresh_cb,
timeout=10,
system='METRIC'
)
def get_authorization_url(self, redirect_uri: str = None) -> str:
@@ -41,7 +43,8 @@ class FitbitClient:
self.client_id,
self.client_secret,
redirect_uri=redirect_uri,
timeout=10
timeout=10,
system='METRIC'
)
# The example calls self.fitbit.client.authorize_token_url()
@@ -61,11 +64,12 @@ class FitbitClient:
"""Exchange authorization code for access and refresh tokens."""
# If redirect_uri is provided here, ensure we are using a client configured with it
if redirect_uri and redirect_uri != self.redirect_uri:
self.fitbit = fitbit.Fitbit(
self.fitbit = fitbit.Fitbit(
self.client_id,
self.client_secret,
redirect_uri=redirect_uri,
timeout=10
timeout=10,
system='METRIC'
)
logger.info(f"Exchanging authorization code for tokens")

View File

@@ -42,14 +42,20 @@ class AuthMixin:
logger.debug("garth.login successful.")
logger.debug("Attempting to save tokens to database...")
# If successful, garth still populates the global client?
# The return signature is tokens, but let's assume global client is also updated as usual.
# However, with return_on_mfa=True, result might be the tokens tuple.
# Let's inspect result structure if not MFA.
# To be safe, we can use global client or extract from result if it's tokens.
# But existing code uses global client. Let's trust it for now unless issues arise.
# Sync tokens to the garminconnect client instance if it exists
if hasattr(self, 'client') and hasattr(self.client, 'garth'):
logger.debug("Syncing tokens to internal garminconnect client...")
try:
self.client.garth.oauth1_token = garth.client.oauth1_token
self.client.garth.oauth2_token = garth.client.oauth2_token
logger.debug("Internal client tokens synced.")
except Exception as sync_e:
logger.error(f"Failed to sync tokens to internal client: {sync_e}")
self.update_tokens(db, garth.client.oauth1_token, garth.client.oauth2_token)
logger.debug("Tokens saved successfully.")
logger.debug("Tokens saved successfully.")
self.is_connected = True
logger.info("Login flow completed successfully.")

Some files were not shown because too many files have changed in this diff Show More