feat: Update spec, fix bugs, improve UI/UX, and clean up code

This commit is contained in:
2025-12-25 08:33:01 -08:00
parent 8fe375a966
commit df9dcb2f79
21 changed files with 1741 additions and 1055 deletions

View File

@@ -2,55 +2,47 @@ from fastapi import FastAPI, Request
from fastapi.templating import Jinja2Templates
from fastapi.staticfiles import StaticFiles
from contextlib import asynccontextmanager
from src.services.postgresql_manager import PostgreSQLManager
from src.utils.logging_config import setup_logging
from alembic.config import Config
from alembic import command
import os
import logging
# Create application lifespan to handle startup/shutdown
@asynccontextmanager
async def lifespan(app: FastAPI):
# Startup
# Run database migrations
alembic_cfg = Config("alembic.ini")
database_url = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5432/fitbit_garmin_sync")
alembic_cfg.set_main_option("sqlalchemy.url", database_url)
command.upgrade(alembic_cfg, "head")
setup_logging()
logger = logging.getLogger(__name__)
logger.info("--- Application Starting Up ---")
# Initialize database tables
db_manager = PostgreSQLManager(database_url=database_url)
db_manager.init_db()
alembic_cfg = Config("alembic.ini")
database_url = os.getenv("DATABASE_URL")
if database_url:
alembic_cfg.set_main_option("sqlalchemy.url", database_url)
try:
command.upgrade(alembic_cfg, "head")
logger.info("Database migrations checked/applied.")
except Exception as e:
logger.error(f"Error running database migrations: {e}")
else:
logger.warning("DATABASE_URL not set, skipping migrations.")
yield
# Shutdown
# Add any cleanup code here if needed
logger.info("--- Application Shutting Down ---")
# Create FastAPI app with lifespan
app = FastAPI(lifespan=lifespan)
# Mount static files
app.mount("/static", StaticFiles(directory="static"), name="static")
# Initialize templates
templates = Jinja2Templates(directory="templates")
# Include API routes
from src.api.status import router as status_router
from src.api.sync import router as sync_router
from src.api.setup import router as setup_router
from src.api.logs import router as logs_router
from src.api.metrics import router as metrics_router
from src.api.activities import router as activities_router
from src.api import status, sync, setup, logs, metrics, activities
app.include_router(status_router, prefix="/api")
app.include_router(sync_router, prefix="/api")
app.include_router(setup_router, prefix="/api")
app.include_router(logs_router, prefix="/api")
app.include_router(metrics_router, prefix="/api")
app.include_router(activities_router, prefix="/api")
from fastapi import Request
app.include_router(status.router, prefix="/api")
app.include_router(sync.router, prefix="/api")
app.include_router(setup.router, prefix="/api")
app.include_router(logs.router, prefix="/api")
app.include_router(metrics.router, prefix="/api")
app.include_router(activities.router, prefix="/api")
@app.get("/")
async def read_root(request: Request):
@@ -58,4 +50,4 @@ async def read_root(request: Request):
@app.get("/setup")
async def setup_page(request: Request):
    """Render the setup page template."""
    # Bug fix: the diff residue duplicated the return statement; keep one.
    return templates.TemplateResponse("setup.html", {"request": request})

View File

@@ -1,9 +1,22 @@
from fastapi import APIRouter, Query, Response
from fastapi import APIRouter, Query, Response, HTTPException, Depends
from pydantic import BaseModel
from typing import List, Optional, Dict, Any
from sqlalchemy import func
from ..models.activity import Activity
import logging
from ..services.postgresql_manager import PostgreSQLManager
from sqlalchemy.orm import Session
from ..utils.config import config
router = APIRouter()
logger = logging.getLogger(__name__)
def get_db():
    """FastAPI dependency: yield a PostgreSQL session scoped to one request."""
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as session:
        yield session
class ActivityResponse(BaseModel):
id: Optional[int] = None
garmin_activity_id: Optional[str] = None
@@ -19,26 +32,143 @@ class ActivityResponse(BaseModel):
@router.get("/activities/list", response_model=List[ActivityResponse])
async def list_activities(
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    db: Session = Depends(get_db)
):
    """
    Return metadata for all downloaded/available activities.

    Results are paginated with `offset`/`limit`; datetimes are serialized
    to ISO-8601 strings to match ActivityResponse.
    """
    try:
        logger.info(f"Listing activities with limit={limit}, offset={offset}")
        activities = db.query(Activity).offset(offset).limit(limit).all()
        # Convert SQLAlchemy rows to Pydantic response models.
        activity_responses = [
            ActivityResponse(
                id=activity.id,
                garmin_activity_id=activity.garmin_activity_id,
                activity_name=activity.activity_name,
                activity_type=activity.activity_type,
                start_time=activity.start_time.isoformat() if activity.start_time else None,
                duration=activity.duration,
                file_type=activity.file_type,
                download_status=activity.download_status,
                downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None
            )
            for activity in activities
        ]
        logger.info(f"Returning {len(activity_responses)} activities")
        return activity_responses
    except Exception as e:
        logger.error(f"Error in list_activities: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error listing activities: {str(e)}")
@router.get("/activities/query", response_model=List[ActivityResponse])
async def query_activities(
    activity_type: Optional[str] = Query(None),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None),
    download_status: Optional[str] = Query(None),
    db: Session = Depends(get_db)
):
    """
    Allow advanced filtering of activities.

    `start_date`/`end_date` are ISO-8601 strings compared (inclusively)
    against Activity.start_time.
    """
    try:
        logger.info(f"Querying activities - type: {activity_type}, start: {start_date}, end: {end_date}, status: {download_status}")
        # Hoisted once instead of re-importing inside each filter branch.
        from datetime import datetime
        query = db.query(Activity)
        if activity_type:
            query = query.filter(Activity.activity_type == activity_type)
        if start_date:
            start_dt = datetime.fromisoformat(start_date)
            query = query.filter(Activity.start_time >= start_dt)
        if end_date:
            end_dt = datetime.fromisoformat(end_date)
            query = query.filter(Activity.start_time <= end_dt)
        if download_status:
            query = query.filter(Activity.download_status == download_status)
        activities = query.all()
        # Convert SQLAlchemy rows to Pydantic response models.
        activity_responses = [
            ActivityResponse(
                id=activity.id,
                garmin_activity_id=activity.garmin_activity_id,
                activity_name=activity.activity_name,
                activity_type=activity.activity_type,
                start_time=activity.start_time.isoformat() if activity.start_time else None,
                duration=activity.duration,
                file_type=activity.file_type,
                download_status=activity.download_status,
                downloaded_at=activity.downloaded_at.isoformat() if activity.downloaded_at else None
            )
            for activity in activities
        ]
        logger.info(f"Returning {len(activity_responses)} filtered activities")
        return activity_responses
    except Exception as e:
        logger.error(f"Error in query_activities: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error querying activities: {str(e)}")
@router.get("/activities/download/{activity_id}")
async def download_activity(activity_id: str, db: Session = Depends(get_db)):
    """
    Serve the stored activity file from the database.

    Looks the record up by Garmin activity ID and returns the raw file
    content with a media type derived from the stored file type.

    Raises:
        HTTPException: 404 if the activity or its file content is missing,
            400 if the file is not in 'downloaded' state, 500 otherwise.
    """
    try:
        logger.info(f"Downloading activity with ID: {activity_id}")
        activity = db.query(Activity).filter(Activity.garmin_activity_id == activity_id).first()
        if not activity:
            raise HTTPException(status_code=404, detail=f"Activity with ID {activity_id} not found")
        if not activity.file_content:
            raise HTTPException(status_code=404, detail=f"No file content available for activity {activity_id}")
        if activity.download_status != 'downloaded':
            raise HTTPException(status_code=400, detail=f"File for activity {activity_id} is not ready for download (status: {activity.download_status})")
        # Map stored file type to a MIME type; FIT files are binary.
        content_type_map = {
            'tcx': 'application/vnd.garmin.tcx+xml',
            'gpx': 'application/gpx+xml',
            'fit': 'application/octet-stream'
        }
        content_type = content_type_map.get(activity.file_type, 'application/octet-stream')
        filename = f"activity_{activity_id}.{activity.file_type}"
        logger.info(f"Returning file for activity {activity_id} with content type {content_type}")
        return Response(
            content=activity.file_content,
            media_type=content_type,
            headers={
                # Bug fix: the filename interpolation was garbled in the
                # source; use the computed filename built above.
                "Content-Disposition": f"attachment; filename={filename}",
                "Content-Length": str(len(activity.file_content))
            }
        )
    except HTTPException:
        # Re-raise HTTP exceptions as-is
        raise
    except Exception as e:
        logger.error(f"Error in download_activity for ID {activity_id}: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error downloading activity: {str(e)}")

View File

@@ -1,9 +1,22 @@
from fastapi import APIRouter, Query
from fastapi import APIRouter, Query, HTTPException, Depends
from pydantic import BaseModel
from typing import List, Optional, Dict, Any
from sqlalchemy import func
from ..models.health_metric import HealthMetric
import logging
from ..services.postgresql_manager import PostgreSQLManager
from sqlalchemy.orm import Session
from ..utils.config import config
router = APIRouter()
logger = logging.getLogger(__name__)
def get_db():
    """Dependency that provides a database session for the request lifetime."""
    with PostgreSQLManager(config.DATABASE_URL).get_db_session() as session:
        yield session
class HealthMetricResponse(BaseModel):
id: int
metric_type: str
@@ -28,71 +41,188 @@ class HealthDataSummary(BaseModel):
total_sleep_hours: Optional[float] = 0.0
avg_calories: Optional[float] = 0.0
class ActivityResponse(BaseModel):
    """API-facing view of a downloaded Garmin activity record."""
    id: Optional[int] = None
    garmin_activity_id: Optional[str] = None
    activity_name: Optional[str] = None
    activity_type: Optional[str] = None
    start_time: Optional[str] = None  # ISO-8601 string, not a datetime
    duration: Optional[int] = None
    file_path: Optional[str] = None
    file_type: Optional[str] = None  # e.g. 'tcx', 'gpx', 'fit'
    download_status: Optional[str] = None
    downloaded_at: Optional[str] = None  # ISO-8601 string
@router.get("/metrics/list", response_model=MetricsListResponse)
async def list_available_metrics(db: Session = Depends(get_db)):
    """
    Return available metric types and date ranges.

    Metric types are the distinct values stored in HealthMetric.metric_type;
    the date range spans the min/max of HealthMetric.date.
    """
    try:
        logger.info("Listing available metrics")
        # Distinct metric types, dropping NULL rows.
        metric_types_result = db.query(HealthMetric.metric_type).distinct().all()
        metric_types = [row[0] for row in metric_types_result if row[0] is not None]
        # Overall date range of stored metrics; None when the table is empty.
        min_date_result = db.query(func.min(HealthMetric.date)).scalar()
        max_date_result = db.query(func.max(HealthMetric.date)).scalar()
        start_date = min_date_result.isoformat() if min_date_result else None
        end_date = max_date_result.isoformat() if max_date_result else None
        response = {
            "metric_types": metric_types,
            "date_range": {
                "start_date": start_date,
                "end_date": end_date
            }
        }
        logger.info(f"Returning {len(metric_types)} metric types with date range {start_date} to {end_date}")
        return response
    except Exception as e:
        logger.error(f"Error in list_available_metrics: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error listing metrics: {str(e)}")
@router.get("/metrics/query", response_model=List[HealthMetricResponse])
async def query_metrics(
    metric_type: Optional[str] = Query(None),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None),
    limit: int = Query(100, ge=1, le=1000),
    db: Session = Depends(get_db)
):
    """
    Query health metrics with filters.

    `start_date`/`end_date` are ISO-8601 strings; only their date parts are
    compared against HealthMetric.date (inclusive bounds).
    """
    try:
        logger.info(f"Querying metrics - type: {metric_type}, start: {start_date}, end: {end_date}, limit: {limit}")
        # Hoisted once instead of re-importing inside each filter branch.
        from datetime import datetime
        query = db.query(HealthMetric)
        if metric_type:
            query = query.filter(HealthMetric.metric_type == metric_type)
        if start_date:
            start_dt = datetime.fromisoformat(start_date)
            query = query.filter(HealthMetric.date >= start_dt.date())
        if end_date:
            end_dt = datetime.fromisoformat(end_date)
            query = query.filter(HealthMetric.date <= end_dt.date())
        health_metrics = query.limit(limit).all()
        # Convert SQLAlchemy rows to Pydantic response models.
        metric_responses = [
            HealthMetricResponse(
                id=metric.id,
                metric_type=metric.metric_type,
                metric_value=metric.metric_value,
                unit=metric.unit,
                timestamp=metric.timestamp.isoformat() if metric.timestamp else "",
                date=metric.date.isoformat() if metric.date else "",
                source=metric.source,
                detailed_data=metric.detailed_data
            )
            for metric in health_metrics
        ]
        logger.info(f"Returning {len(metric_responses)} health metrics")
        return metric_responses
    except Exception as e:
        logger.error(f"Error in query_metrics: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error querying metrics: {str(e)}")
def _apply_date_range(query, start_date: Optional[str], end_date: Optional[str]):
    """Apply inclusive ISO-date bounds to a HealthMetric aggregate query."""
    from datetime import datetime
    if start_date:
        query = query.filter(HealthMetric.date >= datetime.fromisoformat(start_date).date())
    if end_date:
        query = query.filter(HealthMetric.date <= datetime.fromisoformat(end_date).date())
    return query


@router.get("/health-data/summary", response_model=HealthDataSummary)
async def get_health_summary(
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None),
    db: Session = Depends(get_db)
):
    """
    Return aggregated health statistics.

    Sums steps, averages heart rate and calories, and totals sleep within
    the optional [start_date, end_date] window. All aggregates fall back to
    0 / 0.0 when no matching rows exist.
    """
    try:
        logger.info(f"Getting health summary - start: {start_date}, end: {end_date}")
        # Total steps in the window.
        steps_query = _apply_date_range(
            db.query(func.sum(HealthMetric.metric_value)).filter(HealthMetric.metric_type == 'steps'),
            start_date, end_date)
        total_steps_result = steps_query.scalar()
        total_steps = int(total_steps_result) if total_steps_result is not None else 0
        # Average heart rate in the window.
        hr_query = _apply_date_range(
            db.query(func.avg(HealthMetric.metric_value)).filter(HealthMetric.metric_type == 'heart_rate'),
            start_date, end_date)
        avg_hr_result = hr_query.scalar()
        avg_heart_rate = float(avg_hr_result) if avg_hr_result is not None else 0.0
        # Sleep is assumed stored in minutes (per original note) — convert to hours.
        sleep_query = _apply_date_range(
            db.query(func.sum(HealthMetric.metric_value)).filter(HealthMetric.metric_type == 'sleep'),
            start_date, end_date)
        total_sleep_minutes_result = sleep_query.scalar()
        total_sleep_hours = (total_sleep_minutes_result / 60) if total_sleep_minutes_result is not None else 0.0
        # Average calories in the window.
        calories_query = _apply_date_range(
            db.query(func.avg(HealthMetric.metric_value)).filter(HealthMetric.metric_type == 'calories'),
            start_date, end_date)
        avg_calories_result = calories_query.scalar()
        avg_calories = float(avg_calories_result) if avg_calories_result is not None else 0.0
        summary = HealthDataSummary(
            total_steps=total_steps,
            avg_heart_rate=round(avg_heart_rate, 2),
            total_sleep_hours=round(total_sleep_hours, 2),
            avg_calories=round(avg_calories, 2)
        )
        logger.info(f"Returning health summary: steps={total_steps}, avg_hr={avg_heart_rate}, sleep_hours={total_sleep_hours}, avg_calories={avg_calories}")
        return summary
    except Exception as e:
        logger.error(f"Error in get_health_summary: {str(e)}")
        raise HTTPException(status_code=500, detail=f"Error getting health summary: {str(e)}")

View File

@@ -3,16 +3,14 @@ from fastapi.responses import JSONResponse
from pydantic import BaseModel
from typing import Optional
from sqlalchemy.orm import Session
import traceback
import httpx
import base64
import json
import logging
from ..services.garmin.client import GarminClient
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
import garth
from ..services.garmin.client import GarminClient
router = APIRouter()
logger = logging.getLogger(__name__)
def get_db():
db_manager = PostgreSQLManager(config.DATABASE_URL)
@@ -24,260 +22,35 @@ class GarminCredentials(BaseModel):
password: str
is_china: bool = False
class FitbitCredentials(BaseModel):
    """OAuth client credentials for the Fitbit API."""
    client_id: str
    client_secret: str


class FitbitCallback(BaseModel):
    # Full redirect URL received from Fitbit after user authorization.
    callback_url: str


class GarminMFARequest(BaseModel):
    """Payload for completing the Garmin MFA login step."""
    verification_code: str
    # Session identifier for the pending login; the setup flow uses "garmin".
    session_id: str
class AuthStatusResponse(BaseModel):
    """Per-provider authentication status for the setup UI."""
    # Bug fix: this class was declared twice back-to-back; keep one copy.
    garmin: Optional[dict] = None
    fitbit: Optional[dict] = None
@router.post("/setup/load-consul-config")
async def load_consul_config(db: Session = Depends(get_db)):
    """
    Load configuration from Consul and save it to the database.

    Prefers stored Garmin OAuth tokens from Consul; if absent, falls back
    to username/password login via GarminClient.

    Raises:
        HTTPException: 404 when no config is stored in Consul, 400 when the
            fallback login fails, 500 on connection or unexpected errors.
    """
    consul_url = "http://consul.service.dc1.consul:8500/v1/kv/fitbit-garmin-sync/config"
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(consul_url)
            response.raise_for_status()
            data = response.json()
        if not (data and 'Value' in data[0]):
            raise HTTPException(status_code=404, detail="Config not found in Consul")
        # Consul stores KV values base64-encoded.
        config_value = base64.b64decode(data[0]['Value']).decode('utf-8')
        consul_config = json.loads(config_value)  # renamed: avoid shadowing the imported `config`
        if 'garmin' in consul_config:
            garmin_config = consul_config['garmin']
            from ..models.api_token import APIToken
            from datetime import datetime
            # Prefer tokens if available
            if 'garth_oauth1_token' in garmin_config and 'garth_oauth2_token' in garmin_config:
                token_record = db.query(APIToken).filter_by(token_type='garmin').first()
                if not token_record:
                    token_record = APIToken(token_type='garmin')
                    db.add(token_record)
                token_record.garth_oauth1_token = garmin_config['garth_oauth1_token']
                token_record.garth_oauth2_token = garmin_config['garth_oauth2_token']
                token_record.updated_at = datetime.now()
                db.commit()
                return {"status": "success", "message": "Garmin tokens from Consul have been saved."}
            # Fallback to username/password login
            elif 'username' in garmin_config and 'password' in garmin_config:
                garmin_creds = GarminCredentials(**garmin_config)
                garmin_client = GarminClient(garmin_creds.username, garmin_creds.password, garmin_creds.is_china)
                status = garmin_client.login()
                if status == "mfa_required":
                    return {"status": "mfa_required", "message": "Garmin login from Consul requires MFA. Please complete it manually."}
                elif status != "success":
                    raise HTTPException(status_code=400, detail=f"Failed to login to Garmin with Consul credentials: {status}")
        # TODO: Add Fitbit credentials handling
        return {"status": "success", "message": "Configuration from Consul processed."}
    except HTTPException:
        # Bug fix: the broad handler below used to swallow the deliberate
        # 404/400 responses raised above and turn them into generic 500s.
        raise
    except httpx.RequestError as e:
        raise HTTPException(status_code=500, detail=f"Failed to connect to Consul: {e}") from e
    except Exception as e:
        # Log with traceback instead of printing to stderr.
        logger.error(f"An error occurred: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"An error occurred: {e}") from e
@router.get("/setup/auth-status", response_model=AuthStatusResponse)
async def get_auth_status(db: Session = Depends(get_db)):
    """Report whether Garmin and Fitbit tokens are stored and usable."""
    from ..models.api_token import APIToken

    # Garmin: authenticated means both garth OAuth tokens are present.
    garmin_token = db.query(APIToken).filter_by(token_type='garmin').first()
    if garmin_token is None:
        garmin_status = {"token_stored": False, "authenticated": False}
    else:
        has_oauth1 = garmin_token.garth_oauth1_token is not None
        has_oauth2 = garmin_token.garth_oauth2_token is not None
        garmin_status = {
            "token_stored": True,
            "authenticated": has_oauth1 and has_oauth2,
            "garth_oauth1_token_exists": has_oauth1,
            "garth_oauth2_token_exists": has_oauth2,
            "mfa_state_exists": garmin_token.mfa_state is not None,
            "mfa_expires_at": garmin_token.mfa_expires_at,
            "last_used": garmin_token.last_used,
            "updated_at": garmin_token.updated_at,
            "username": "N/A",  # Placeholder, username is not stored in APIToken
            "is_china": False  # Placeholder
        }

    # Fitbit: authenticated means an access token is stored.
    fitbit_token = db.query(APIToken).filter_by(token_type='fitbit').first()
    if fitbit_token is None:
        fitbit_status = {"token_stored": False, "authenticated": False}
    else:
        access = fitbit_token.access_token
        fitbit_status = {
            "token_stored": True,
            "authenticated": access is not None,
            # NOTE(review): mirrors original behavior — exposes an
            # access-token prefix under the "client_id" key; confirm intent.
            "client_id": access[:10] + "..." if access else "N/A",
            "expires_at": fitbit_token.expires_at,
            "last_used": fitbit_token.last_used,
            "updated_at": fitbit_token.updated_at
        }

    return AuthStatusResponse(garmin=garmin_status, fitbit=fitbit_status)
@router.post("/setup/garmin")
async def save_garmin_credentials(credentials: GarminCredentials, db: Session = Depends(get_db)):
from ..utils.helpers import setup_logger
logger = setup_logger(__name__)
logger.info(f"Received Garmin credentials for user: {credentials.username}, is_china: {credentials.is_china}")
def save_garmin_credentials(credentials: GarminCredentials, db: Session = Depends(get_db)):
logger.info(f"Received Garmin credentials for user: {credentials.username}")
garmin_client = GarminClient(credentials.username, credentials.password, credentials.is_china)
logger.debug("GarminClient instance created successfully")
logger.debug("Attempting to log in to Garmin")
# Check the status returned directly
status = garmin_client.login()
status = garmin_client.login(db)
if status == "mfa_required":
# Hardcode the session_id as 'garmin' since you use a single record in APIToken
return JSONResponse(
status_code=200,
content={
"status": "mfa_required",
"message": "MFA Required",
"session_id": "garmin"
}
)
return JSONResponse(status_code=202, content={"status": "mfa_required", "message": "MFA code required."})
return JSONResponse(
status_code=200,
content={"status": "success", "message": "Logged in!"}
)
return JSONResponse(status_code=200, content={"status": "success", "message": "Logged in and tokens saved."})
@router.post("/setup/garmin/mfa")
async def complete_garmin_mfa(mfa_request: GarminMFARequest, db: Session = Depends(get_db)):
from ..utils.helpers import setup_logger
logger = setup_logger(__name__)
def complete_garmin_mfa(mfa_request: GarminMFARequest, db: Session = Depends(get_db)):
logger.info(f"Received MFA verification code: {'*' * len(mfa_request.verification_code)}")
try:
logger.info(f"Received MFA verification code for session {mfa_request.session_id}: {'*' * len(mfa_request.verification_code)}")
garmin_client = GarminClient()
success = garmin_client.handle_mfa(db, mfa_request.verification_code)
try:
garmin_client = GarminClient()
logger.debug(f"Attempting to handle MFA for session: {mfa_request.session_id}")
if success:
return JSONResponse(status_code=200, content={"status": "success", "message": "MFA verification successful, tokens saved."})
else:
raise HTTPException(status_code=400, detail="MFA verification failed.")
success = garmin_client.handle_mfa(mfa_request.verification_code, session_id=mfa_request.session_id)
if success:
logger.info(f"MFA verification completed successfully for session: {mfa_request.session_id}")
return JSONResponse(
status_code=200,
content={"status": "success", "message": "MFA verification completed successfully"}
)
else:
logger.error(f"MFA verification failed for session: {mfa_request.session_id}")
return JSONResponse(
status_code=400,
content={"status": "error", "message": "MFA verification failed"}
)
except Exception as e:
logger.error(f"MFA verification failed for session {mfa_request.session_id} with exception: {str(e)}")
logger.error(f"Exception type: {type(e).__name__}")
logger.error(f"Exception details: {repr(e)}")
logger.error(f"Full traceback: {traceback.format_exc()}")
return JSONResponse(
status_code=500,
content={"status": "error", "message": f"MFA verification failed: {str(e)}"}
)
except Exception as outer_error:
logger.error(f"Unexpected error in complete_garmin_mfa: {str(outer_error)}")
logger.error(f"Full traceback: {traceback.format_exc()}")
return JSONResponse(
status_code=500,
content={"status": "error", "message": f"Unexpected error: {str(outer_error)}"}
)
@router.post("/setup/fitbit")
async def save_fitbit_credentials(credentials: FitbitCredentials, db: Session = Depends(get_db)):
    """Accept Fitbit credentials and hand back an authorization URL (stub)."""
    # Placeholder payload until the Fitbit OAuth flow is implemented.
    return {
        "status": "success",
        "auth_url": "https://www.fitbit.com/oauth2/authorize?...",
        "message": "Fitbit credentials saved, please visit auth_url to authorize",
    }
@router.post("/setup/fitbit/callback")
async def fitbit_callback(callback_data: FitbitCallback, db: Session = Depends(get_db)):
    """Complete the Fitbit OAuth flow (stub — always reports success)."""
    return {"status": "success", "message": "Fitbit OAuth flow completed successfully"}
@router.post("/setup/garmin/test-token")
async def test_garmin_token(db: Session = Depends(get_db)):
    """
    Validate the stored Garmin tokens by fetching the user profile via garth.

    Raises:
        HTTPException: 404 when no complete token pair is stored,
            500 when the profile fetch fails.
    """
    from ..models.api_token import APIToken
    from garth.auth_tokens import OAuth1Token, OAuth2Token
    import json
    token_record = db.query(APIToken).filter_by(token_type='garmin').first()
    if not token_record or not token_record.garth_oauth1_token or not token_record.garth_oauth2_token:
        raise HTTPException(status_code=404, detail="Garmin token not found or incomplete.")
    try:
        # Security fix: never log raw OAuth token material; log presence only.
        logger.info("Testing stored Garmin tokens (oauth1 present=%s, oauth2 present=%s)",
                    bool(token_record.garth_oauth1_token), bool(token_record.garth_oauth2_token))
        import garth
        # Parse JSON to dictionaries and install them as garth token objects.
        oauth1_dict = json.loads(token_record.garth_oauth1_token)
        oauth2_dict = json.loads(token_record.garth_oauth2_token)
        garth.client.oauth1_token = OAuth1Token(**oauth1_dict)
        garth.client.oauth2_token = OAuth2Token(**oauth2_dict)
        # Also configure the regional domain if the token recorded one.
        if oauth1_dict.get('domain'):
            garth.configure(domain=oauth1_dict['domain'])
        profile_info = garth.UserProfile.get()
        return profile_info
    except Exception as e:
        # Log with traceback instead of printing to stderr.
        logger.error(f"Failed to test Garmin token: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Failed to test Garmin token: {e}") from e
logger.error(f"MFA verification failed with exception: {e}", exc_info=True)
raise HTTPException(status_code=500, detail=f"MFA verification failed: {str(e)}")

View File

@@ -1,36 +1,48 @@
from fastapi import APIRouter
from fastapi import APIRouter, Depends
from pydantic import BaseModel
from typing import List, Optional
from sqlalchemy.orm import Session
from ..services.postgresql_manager import PostgreSQLManager
from ..utils.config import config
from ..models.activity import Activity
from ..models.sync_log import SyncLog
from datetime import datetime
router = APIRouter()
def get_db():
    """Yield a request-scoped database session (FastAPI dependency)."""
    pg = PostgreSQLManager(config.DATABASE_URL)
    with pg.get_db_session() as session:
        yield session
class SyncLogResponse(BaseModel):
    """Serialized SyncLog row for the /status endpoint."""
    # Bug fix: the span contained interleaved old/new duplicate field
    # declarations; keep the datetime-typed versions with defaults.
    id: int
    operation: str
    status: str
    message: Optional[str] = None
    start_time: datetime
    end_time: Optional[datetime] = None
    records_processed: int
    records_failed: int

    class Config:
        # Allow construction directly from SQLAlchemy ORM objects.
        orm_mode = True
class StatusResponse(BaseModel):
    """Aggregate sync status returned by GET /api/status."""
    # Bug fix: these weight counters were required fields, but the status
    # endpoint only supplies activity data, causing a validation error at
    # runtime. Default to 0 (backward compatible for callers that set them).
    total_weight_records: int = 0
    synced_weight_records: int = 0
    unsynced_weight_records: int = 0
    total_activities: int
    downloaded_activities: int
    recent_logs: List[SyncLogResponse]
@router.get("/status", response_model=StatusResponse)
def get_status(db: Session = Depends(get_db)):
    """Return current sync status counters and the ten most recent sync logs."""
    total_activities = db.query(Activity).count()
    downloaded_activities = db.query(Activity).filter(Activity.download_status == 'downloaded').count()
    recent_logs = db.query(SyncLog).order_by(SyncLog.start_time.desc()).limit(10).all()
    # Bug fix: StatusResponse declares the weight counters as required
    # fields, so omitting them raised a validation error at runtime.
    # Weight data is not queried here yet; report zero until wired up.
    return StatusResponse(
        total_weight_records=0,
        synced_weight_records=0,
        unsynced_weight_records=0,
        total_activities=total_activities,
        downloaded_activities=downloaded_activities,
        recent_logs=recent_logs,
    )

View File

@@ -1,11 +1,20 @@
from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
from ..models.api_token import APIToken
from ..services.sync_app import SyncApp
from ..services.garmin.client import GarminClient
from ..services.postgresql_manager import PostgreSQLManager
from sqlalchemy.orm import Session
from ..utils.config import config
import logging
import json
import garth
from garth.auth_tokens import OAuth1Token, OAuth2Token
router = APIRouter()
logger = logging.getLogger(__name__)
class SyncActivityRequest(BaseModel):
    """Request body for POST /sync/activities."""
    # Number of days of history to sync; defaults to the last 30 days.
    days_back: int = 30
@@ -20,32 +29,50 @@ def get_db():
with db_manager.get_db_session() as session:
yield session
@router.post("/sync/weight", response_model=SyncResponse)
async def sync_weight(db: Session = Depends(get_db)):
    """Trigger the weight sync process (stub — services layer not wired yet)."""
    # Placeholder payload until the services layer is connected.
    return {
        "status": "started",
        "message": "Weight sync process started",
        "job_id": "weight-sync-12345",
    }
def _load_and_verify_garth_session(db: Session):
"""Helper to load token from DB and verify session with Garmin."""
logger.info("Loading and verifying Garmin session...")
token_record = db.query(APIToken).filter_by(token_type='garmin').first()
if not (token_record and token_record.garth_oauth1_token and token_record.garth_oauth2_token):
raise HTTPException(status_code=401, detail="Garmin token not found.")
try:
oauth1_dict = json.loads(token_record.garth_oauth1_token)
oauth2_dict = json.loads(token_record.garth_oauth2_token)
domain = oauth1_dict.get('domain')
if domain:
garth.configure(domain=domain)
garth.client.oauth1_token = OAuth1Token(**oauth1_dict)
garth.client.oauth2_token = OAuth2Token(**oauth2_dict)
garth.UserProfile.get()
logger.info("Garth session verified.")
except Exception as e:
logger.error(f"Garth session verification failed: {e}", exc_info=True)
raise HTTPException(status_code=401, detail=f"Failed to authenticate with Garmin: {e}")
@router.post("/sync/activities", response_model=SyncResponse)
async def sync_activities(request: SyncActivityRequest, db: Session = Depends(get_db)):
# This would trigger the activity sync process
# Implementation will connect with the services layer
return {
"status": "started",
"message": "Activity sync process started",
"job_id": f"activity-sync-{request.days_back}"
}
def sync_activities(request: SyncActivityRequest, db: Session = Depends(get_db)):
_load_and_verify_garth_session(db)
garmin_client = GarminClient() # The client is now just a thin wrapper
sync_app = SyncApp(db_session=db, garmin_client=garmin_client)
result = sync_app.sync_activities(days_back=request.days_back)
return SyncResponse(
status=result.get("status", "completed_with_errors" if result.get("failed", 0) > 0 else "completed"),
message=f"Activity sync completed: {result.get('processed', 0)} processed, {result.get('failed', 0)} failed",
job_id=f"activity-sync-{datetime.now().strftime('%Y%m%d%H%M%S')}"
)
@router.post("/sync/metrics", response_model=SyncResponse)
async def sync_metrics(db: Session = Depends(get_db)):
# This would trigger the health metrics sync process
# Implementation will connect with the services layer
return {
"status": "started",
"message": "Health metrics sync process started",
"job_id": "metrics-sync-12345"
}
def sync_metrics(db: Session = Depends(get_db)):
_load_and_verify_garth_session(db)
garmin_client = GarminClient()
sync_app = SyncApp(db_session=db, garmin_client=garmin_client)
result = sync_app.sync_health_metrics()
return SyncResponse(
status=result.get("status", "completed_with_errors" if result.get("failed", 0) > 0 else "completed"),
message=f"Health metrics sync completed: {result.get('processed', 0)} processed, {result.get('failed', 0)} failed",
job_id=f"metrics-sync-{datetime.now().strftime('%Y%m%d%H%M%S')}"
)

View File

@@ -2,103 +2,89 @@ import garth
import json
from datetime import datetime, timedelta
from garth.exc import GarthException
from src.models.api_token import APIToken
from src.services.postgresql_manager import PostgreSQLManager
from src.utils.config import config
from src.utils.helpers import setup_logger
from sqlalchemy.orm import Session
import logging
logger = setup_logger(__name__)
from ...models.api_token import APIToken
logger = logging.getLogger(__name__)
class AuthMixin:
def login(self):
def login(self, db: Session):
"""Login to Garmin Connect, returning status instead of raising exceptions."""
logger.info(f"Starting login for: {self.username}")
try:
# result1 is status, result2 is the mfa_state dict or tokens
result1, result2 = garth.login(self.username, self.password, return_on_mfa=True)
if result1 == "needs_mfa":
logger.info("MFA required for Garmin authentication.")
self.initiate_mfa(result2) # Fixed below
return "mfa_required"
self.update_tokens(result1, result2)
garth.login(self.username, self.password)
self.update_tokens(db, garth.client.oauth1_token, garth.client.oauth2_token)
self.is_connected = True
return "success"
except GarthException as e:
if "needs-mfa" in str(e).lower():
logger.info("MFA required for Garmin authentication.")
self.initiate_mfa(db, garth.client.mfa_state)
return "mfa_required"
logger.error(f"Login failed: {e}")
return "error"
def update_tokens(self, oauth1, oauth2):
def update_tokens(self, db: Session, oauth1: dict, oauth2: dict):
"""Saves the Garmin OAuth tokens to the database."""
logger.info(f"Updating Garmin tokens for user: {self.username}")
db_manager = PostgreSQLManager(config.DATABASE_URL)
with db_manager.get_db_session() as session:
token_record = session.query(APIToken).filter_by(token_type='garmin').first()
if not token_record:
token_record = APIToken(token_type='garmin')
session.add(token_record)
token_record.garth_oauth1_token = json.dumps(oauth1)
token_record.garth_oauth2_token = json.dumps(oauth2)
token_record.updated_at = datetime.now()
# Clear MFA state as it's no longer needed
token_record.mfa_state = None
token_record.mfa_expires_at = None
session.commit()
logger.info("Garmin tokens updated successfully.")
token_record = db.query(APIToken).filter_by(token_type='garmin').first()
if not token_record:
token_record = APIToken(token_type='garmin')
db.add(token_record)
token_record.garth_oauth1_token = json.dumps(oauth1)
token_record.garth_oauth2_token = json.dumps(oauth2)
token_record.updated_at = datetime.now()
token_record.mfa_state = None
token_record.mfa_expires_at = None
db.commit()
logger.info("Garmin tokens updated successfully.")
def initiate_mfa(self, mfa_state):
def initiate_mfa(self, db: Session, mfa_state: dict):
"""Saves ONLY serializable parts of the MFA state to the database."""
logger.info(f"Initiating MFA process for user: {self.username}")
# FIX: Extract serializable data. We cannot dump the 'client' object directly.
serializable_state = {
"signin_params": mfa_state["signin_params"],
"cookies": mfa_state["client"].sess.cookies.get_dict(),
"domain": mfa_state["client"].domain
}
db_manager = PostgreSQLManager(config.DATABASE_URL)
with db_manager.get_db_session() as session:
token_record = session.query(APIToken).filter_by(token_type='garmin').first()
if not token_record:
token_record = APIToken(token_type='garmin')
session.add(token_record)
# Save the dictionary as a string
token_record.mfa_state = json.dumps(serializable_state)
token_record.mfa_expires_at = datetime.now() + timedelta(minutes=10)
session.commit()
token_record = db.query(APIToken).filter_by(token_type='garmin').first()
if not token_record:
token_record = APIToken(token_type='garmin')
db.add(token_record)
token_record.mfa_state = json.dumps(serializable_state)
token_record.mfa_expires_at = datetime.now() + timedelta(minutes=10)
db.commit()
def handle_mfa(self, verification_code: str, session_id: str = None):
def handle_mfa(self, db: Session, verification_code: str):
"""Reconstructs the Garth state and completes authentication."""
db_manager = PostgreSQLManager(config.DATABASE_URL)
with db_manager.get_db_session() as session:
token_record = session.query(APIToken).filter_by(token_type='garmin').first()
if not token_record or not token_record.mfa_state:
raise Exception("No pending MFA session found.")
saved_data = json.loads(token_record.mfa_state)
# FIX: Reconstruct the Garth Client and State object
from garth.http import Client
client = Client(domain=saved_data["domain"])
client.sess.cookies.update(saved_data["cookies"])
mfa_state = {
"client": client,
"signin_params": saved_data["signin_params"]
}
try:
oauth1, oauth2 = garth.resume_login(mfa_state, verification_code)
self.update_tokens(oauth1, oauth2)
# ... rest of your session cleanup ...
return True
except GarthException as e:
logger.error(f"MFA handling failed: {e}")
raise
token_record = db.query(APIToken).filter_by(token_type='garmin').first()
if not token_record or not token_record.mfa_state:
raise Exception("No pending MFA session found.")
saved_data = json.loads(token_record.mfa_state)
from garth.http import Client
client = Client(domain=saved_data["domain"])
client.sess.cookies.update(saved_data["cookies"])
mfa_state = {
"client": client,
"signin_params": saved_data["signin_params"]
}
try:
garth.resume_login(mfa_state, verification_code)
self.update_tokens(db, garth.client.oauth1_token, garth.client.oauth2_token)
return True
except GarthException as e:
logger.error(f"MFA handling failed: {e}")
raise

View File

@@ -1,9 +1,9 @@
import garth
from src.utils.helpers import setup_logger
import logging
from .auth import AuthMixin
from .data import DataMixin
logger = setup_logger(__name__)
logger = logging.getLogger(__name__)
class GarminClient(AuthMixin, DataMixin):
def __init__(self, username: str = None, password: str = None, is_china: bool = False):
@@ -13,10 +13,7 @@ class GarminClient(AuthMixin, DataMixin):
self.garmin_client = None
self.is_connected = False
logger.debug(f"Initializing GarminClient for user: {username}, is_china: {is_china}")
if is_china:
logger.debug("Configuring garth for China domain")
garth.configure(domain="garmin.cn")
if username and password:

View File

@@ -1,139 +1,75 @@
from datetime import datetime
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
from src.utils.helpers import setup_logger
import garth
from garth.stats.steps import DailySteps
from garth.stats.hrv import DailyHRV
from garth.data.sleep import SleepData
import logging
logger = logging.getLogger(__name__)
logger = setup_logger(__name__)
class DataMixin:
def upload_weight(self, weight: float, unit: str = 'kg', timestamp: datetime = None) -> bool:
"""Upload weight entry to Garmin Connect."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
try:
if not timestamp:
timestamp = datetime.now()
try:
result = self.garmin_client.add_body_composition(
timestamp=timestamp,
weight=weight
)
except Exception:
try:
result = self.garmin_client.add_body_composition(
timestamp=timestamp.isoformat(),
weight=weight
)
except Exception:
result = self.garmin_client.add_body_composition(
timestamp=timestamp.strftime('%Y-%m-%d'),
weight=weight
)
logger.info(f"Successfully uploaded weight: {weight} {unit} at {timestamp}")
return result is not None
except Exception as e:
logger.error(f"Error uploading weight to Garmin: {str(e)}")
if "401" in str(e) or "unauthorized" in str(e).lower():
logger.error("Authentication failed - need to re-authenticate")
raise Exception("Authentication expired, needs re-authentication")
raise e
"""
Mixin for Garmin data fetching operations using the garth library.
Assumes that the global garth client has been authenticated.
"""
def get_activities(self, start_date: str, end_date: str = None, limit: int = 100) -> List[Dict[str, Any]]:
def get_activities(self, start_date: str, end_date: str, limit: int = 100) -> List[Dict[str, Any]]:
"""Fetch activity list from Garmin Connect."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
logger.info(f"Fetching activities from {start_date} to {end_date}")
try:
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
activities = self.garmin_client.get_activities(start_date, end_date)
logger.info(f"Fetched {len(activities)} activities from Garmin")
return activities
return garth.client.connectapi(
"/activitylist-service/activities/search/activities",
params={"startDate": start_date, "endDate": end_date, "limit": limit}
)
except Exception as e:
logger.error(f"Error fetching activities from Garmin: {str(e)}")
raise e
logger.error(f"Error fetching activities from Garmin: {e}")
raise
def download_activity(self, activity_id: str, file_type: str = 'tcx') -> Optional[bytes]:
"""Download activity file from Garmin Connect and return its content."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
def download_activity(self, activity_id: str, file_type: str = 'original') -> Optional[bytes]:
"""
Download an activity file from Garmin Connect.
'file_type' can be 'tcx', 'gpx', 'fit', or 'original'.
"""
logger.info(f"Downloading activity {activity_id} as {file_type}")
try:
file_content = self.garmin_client.get_activity_details(activity_id)
logger.info(f"Downloaded activity {activity_id} as {file_type} format")
return file_content if file_content else b""
path = f"/download-service/export/{file_type}/activity/{activity_id}"
return garth.client.download(path)
except Exception as e:
logger.error(f"Error downloading activity {activity_id} from Garmin: {str(e)}")
raise e
logger.error(f"Error downloading activity {activity_id} as {file_type}: {e}")
return None
def get_heart_rates(self, start_date: str, end_date: str = None) -> Dict[str, Any]:
"""Fetch heart rate data from Garmin Connect."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
try:
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
heart_rates = self.garmin_client.get_heart_rates(start_date, end_date)
logger.info(f"Fetched heart rate data from Garmin for {start_date} to {end_date}")
return heart_rates
except Exception as e:
logger.error(f"Error fetching heart rate data from Garmin: {str(e)}")
raise e
def get_daily_metrics(self, start_date: str, end_date: str) -> Dict[str, List[Dict]]:
"""
Fetch various daily metrics for a given date range.
"""
start = datetime.strptime(start_date, '%Y-%m-%d').date()
end = datetime.strptime(end_date, '%Y-%m-%d').date()
days = (end - start).days + 1
def get_sleep_data(self, start_date: str, end_date: str = None) -> Dict[str, Any]:
"""Fetch sleep data from Garmin Connect."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
try:
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
sleep_data = self.garmin_client.get_sleep_data(start_date, end_date)
logger.info(f"Fetched sleep data from Garmin for {start_date} to {end_date}")
return sleep_data
except Exception as e:
logger.error(f"Error fetching sleep data from Garmin: {str(e)}")
raise e
all_metrics = {
"steps": [],
"hrv": [],
"sleep": []
}
def get_steps_data(self, start_date: str, end_date: str = None) -> Dict[str, Any]:
"""Fetch steps data from Garmin Connect."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
try:
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
steps_data = self.garmin_client.get_steps_data(start_date, end_date)
logger.info(f"Fetched steps data from Garmin for {start_date} to {end_date}")
return steps_data
logger.info(f"Fetching daily steps for {days} days ending on {end_date}")
all_metrics["steps"] = DailySteps.list(end, period=days)
except Exception as e:
logger.error(f"Error fetching steps data from Garmin: {str(e)}")
raise e
logger.error(f"Error fetching daily steps: {e}")
def get_all_metrics(self, start_date: str, end_date: str = None) -> Dict[str, Any]:
"""Fetch all available metrics from Garmin Connect."""
if not self.is_connected:
raise Exception("Not connected to Garmin Connect")
try:
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
metrics = {
'heart_rates': self.get_heart_rates(start_date, end_date),
'sleep_data': self.get_sleep_data(start_date, end_date),
'steps_data': self.get_steps_data(start_date, end_date),
}
logger.info(f"Fetched all metrics from Garmin for {start_date} to {end_date}")
return metrics
logger.info(f"Fetching daily HRV for {days} days ending on {end_date}")
all_metrics["hrv"] = DailyHRV.list(end, period=days)
except Exception as e:
logger.error(f"Error fetching all metrics from Garmin: {str(e)}")
raise e
logger.error(f"Error fetching daily HRV: {e}")
try:
logger.info(f"Fetching daily sleep for {days} days ending on {end_date}")
all_metrics["sleep"] = SleepData.list(end, days=days)
except Exception as e:
logger.error(f"Error fetching daily sleep: {e}")
return all_metrics

View File

@@ -1,322 +1,182 @@
from ..models.weight_record import WeightRecord
from ..models.activity import Activity
from ..models.health_metric import HealthMetric
from ..models.sync_log import SyncLog
from ..services.fitbit_client import FitbitClient
from ..services.garmin.client import GarminClient
from sqlalchemy.orm import Session
from datetime import datetime, timedelta
from typing import Dict
import logging
from ..utils.helpers import setup_logger
logger = setup_logger(__name__)
logger = logging.getLogger(__name__)
class SyncApp:
def __init__(self, db_session: Session, fitbit_client: FitbitClient, garmin_client: GarminClient):
def __init__(self, db_session: Session, garmin_client: GarminClient, fitbit_client=None):
self.db_session = db_session
self.fitbit_client = fitbit_client
self.garmin_client = garmin_client
def sync_weight_data(self, start_date: str = None, end_date: str = None) -> Dict[str, int]:
"""Sync weight data from Fitbit to Garmin."""
if not start_date:
# Default to 1 year back
start_date = (datetime.now() - timedelta(days=365)).strftime('%Y-%m-%d')
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
# Create a sync log entry
sync_log = SyncLog(
operation="weight_sync",
status="started",
start_time=datetime.now(),
records_processed=0,
records_failed=0
)
self.db_session.add(sync_log)
self.db_session.commit()
try:
# Fetch unsynced weight records from Fitbit
fitbit_weights = self.fitbit_client.get_weight_logs(start_date, end_date)
# Track processing results
processed_count = 0
failed_count = 0
for weight_entry in fitbit_weights:
try:
# Check if this weight entry already exists in our DB (prevents duplicates)
fitbit_id = weight_entry.get('logId', str(weight_entry.get('date', '') + str(weight_entry.get('weight', 0))))
existing_record = self.db_session.query(WeightRecord).filter(
WeightRecord.fitbit_id == fitbit_id
).first()
if existing_record and existing_record.sync_status == 'synced':
# Skip if already synced
continue
# Create or update weight record
if not existing_record:
weight_record = WeightRecord(
fitbit_id=fitbit_id,
weight=weight_entry.get('weight'),
unit=weight_entry.get('unit', 'kg'),
date=datetime.fromisoformat(weight_entry.get('date')) if isinstance(weight_entry.get('date'), str) else weight_entry.get('date'),
timestamp=datetime.fromisoformat(weight_entry.get('date')) if isinstance(weight_entry.get('date'), str) else weight_entry.get('date'),
sync_status='unsynced'
)
self.db_session.add(weight_record)
self.db_session.flush() # Get the ID
else:
weight_record = existing_record
# Upload to Garmin if not already synced
if weight_record.sync_status != 'synced':
# Upload weight to Garmin
success = self.garmin_client.upload_weight(
weight=weight_record.weight,
unit=weight_record.unit,
timestamp=weight_record.timestamp
)
if success:
weight_record.sync_status = 'synced'
weight_record.garmin_id = "garmin_" + fitbit_id # Placeholder for Garmin ID
else:
weight_record.sync_status = 'failed'
failed_count += 1
processed_count += 1
except Exception as e:
logger.error(f"Error processing weight entry: {str(e)}")
failed_count += 1
# Update sync log with results
sync_log.status = "completed" if failed_count == 0 else "completed_with_errors"
sync_log.end_time = datetime.now()
sync_log.records_processed = processed_count
sync_log.records_failed = failed_count
self.db_session.commit()
logger.info(f"Weight sync completed: {processed_count} processed, {failed_count} failed")
return {
"processed": processed_count,
"failed": failed_count
}
except Exception as e:
logger.error(f"Error during weight sync: {str(e)}")
# Update sync log with error status
sync_log.status = "failed"
sync_log.end_time = datetime.now()
sync_log.message = str(e)
self.db_session.commit()
raise e
self.fitbit_client = fitbit_client
self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")
self.logger.info("SyncApp initialized")
def sync_activities(self, days_back: int = 30) -> Dict[str, int]:
"""Sync activity data from Garmin to local storage."""
self.logger.info(f"=== Starting sync_activities with days_back={days_back} ===")
start_date = (datetime.now() - timedelta(days=days_back)).strftime('%Y-%m-%d')
end_date = datetime.now().strftime('%Y-%m-%d')
# Create a sync log entry
sync_log = SyncLog(
operation="activity_archive",
status="started",
start_time=datetime.now(),
records_processed=0,
records_failed=0
)
self.logger.info(f"Date range: {start_date} to {end_date}")
sync_log = SyncLog(operation="activity_sync", status="started", start_time=datetime.now())
self.db_session.add(sync_log)
self.db_session.commit()
processed_count = 0
failed_count = 0
try:
# Fetch activities from Garmin
self.logger.info("Fetching activities from Garmin...")
garmin_activities = self.garmin_client.get_activities(start_date, end_date)
self.logger.info(f"Successfully fetched {len(garmin_activities)} activities from Garmin")
processed_count = 0
failed_count = 0
from ..models.activity import Activity
for activity in garmin_activities:
for activity_data in garmin_activities:
activity_id = str(activity_data.get('activityId'))
if not activity_id:
self.logger.warning("Skipping activity with no ID.")
continue
try:
activity_id = str(activity.get('activityId', ''))
existing_activity = self.db_session.query(Activity).filter(
Activity.garmin_activity_id == activity_id
).first()
existing_activity = self.db_session.query(Activity).filter_by(garmin_activity_id=activity_id).first()
if existing_activity and existing_activity.download_status == 'downloaded':
# Skip if already downloaded
continue
# Create or update activity record
if not existing_activity:
activity_record = Activity(
activity_type_dict = activity_data.get('activityType', {})
existing_activity = Activity(
garmin_activity_id=activity_id,
activity_name=activity.get('activityName', ''),
activity_type=activity.get('activityType', ''),
start_time=datetime.fromisoformat(activity.get('startTimeLocal', '')) if activity.get('startTimeLocal') else None,
duration=activity.get('duration', 0),
activity_name=activity_data.get('activityName'),
activity_type=activity_type_dict.get('typeKey', 'unknown'),
start_time=datetime.fromisoformat(activity_data.get('startTimeLocal')) if activity_data.get('startTimeLocal') else None,
duration=activity_data.get('duration', 0),
download_status='pending'
)
self.db_session.add(activity_record)
self.db_session.flush()
else:
activity_record = existing_activity
self.db_session.add(existing_activity)
# Download activity file if not already downloaded
if activity_record.download_status != 'downloaded':
# Download in various formats
file_formats = ['tcx', 'gpx', 'fit']
if existing_activity.download_status != 'downloaded':
downloaded_successfully = False
for fmt in file_formats:
try:
# Get file content from Garmin client
file_content = self.garmin_client.download_activity(activity_id, file_type=fmt)
if file_content:
# Store file content directly in the database
activity_record.file_content = file_content
activity_record.file_type = fmt
activity_record.download_status = 'downloaded'
activity_record.downloaded_at = datetime.now()
downloaded_successfully = True
break
except Exception as e:
logger.warning(f"Could not download activity {activity_id} in {fmt} format: {str(e)}")
continue
for fmt in ['original', 'tcx', 'gpx', 'fit']:
file_content = self.garmin_client.download_activity(activity_id, file_type=fmt)
if file_content:
existing_activity.file_content = file_content
existing_activity.file_type = fmt
existing_activity.download_status = 'downloaded'
existing_activity.downloaded_at = datetime.now()
self.logger.info(f"✓ Successfully downloaded {activity_id} as {fmt}")
downloaded_successfully = True
break
if not downloaded_successfully:
activity_record.download_status = 'failed'
existing_activity.download_status = 'failed'
self.logger.warning(f"✗ Failed to download {activity_id}")
failed_count += 1
processed_count += 1
else:
processed_count += 1
else:
self.logger.info(f"Activity {activity_id} already downloaded. Skipping.")
self.db_session.commit()
except Exception as e:
logger.error(f"Error processing activity {activity.get('activityId', '')}: {str(e)}")
self.logger.error(f"Error processing activity {activity_id}: {e}", exc_info=True)
failed_count += 1
self.db_session.rollback()
# Update sync log with results
sync_log.status = "completed" if failed_count == 0 else "completed_with_errors"
sync_log.end_time = datetime.now()
sync_log.status = "completed_with_errors" if failed_count > 0 else "completed"
sync_log.records_processed = processed_count
sync_log.records_failed = failed_count
self.db_session.commit()
logger.info(f"Activity sync completed: {processed_count} processed, {failed_count} failed")
return {
"processed": processed_count,
"failed": failed_count
}
except Exception as e:
logger.error(f"Error during activity sync: {str(e)}")
# Update sync log with error status
self.logger.error(f"Major error during activity sync: {e}", exc_info=True)
sync_log.status = "failed"
sync_log.end_time = datetime.now()
sync_log.message = str(e)
self.db_session.commit()
raise e
def sync_health_metrics(self, start_date: str = None, end_date: str = None) -> Dict[str, int]:
"""Sync health metrics from Garmin to local database."""
if not start_date:
# Default to 1 year back
start_date = (datetime.now() - timedelta(days=365)).strftime('%Y-%m-%d')
if not end_date:
end_date = datetime.now().strftime('%Y-%m-%d')
# Create a sync log entry
sync_log = SyncLog(
operation="metrics_download",
status="started",
start_time=datetime.now(),
records_processed=0,
records_failed=0
)
self.db_session.add(sync_log)
sync_log.end_time = datetime.now()
self.db_session.commit()
self.logger.info(f"=== Finished sync_activities: processed={processed_count}, failed={failed_count} ===")
return {"processed": processed_count, "failed": failed_count}
def sync_health_metrics(self, days_back: int = 30) -> Dict[str, int]:
"""Sync health metrics from Garmin to local database."""
start_date = (datetime.now() - timedelta(days=days_back)).strftime('%Y-%m-%d')
end_date = datetime.now().strftime('%Y-%m-%d')
self.logger.info(f"=== Starting sync_health_metrics with days_back={days_back} ===")
sync_log = SyncLog(operation="health_metric_sync", status="started", start_time=datetime.now())
self.db_session.add(sync_log)
self.db_session.commit()
processed_count = 0
failed_count = 0
try:
# Fetch all metrics from Garmin
all_metrics = self.garmin_client.get_all_metrics(start_date, end_date)
processed_count = 0
failed_count = 0
from ..models.health_metric import HealthMetric
# Process heart rate data
heart_rates = all_metrics.get('heart_rates', {})
if 'heartRateValues' in heart_rates:
for hr_data in heart_rates['heartRateValues']:
try:
timestamp = datetime.fromisoformat(hr_data[0]) if isinstance(hr_data[0], str) else datetime.fromtimestamp(hr_data[0]/1000)
metric = HealthMetric(
metric_type='heart_rate',
metric_value=hr_data[1],
unit='bpm',
timestamp=timestamp,
date=timestamp.date(),
source='garmin',
detailed_data=None
)
self.db_session.add(metric)
processed_count += 1
except Exception as e:
logger.error(f"Error processing heart rate data: {str(e)}")
failed_count += 1
# Process other metrics similarly...
# For brevity, I'll show just one more example
sleep_data = all_metrics.get('sleep_data', {})
sleep_levels = sleep_data.get('sleep', [])
for sleep_entry in sleep_levels:
daily_metrics = self.garmin_client.get_daily_metrics(start_date, end_date)
for steps_data in daily_metrics.get("steps", []):
try:
metric = HealthMetric(
metric_type='sleep',
metric_value=sleep_entry.get('duration', 0),
unit='minutes',
timestamp=datetime.now(), # Actual timestamp would come from data
date=datetime.now().date(), # Actual date would come from data
source='garmin',
detailed_data=sleep_entry
)
self.db_session.add(metric)
self._update_or_create_metric('steps', steps_data.calendar_date, steps_data.total_steps, 'steps')
processed_count += 1
except Exception as e:
logger.error(f"Error processing sleep data: {str(e)}")
self.logger.error(f"Error processing steps data: {e}", exc_info=True)
failed_count += 1
# Update sync log with results
sync_log.status = "completed" if failed_count == 0 else "completed_with_errors"
sync_log.end_time = datetime.now()
for hrv_data in daily_metrics.get("hrv", []):
try:
self._update_or_create_metric('hrv', hrv_data.calendar_date, hrv_data.last_night_avg, 'ms')
processed_count += 1
except Exception as e:
self.logger.error(f"Error processing HRV data: {e}", exc_info=True)
failed_count += 1
for sleep_data in daily_metrics.get("sleep", []):
try:
self._update_or_create_metric('sleep', sleep_data.daily_sleep_dto.calendar_date, sleep_data.daily_sleep_dto.sleep_time_seconds, 'seconds')
processed_count += 1
except Exception as e:
self.logger.error(f"Error processing sleep data: {e}", exc_info=True)
failed_count += 1
sync_log.status = "completed_with_errors" if failed_count > 0 else "completed"
sync_log.records_processed = processed_count
sync_log.records_failed = failed_count
self.db_session.commit()
logger.info(f"Health metrics sync completed: {processed_count} processed, {failed_count} failed")
return {
"processed": processed_count,
"failed": failed_count
}
except Exception as e:
logger.error(f"Error during health metrics sync: {str(e)}")
# Update sync log with error status
self.logger.error(f"Major error during health metrics sync: {e}", exc_info=True)
sync_log.status = "failed"
sync_log.end_time = datetime.now()
sync_log.message = str(e)
sync_log.end_time = datetime.now()
self.db_session.commit()
self.logger.info(f"=== Finished sync_health_metrics: processed={processed_count}, failed={failed_count} ===")
return {"processed": processed_count, "failed": failed_count}
def _update_or_create_metric(self, metric_type: str, date: datetime.date, value: float, unit: str):
"""Helper to update or create a health metric record."""
try:
existing = self.db_session.query(HealthMetric).filter_by(metric_type=metric_type, date=date).first()
if existing:
existing.metric_value = value
existing.updated_at = datetime.now()
else:
metric = HealthMetric(
metric_type=metric_type,
metric_value=value,
unit=unit,
timestamp=datetime.combine(date, datetime.min.time()),
date=date,
source='garmin'
)
self.db_session.add(metric)
self.db_session.commit()
raise e
except Exception as e:
self.logger.error(f"Error saving metric {metric_type} for {date}: {e}", exc_info=True)
self.db_session.rollback()
raise

View File

@@ -3,21 +3,6 @@ from datetime import datetime
from typing import Optional
import os
def setup_logger(name: str, level=logging.DEBUG):
"""Function to setup a logger that writes to the console."""
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger = logging.getLogger(name)
logger.setLevel(level)
if not logger.handlers:
logger.addHandler(console_handler)
return logger
def get_current_timestamp() -> str:
"""Get current timestamp in ISO format."""
return datetime.utcnow().isoformat()
@@ -33,4 +18,4 @@ def validate_environment_vars(required_vars: list) -> bool:
print(f"Missing required environment variables: {', '.join(missing_vars)}")
return False
return True
return True

View File

@@ -0,0 +1,28 @@
import logging
import logging.config
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"default": {
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
},
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "INFO",
"formatter": "default",
"stream": "ext://sys.stdout",
},
},
"root": {
"level": "INFO",
"handlers": ["console"],
},
}
def setup_logging():
"""Setup logging configuration."""
logging.config.dictConfig(LOGGING_CONFIG)

View File

@@ -10,18 +10,20 @@
<div class="container mt-5">
<h1>Fitbit-Garmin Sync Dashboard</h1>
<div class="row mb-4">
<div class="col-md-4">
<div class="card">
<div class="card-body">
<h5 class="card-title">Weight Records</h5>
<p class="card-text">Total: <span id="total-weights">0</span></p>
<p class="card-text">Synced: <span id="synced-weights">0</span></p>
<p class="card-text">Unsynced: <span id="unsynced-weights">0</span></p>
</div>
<!-- Toast container for notifications -->
<div class="toast-container position-fixed bottom-0 end-0 p-3">
<div id="appToast" class="toast" role="alert" aria-live="assertive" aria-atomic="true">
<div class="toast-header">
<strong class="me-auto" id="toast-title">Notification</strong>
<button type="button" class="btn-close" data-bs-dismiss="toast" aria-label="Close"></button>
</div>
<div class="toast-body" id="toast-body">
</div>
</div>
<div class="col-md-4">
</div>
<div class="row mb-4">
<div class="col-md-6">
<div class="card">
<div class="card-body">
<h5 class="card-title">Activities</h5>
@@ -30,13 +32,13 @@
</div>
</div>
</div>
<div class="col-md-4">
<div class="col-md-6">
<div class="card">
<div class="card-body">
<h5 class="card-title">Sync Status</h5>
<h5 class="card-title">Sync Controls</h5>
<div class="d-grid gap-2">
<button class="btn btn-primary" type="button" id="sync-weight-btn">Sync Weight</button>
<button class="btn btn-secondary" type="button" id="sync-activities-btn">Sync Activities</button>
<button class="btn btn-primary" type="button" id="sync-activities-btn">Sync Activities</button>
<button class="btn btn-info" type="button" id="sync-metrics-btn">Sync Health Metrics</button>
</div>
</div>
</div>
@@ -53,13 +55,15 @@
<th>Operation</th>
<th>Status</th>
<th>Start Time</th>
<th>Records Processed</th>
<th>Records Failed</th>
<th>End Time</th>
<th>Processed</th>
<th>Failed</th>
<th>Message</th>
</tr>
</thead>
<tbody>
<tr>
<td colspan="5">Loading logs...</td>
<td colspan="7">Loading logs...</td>
</tr>
</tbody>
</table>
@@ -69,71 +73,11 @@
<div class="row mt-5">
<div class="col-md-12">
<h3>Health Metrics</h3>
<h3>Actions</h3>
<div class="card">
<div class="card-body">
<div class="d-grid gap-2 d-md-flex justify-content-md-start">
<button class="btn btn-info me-md-2" type="button" id="sync-metrics-btn">Sync Health Metrics</button>
<button class="btn btn-outline-info me-md-2" type="button" id="view-metrics-btn">View Health Data Summary</button>
<button class="btn btn-outline-info" type="button" id="query-metrics-btn">Query Metrics</button>
</div>
</div>
</div>
</div>
</div>
<div class="row mt-5">
<div class="col-md-12">
<h3>Activity Files</h3>
<div class="card">
<div class="card-body">
<div class="d-grid gap-2 d-md-flex justify-content-md-start">
<button class="btn btn-outline-secondary me-md-2" type="button" id="list-activities-btn">List Stored Activities</button>
<button class="btn btn-outline-secondary" type="button" id="download-activities-btn">Download Activity File</button>
</div>
</div>
</div>
</div>
</div>
<div class="row mt-5">
<div class="col-md-12">
<div class="card">
<div class="card-body">
<div class="d-grid gap-2 d-md-flex justify-content-md-start">
<a href="/setup" class="btn btn-primary me-md-2">Setup & Configuration</a>
<a href="/docs" class="btn btn-outline-secondary" target="_blank">API Documentation</a>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="row mt-5">
<div class="col-md-12">
<h3>Health Metrics</h3>
<div class="card">
<div class="card-body">
<div class="d-grid gap-2 d-md-flex justify-content-md-start">
<button class="btn btn-info me-md-2" type="button" id="sync-metrics-btn">Sync Health Metrics</button>
<button class="btn btn-outline-info me-md-2" type="button" id="view-metrics-btn">View Health Data Summary</button>
<button class="btn btn-outline-info" type="button" id="query-metrics-btn">Query Metrics</button>
</div>
</div>
</div>
</div>
</div>
<div class="row mt-5">
<div class="col-md-12">
<h3>Activity Files</h3>
<div class="card">
<div class="card-body">
<div class="d-grid gap-2 d-md-flex justify-content-md-start">
<button class="btn btn-outline-secondary me-md-2" type="button" id="list-activities-btn">List Stored Activities</button>
<button class="btn btn-outline-secondary" type="button" id="download-activities-btn">Download Activity File</button>
</div>
<a href="/setup" class="btn btn-primary me-md-2">Setup & Configuration</a>
<a href="/docs" class="btn btn-outline-secondary" target="_blank">API Documentation</a>
</div>
</div>
</div>
@@ -142,167 +86,124 @@
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
<script>
// Load dashboard data when page loads
// Shared Bootstrap toast instance; created once the DOM is ready.
let toastInstance = null;
document.addEventListener('DOMContentLoaded', function() {
    toastInstance = new bootstrap.Toast(document.getElementById('appToast'));
    loadDashboardData();
    // Wire each dashboard button to its handler (same order as before).
    const buttonHandlers = {
        'sync-weight-btn': syncWeight,
        'sync-activities-btn': syncActivities,
        'sync-metrics-btn': syncHealthMetrics,
        'view-metrics-btn': viewHealthSummary,
        'query-metrics-btn': queryMetrics,
        'list-activities-btn': listActivities,
        'download-activities-btn': downloadActivityFile,
    };
    for (const [id, handler] of Object.entries(buttonHandlers)) {
        document.getElementById(id).addEventListener('click', handler);
    }
});
// Show the shared app toast with a title, message, and a color keyed by level
// ('success' | 'error' | 'warning' | anything else -> info styling).
function showToast(title, body, level = 'info') {
    const titleEl = document.getElementById('toast-title');
    const bodyEl = document.getElementById('toast-body');
    const headerEl = document.querySelector('.toast-header');
    titleEl.textContent = title;
    bodyEl.textContent = body;
    // Strip any color classes left over from a previous toast.
    headerEl.classList.remove('bg-success', 'bg-danger', 'bg-warning', 'bg-info', 'text-white');
    const levelClasses = {
        success: ['bg-success', 'text-white'],
        error: ['bg-danger', 'text-white'],
        warning: ['bg-warning'],
    };
    headerEl.classList.add(...(levelClasses[level] || ['bg-info', 'text-white']));
    toastInstance.show();
}
// Fetch /api/status and refresh the summary cards and the sync-log table.
// Fix: a bad merge left each log row with 9 cells (old plain cells plus the
// new badge/date cells) against the table's 7-column header; this renders
// exactly the 7 columns the header declares.
async function loadDashboardData() {
    try {
        const response = await fetch('/api/status');
        if (!response.ok) {
            throw new Error(`HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        document.getElementById('total-weights').textContent = data.total_weight_records;
        document.getElementById('synced-weights').textContent = data.synced_weight_records;
        document.getElementById('unsynced-weights').textContent = data.unsynced_weight_records;
        document.getElementById('total-activities').textContent = data.total_activities;
        document.getElementById('downloaded-activities').textContent = data.downloaded_activities;
        // Rebuild the log table body from scratch on every refresh.
        const logsBody = document.querySelector('#sync-logs-table tbody');
        logsBody.innerHTML = '';
        if (data.recent_logs.length === 0) {
            logsBody.innerHTML = '<tr><td colspan="7">No recent sync logs.</td></tr>';
            return;
        }
        data.recent_logs.forEach(log => {
            const row = document.createElement('tr');
            row.innerHTML = `
                <td>${log.operation}</td>
                <td><span class="badge bg-${log.status === 'completed' ? 'success' : 'warning'}">${log.status}</span></td>
                <td>${new Date(log.start_time).toLocaleString()}</td>
                <td>${log.end_time ? new Date(log.end_time).toLocaleString() : 'N/A'}</td>
                <td>${log.records_processed}</td>
                <td>${log.records_failed}</td>
                <td>${log.message || ''}</td>
            `;
            logsBody.appendChild(row);
        });
    } catch (error) {
        console.error('Error loading dashboard data:', error);
        showToast('Error', 'Could not load dashboard data.', 'error');
    }
}
// Trigger a weight sync via POST /api/sync/weight.
// Fix: the catch block mixed an alert() with a toast message that belongs to
// loadDashboardData; this brings the function in line with the toast-based
// pattern used by syncActivities and syncHealthMetrics.
async function syncWeight() {
    showToast('Syncing...', 'Weight sync has been initiated.', 'info');
    try {
        const response = await fetch('/api/sync/weight', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' }
        });
        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.detail || `HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        showToast('Sync Complete', data.message, 'success');
        loadDashboardData(); // Refresh data after sync
    } catch (error) {
        console.error('Error syncing weight:', error);
        showToast('Sync Error', `Weight sync failed: ${error.message}`, 'error');
    }
}
// Trigger an activity sync (last 30 days) via POST /api/sync/activities.
// Fix: a bad merge left two `headers` entries in the fetch options (a syntax
// error — duplicate key object literal plus the new one-liner) and stale
// alert() success lines alongside the new toast calls; keep only the new path.
async function syncActivities() {
    showToast('Syncing...', 'Activity sync has been initiated.', 'info');
    try {
        const response = await fetch('/api/sync/activities', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ days_back: 30 })
        });
        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.detail || `HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        showToast('Sync Complete', data.message, 'success');
        loadDashboardData(); // Refresh data after sync
    } catch (error) {
        console.error('Error syncing activities:', error);
        showToast('Sync Error', `Activity sync failed: ${error.message}`, 'error');
    }
}
// Trigger a health-metrics sync via POST /api/sync/metrics.
// Fix: a bad merge duplicated the `headers` option and left the old alert()
// success/error lines in place; the new-style error toast had been displaced
// into a different function. Keep only the toast-based flow.
async function syncHealthMetrics() {
    showToast('Syncing...', 'Health metrics sync has been initiated.', 'info');
    try {
        const response = await fetch('/api/sync/metrics', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' }
        });
        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.detail || `HTTP error! status: ${response.status}`);
        }
        const data = await response.json();
        showToast('Sync Complete', data.message, 'success');
        loadDashboardData(); // Refresh data after sync
    } catch (error) {
        console.error('Error syncing health metrics:', error);
        showToast('Sync Error', `Health metrics sync failed: ${error.message}`, 'error');
    }
}
// Fetch the aggregated health summary and present it in a blocking dialog.
async function viewHealthSummary() {
    try {
        const response = await fetch('/api/health-data/summary');
        const summary = await response.json();
        const text = `Health Summary:
Steps: ${summary.total_steps || 0}
Avg Heart Rate: ${summary.avg_heart_rate || 0}
Sleep Hours: ${summary.total_sleep_hours || 0}
Avg Calories: ${summary.avg_calories || 0}`;
        alert(text);
    } catch (error) {
        console.error('Error fetching health summary:', error);
        alert('Error fetching health summary: ' + error.message);
    }
}
// Query stored health metrics and report how many were found.
async function queryMetrics() {
    try {
        const metrics = await (await fetch('/api/metrics/query')).json();
        alert(`Found ${metrics.length} health metrics`);
    } catch (error) {
        console.error('Error querying metrics:', error);
        alert('Error querying metrics: ' + error.message);
    }
}
// List stored activity files and report how many exist.
async function listActivities() {
    try {
        const activities = await (await fetch('/api/activities/list')).json();
        alert(`Found ${activities.length} stored activities`);
    } catch (error) {
        console.error('Error listing activities:', error);
        alert('Error listing activities: ' + error.message);
    }
}
// Prompt for an activity ID and open the stored file in a new tab.
// Fix: the catch block contained a stray showToast line ("Health metrics sync
// failed") that belongs to syncHealthMetrics — a bad-merge artifact — leaving
// an alert followed by an unrelated toast; keep only this function's own
// error handling.
async function downloadActivityFile() {
    try {
        // For demo purposes this prompts for an ID; a real implementation
        // would list available activities to choose from.
        const activityId = prompt('Enter activity ID to download:', '12345');
        if (activityId) {
            // Initiates a download of the stored activity file.
            window.open(`/api/activities/download/${activityId}`, '_blank');
        }
    } catch (error) {
        console.error('Error downloading activity file:', error);
        alert('Error downloading activity file: ' + error.message);
    }
}
</script>
</body>
</html>