commit 574feb1ea1
Date: 2025-09-08 12:51:15 -07:00
62 changed files with 10425 additions and 0 deletions

70
backend/Dockerfile Normal file

@@ -0,0 +1,70 @@
# Multi-stage build for container-first development
FROM python:3.11-slim-bullseye AS builder
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Install system dependencies for building
RUN apt-get update && \
apt-get install -y --no-install-recommends gcc libpq-dev && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Set working directory
WORKDIR /app
# Install Python dependencies
COPY backend/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Runtime stage
FROM python:3.11-slim-bullseye AS runtime
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Install runtime system dependencies only
RUN apt-get update && \
apt-get install -y --no-install-recommends libpq5 && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Set working directory
WORKDIR /app
# Copy installed packages from builder stage
COPY --from=builder /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin
# Copy application code
COPY backend/ .
# Create entrypoint script for migration handling
RUN echo '#!/bin/bash\n\
set -e\n\
\n\
# Run database migrations\n\
echo "Running database migrations..."\n\
alembic upgrade head\n\
\n\
# Verify migration success\n\
echo "Verifying migration status..."\n\
alembic current\n\
\n\
# Start the application\n\
echo "Starting application..."\n\
exec "$@"' > /app/entrypoint.sh && \
chmod +x /app/entrypoint.sh
# Create non-root user
RUN useradd -m appuser && chown -R appuser:appuser /app
USER appuser
# Expose application port
EXPOSE 8000
# Use entrypoint for migration automation
ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
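# Example usage (assumed: the build context is the repo root, matching the
# COPY backend/... paths above, and an .env file provides the app settings):
#   docker build -t cycling-backend -f backend/Dockerfile .
#   docker run --env-file .env -p 8000:8000 cycling-backend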

24
backend/alembic.ini Normal file

@@ -0,0 +1,24 @@
[alembic]
script_location = alembic
sqlalchemy.url = postgresql+asyncpg://appuser:password@db:5432/cyclingdb
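# NOTE: env.py overrides this URL at runtime with app.database.DATABASE_URL;
# the value here is only a fallback for running alembic outside the container.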
[loggers]
keys = root
[handlers]
keys = console
[logger_root]
level = WARN
handlers = console
qualname =
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

56
backend/alembic/env.py Normal file

@@ -0,0 +1,56 @@
import asyncio
import os
import sys
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Add app directory to path
sys.path.append(os.getcwd())
# Import base and models so every table is registered on the metadata
from app.models import Base
from app.database import DATABASE_URL
config = context.config
fileConfig(config.config_file_name)
# Override the ini URL with the application's configured URL
config.set_main_option("sqlalchemy.url", DATABASE_URL)
target_metadata = Base.metadata
def run_migrations_offline():
    """Run migrations in 'offline' mode."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection):
    """Run migrations on a sync connection (invoked via run_sync)."""
    context.configure(connection=connection, target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()
async def run_migrations_online():
    """Run migrations in 'online' mode using an async engine."""
    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await connectable.dispose()
if context.is_offline_mode():
    run_migrations_offline()
else:
    asyncio.run(run_migrations_online())

25
backend/alembic/script.py.mako Normal file

@@ -0,0 +1,25 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

11
backend/app/config.py Normal file

@@ -0,0 +1,11 @@
from pydantic_settings import BaseSettings
class Settings(BaseSettings):
    DATABASE_URL: str
    API_KEY: str  # shared secret checked by the X-API-KEY middleware in main.py
GPX_STORAGE_PATH: str
AI_MODEL: str = "openrouter/auto"
class Config:
env_file = ".env"
settings = Settings()
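# Expected .env keys (values illustrative):
#   DATABASE_URL=postgresql+asyncpg://appuser:password@db:5432/cyclingdb
#   API_KEY=change-me
#   GPX_STORAGE_PATH=/app/data/gpx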

17
backend/app/database.py Normal file

@@ -0,0 +1,17 @@
import os
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from app.models.base import Base  # single declarative base, shared with Alembic
DATABASE_URL = os.getenv(
    "DATABASE_URL",
    "postgresql+asyncpg://appuser:password@db:5432/cyclingdb",
)
engine = create_async_engine(DATABASE_URL, echo=True)
AsyncSessionLocal = async_sessionmaker(bind=engine, expire_on_commit=False)
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    async with AsyncSessionLocal() as session:
        yield session
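def get_database_url(sync: bool = False) -> str:
    """Return the configured database URL (imported by main.py and the ops scripts).
    Minimal helper, assuming sync callers (Alembic status checks, pg_dump wrappers)
    want the psycopg2 driver in place of asyncpg."""
    if sync:
        return DATABASE_URL.replace("+asyncpg", "+psycopg2")
    return DATABASE_URL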

107
backend/app/main.py Normal file

@@ -0,0 +1,107 @@
from fastapi import FastAPI, Depends, Request
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
from datetime import datetime, timezone
from .database import get_db, get_database_url
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import text
from alembic.config import Config
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
from .routes import gpx as gpx_routes
from .routes import rule as rule_routes
from .routes import plan as plan_routes
from .routes import workouts as workout_routes
from .routes import prompts as prompt_routes
from .config import settings
app = FastAPI(
title="AI Cycling Coach API",
description="Backend service for AI-assisted cycling training platform",
version="0.1.0"
)
# API Key Authentication Middleware
@app.middleware("http")
async def api_key_auth(request: Request, call_next):
    # /docs and /redoc also need /openapi.json; /health stays open for probes
    if request.url.path == "/health" or request.url.path.startswith(("/docs", "/redoc", "/openapi.json")):
        return await call_next(request)
    api_key = request.headers.get("X-API-KEY")
    if api_key != settings.API_KEY:
        # Raising HTTPException here would bypass FastAPI's exception handlers
        # (middleware runs outside them), so return the 401 response directly
        return JSONResponse(status_code=401, content={"detail": "Invalid API Key"})
    return await call_next(request)
# Configure CORS
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Include routers
app.include_router(gpx_routes.router)
app.include_router(rule_routes.router)
app.include_router(plan_routes.router)
app.include_router(workout_routes.router, prefix="/workouts", tags=["workouts"])
app.include_router(prompt_routes.router, prefix="/prompts", tags=["prompts"])
async def check_migration_status():
"""Check if database migrations are up to date."""
try:
# Get Alembic configuration
config = Config("alembic.ini")
config.set_main_option("sqlalchemy.url", get_database_url())
script = ScriptDirectory.from_config(config)
        # Get current database revision (needs a sync engine; asyncpg won't work here)
        from sqlalchemy import create_engine
        engine = create_engine(get_database_url(sync=True))
with engine.connect() as conn:
context = MigrationContext.configure(conn)
current_rev = context.get_current_revision()
# Get head revision
head_rev = script.get_current_head()
return {
"current_revision": current_rev,
"head_revision": head_rev,
"migrations_up_to_date": current_rev == head_rev
}
except Exception as e:
return {
"error": str(e),
"migrations_up_to_date": False
}
@app.get("/health")
async def health_check(db: AsyncSession = Depends(get_db)):
"""Enhanced health check with migration verification."""
    health_status = {
        "status": "healthy",
        "version": "0.1.0",
        "timestamp": datetime.now(timezone.utc).isoformat()
    }
# Database connection check
try:
await db.execute(text("SELECT 1"))
health_status["database"] = "connected"
except Exception as e:
health_status["status"] = "unhealthy"
health_status["database"] = f"error: {str(e)}"
# Migration status check
migration_info = await check_migration_status()
health_status["migrations"] = migration_info
if not migration_info.get("migrations_up_to_date", False):
health_status["status"] = "unhealthy"
return health_status
if __name__ == "__main__":
import uvicorn
uvicorn.run(app, host="0.0.0.0", port=8000)

11
backend/app/models/__init__.py Normal file

@@ -0,0 +1,11 @@
from .base import Base, BaseModel
from .route import Route
from .section import Section
from .rule import Rule
from .plan import Plan
from .plan_rule import PlanRule
from .user import User
from .workout import Workout
from .analysis import Analysis
from .prompt import Prompt
from .garmin_sync_log import GarminSyncLog

17
backend/app/models/analysis.py Normal file

@@ -0,0 +1,17 @@
from sqlalchemy import Column, String, ForeignKey, JSON, Boolean
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel
class Analysis(BaseModel):
    """Analysis model for AI-generated workout feedback."""
    __tablename__ = "analyses"
    # Primary keys are UUIDs (see BaseModel), so foreign keys are UUIDs too
    workout_id = Column(PG_UUID(as_uuid=True), ForeignKey("workouts.id"), nullable=False)
analysis_type = Column(String(50), default='workout_review')
jsonb_feedback = Column(JSON) # AI-generated feedback
suggestions = Column(JSON) # AI-generated suggestions
approved = Column(Boolean, default=False)
# Relationships
workout = relationship("Workout", back_populates="analyses")

17
backend/app/models/base.py Normal file

@@ -0,0 +1,17 @@
from datetime import datetime
from uuid import uuid4
from sqlalchemy import Column, DateTime
from sqlalchemy.orm import declarative_base
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
Base = declarative_base()
class BaseModel(Base):
__abstract__ = True
id = Column(PG_UUID(as_uuid=True), primary_key=True, default=uuid4)
created_at = Column(DateTime, default=datetime.utcnow)
updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
def __repr__(self):
return f"<{self.__class__.__name__} {self.id}>"

12
backend/app/models/garmin_sync_log.py Normal file

@@ -0,0 +1,12 @@
from sqlalchemy import Column, Integer, DateTime, String, Text
from .base import BaseModel
class GarminSyncLog(BaseModel):
"""Log model for tracking Garmin sync operations."""
__tablename__ = "garmin_sync_log"
last_sync_time = Column(DateTime)
activities_synced = Column(Integer, default=0)
status = Column(String(20)) # success, error, in_progress
error_message = Column(Text)

14
backend/app/models/plan.py Normal file

@@ -0,0 +1,14 @@
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
from sqlalchemy.dialects.postgresql import JSONB, UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel
class Plan(BaseModel):
    __tablename__ = "plans"
    # Columns set by the /plans routes
    user_id = Column(PG_UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    start_date = Column(DateTime, nullable=False)
    end_date = Column(DateTime, nullable=False)
    goal = Column(String(255))
    # Populated once the AI generates the structured plan
    jsonb_plan = Column(JSONB, nullable=True)
    version = Column(Integer, nullable=False, default=1)
    parent_plan_id = Column(PG_UUID(as_uuid=True), ForeignKey('plans.id'), nullable=True)
    user = relationship("User", back_populates="plans")
    parent_plan = relationship("Plan", remote_side="Plan.id", backref="child_plans")
    workouts = relationship("Workout", back_populates="plan", cascade="all, delete-orphan")

13
backend/app/models/prompt.py Normal file

@@ -0,0 +1,13 @@
from sqlalchemy import Column, Integer, String, Text, Boolean, DateTime
from .base import BaseModel
class Prompt(BaseModel):
"""Prompt model for AI prompt versioning and management."""
__tablename__ = "prompts"
action_type = Column(String(50), nullable=False) # plan_generation, workout_analysis, rule_parsing, suggestions
model = Column(String(100)) # AI model identifier
prompt_text = Column(Text, nullable=False)
version = Column(Integer, default=1)
active = Column(Boolean, default=True)

14
backend/app/models/route.py Normal file

@@ -0,0 +1,14 @@
from sqlalchemy import Column, String, Float, ForeignKey
from sqlalchemy.orm import relationship
from .base import BaseModel
class Route(BaseModel):
__tablename__ = "routes"
name = Column(String(100), nullable=False)
description = Column(String(500))
total_distance = Column(Float, nullable=False)
elevation_gain = Column(Float, nullable=False)
gpx_file_path = Column(String(255), nullable=False)
sections = relationship("Section", back_populates="route", cascade="all, delete-orphan")

14
backend/app/models/rule.py Normal file

@@ -0,0 +1,14 @@
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean
from sqlalchemy.dialects.postgresql import JSONB, UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel
class Rule(BaseModel):
    __tablename__ = "rules"
    name = Column(String(100), nullable=False)
    user_defined = Column(Boolean, default=True)
    jsonb_rules = Column(JSONB, nullable=False)
    version = Column(Integer, default=1)
    parent_rule_id = Column(PG_UUID(as_uuid=True), ForeignKey('rules.id'), nullable=True)
    parent_rule = relationship("Rule", remote_side="Rule.id")

15
backend/app/models/section.py Normal file

@@ -0,0 +1,15 @@
from sqlalchemy import Column, String, Float, ForeignKey
from sqlalchemy.orm import relationship
from .base import BaseModel
class Section(BaseModel):
__tablename__ = "sections"
route_id = Column(ForeignKey("routes.id"), nullable=False)
gpx_file_path = Column(String(255), nullable=False)
distance_m = Column(Float, nullable=False)
grade_avg = Column(Float)
min_gear = Column(String(50))
est_time_minutes = Column(Float)
route = relationship("Route", back_populates="sections")

7
backend/app/models/user.py Normal file

@@ -0,0 +1,7 @@
from .base import BaseModel
from sqlalchemy.orm import relationship
class User(BaseModel):
__tablename__ = "users"
plans = relationship("Plan", back_populates="user")

26
backend/app/models/workout.py Normal file

@@ -0,0 +1,26 @@
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, JSON
from sqlalchemy.dialects.postgresql import UUID as PG_UUID
from sqlalchemy.orm import relationship
from .base import BaseModel
class Workout(BaseModel):
    """Workout model for Garmin activity data."""
    __tablename__ = "workouts"
    # Plans use UUID primary keys, so the foreign key is a UUID as well
    plan_id = Column(PG_UUID(as_uuid=True), ForeignKey("plans.id"), nullable=True)
    garmin_activity_id = Column(String(255), unique=True, nullable=False)
activity_type = Column(String(50))
start_time = Column(DateTime, nullable=False)
duration_seconds = Column(Integer)
distance_m = Column(Float)
avg_hr = Column(Integer)
max_hr = Column(Integer)
avg_power = Column(Float)
max_power = Column(Float)
avg_cadence = Column(Float)
elevation_gain_m = Column(Float)
metrics = Column(JSON) # Store full Garmin data as JSONB
# Relationships
plan = relationship("Plan", back_populates="workouts")
analyses = relationship("Analysis", back_populates="workout", cascade="all, delete-orphan")

35
backend/app/routes/gpx.py Normal file

@@ -0,0 +1,35 @@
from fastapi import APIRouter, UploadFile, File, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from app.database import get_db
from app.services.gpx import parse_gpx, store_gpx_file
from app.schemas.gpx import RouteCreate, Route as RouteSchema
from app.models import Route
import os
router = APIRouter(prefix="/gpx", tags=["GPX Routes"])
@router.post("/upload", response_model=RouteSchema)
async def upload_gpx_route(
file: UploadFile = File(...),
db: AsyncSession = Depends(get_db)
):
# Store GPX file
gpx_path = await store_gpx_file(file)
# Parse GPX file
gpx_data = await parse_gpx(gpx_path)
# Create route in database
route_data = RouteCreate(
name=file.filename,
description=f"Uploaded from {file.filename}",
total_distance=gpx_data['total_distance'],
elevation_gain=gpx_data['elevation_gain'],
gpx_file_path=gpx_path
)
    db_route = Route(**route_data.model_dump())
db.add(db_route)
await db.commit()
await db.refresh(db_route)
return db_route

89
backend/app/routes/plan.py Normal file

@@ -0,0 +1,89 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, delete
from app.database import get_db
from app.models import Plan, PlanRule, Rule
from app.schemas.plan import PlanCreate, Plan as PlanSchema
from uuid import UUID
router = APIRouter(prefix="/plans", tags=["Training Plans"])
@router.post("/", response_model=PlanSchema)
async def create_plan(
plan: PlanCreate,
db: AsyncSession = Depends(get_db)
):
# Create plan
db_plan = Plan(
user_id=plan.user_id,
start_date=plan.start_date,
end_date=plan.end_date,
goal=plan.goal
)
db.add(db_plan)
await db.flush() # Flush to get plan ID
# Add rules to plan
for rule_id in plan.rule_ids:
db_plan_rule = PlanRule(plan_id=db_plan.id, rule_id=rule_id)
db.add(db_plan_rule)
await db.commit()
await db.refresh(db_plan)
return db_plan
@router.get("/{plan_id}", response_model=PlanSchema)
async def read_plan(
plan_id: UUID,
db: AsyncSession = Depends(get_db)
):
plan = await db.get(Plan, plan_id)
if not plan:
raise HTTPException(status_code=404, detail="Plan not found")
return plan
@router.get("/", response_model=list[PlanSchema])
async def read_plans(
db: AsyncSession = Depends(get_db)
):
result = await db.execute(select(Plan))
return result.scalars().all()
@router.put("/{plan_id}", response_model=PlanSchema)
async def update_plan(
plan_id: UUID,
plan: PlanCreate,
db: AsyncSession = Depends(get_db)
):
db_plan = await db.get(Plan, plan_id)
if not db_plan:
raise HTTPException(status_code=404, detail="Plan not found")
# Update plan fields
db_plan.user_id = plan.user_id
db_plan.start_date = plan.start_date
db_plan.end_date = plan.end_date
db_plan.goal = plan.goal
    # Update rules: models have no .delete(), so use a SQLAlchemy delete() statement
    await db.execute(delete(PlanRule).where(PlanRule.plan_id == plan_id))
for rule_id in plan.rule_ids:
db_plan_rule = PlanRule(plan_id=plan_id, rule_id=rule_id)
db.add(db_plan_rule)
await db.commit()
await db.refresh(db_plan)
return db_plan
@router.delete("/{plan_id}")
async def delete_plan(
plan_id: UUID,
db: AsyncSession = Depends(get_db)
):
plan = await db.get(Plan, plan_id)
if not plan:
raise HTTPException(status_code=404, detail="Plan not found")
await db.delete(plan)
await db.commit()
return {"detail": "Plan deleted"}

79
backend/app/routes/prompts.py Normal file

@@ -0,0 +1,79 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import List
from uuid import UUID
from app.database import get_db
from app.models.prompt import Prompt
from app.schemas.prompt import Prompt as PromptSchema, PromptCreate, PromptUpdate
from app.services.prompt_manager import PromptManager
router = APIRouter()
@router.get("/", response_model=List[PromptSchema])
async def read_prompts(db: AsyncSession = Depends(get_db)):
"""Get all prompts."""
result = await db.execute(select(Prompt))
return result.scalars().all()
@router.get("/{prompt_id}", response_model=PromptSchema)
async def read_prompt(prompt_id: UUID, db: AsyncSession = Depends(get_db)):
"""Get a specific prompt by ID."""
prompt = await db.get(Prompt, prompt_id)
if not prompt:
raise HTTPException(status_code=404, detail="Prompt not found")
return prompt
@router.post("/", response_model=PromptSchema)
async def create_prompt(
prompt: PromptCreate,
db: AsyncSession = Depends(get_db)
):
"""Create a new prompt version."""
prompt_manager = PromptManager(db)
new_prompt = await prompt_manager.create_prompt_version(
action_type=prompt.action_type,
prompt_text=prompt.prompt_text,
model=prompt.model
)
return new_prompt
@router.get("/active/{action_type}")
async def get_active_prompt(
action_type: str,
db: AsyncSession = Depends(get_db)
):
"""Get the active prompt for a specific action type."""
prompt_manager = PromptManager(db)
prompt_text = await prompt_manager.get_active_prompt(action_type)
if not prompt_text:
raise HTTPException(status_code=404, detail=f"No active prompt found for {action_type}")
return {"action_type": action_type, "prompt_text": prompt_text}
@router.get("/history/{action_type}", response_model=List[PromptSchema])
async def get_prompt_history(
action_type: str,
db: AsyncSession = Depends(get_db)
):
"""Get the version history for a specific action type."""
prompt_manager = PromptManager(db)
prompts = await prompt_manager.get_prompt_history(action_type)
return prompts
@router.post("/{prompt_id}/activate")
async def activate_prompt_version(
    prompt_id: UUID,
db: AsyncSession = Depends(get_db)
):
"""Activate a specific prompt version."""
prompt_manager = PromptManager(db)
success = await prompt_manager.activate_prompt_version(prompt_id)
if not success:
raise HTTPException(status_code=404, detail="Prompt not found")
return {"message": "Prompt version activated successfully"}

66
backend/app/routes/rule.py Normal file

@@ -0,0 +1,66 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.database import get_db
from app.models import Rule
from app.schemas.rule import RuleCreate, Rule as RuleSchema
from uuid import UUID
router = APIRouter(prefix="/rules", tags=["Rules"])
@router.post("/", response_model=RuleSchema)
async def create_rule(
rule: RuleCreate,
db: AsyncSession = Depends(get_db)
):
    db_rule = Rule(**rule.model_dump())
db.add(db_rule)
await db.commit()
await db.refresh(db_rule)
return db_rule
@router.get("/{rule_id}", response_model=RuleSchema)
async def read_rule(
rule_id: UUID,
db: AsyncSession = Depends(get_db)
):
rule = await db.get(Rule, rule_id)
if not rule:
raise HTTPException(status_code=404, detail="Rule not found")
return rule
@router.get("/", response_model=list[RuleSchema])
async def read_rules(
db: AsyncSession = Depends(get_db)
):
    result = await db.execute(select(Rule))
return result.scalars().all()
@router.put("/{rule_id}", response_model=RuleSchema)
async def update_rule(
rule_id: UUID,
rule: RuleCreate,
db: AsyncSession = Depends(get_db)
):
db_rule = await db.get(Rule, rule_id)
if not db_rule:
raise HTTPException(status_code=404, detail="Rule not found")
    for key, value in rule.model_dump().items():
setattr(db_rule, key, value)
await db.commit()
await db.refresh(db_rule)
return db_rule
@router.delete("/{rule_id}")
async def delete_rule(
rule_id: UUID,
db: AsyncSession = Depends(get_db)
):
rule = await db.get(Rule, rule_id)
if not rule:
raise HTTPException(status_code=404, detail="Rule not found")
await db.delete(rule)
await db.commit()
return {"detail": "Rule deleted"}

138
backend/app/routes/workouts.py Normal file

@@ -0,0 +1,138 @@
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import List
from uuid import UUID
import logging
from app.database import get_db, AsyncSessionLocal
from app.models.workout import Workout
from app.models.analysis import Analysis
from app.models.garmin_sync_log import GarminSyncLog
from app.models.plan import Plan
from app.schemas.workout import Workout as WorkoutSchema, WorkoutSyncStatus
from app.schemas.analysis import Analysis as AnalysisSchema
from app.services.workout_sync import WorkoutSyncService
from app.services.ai_service import AIService
from app.services.plan_evolution import PlanEvolutionService
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get("/", response_model=List[WorkoutSchema])
async def read_workouts(db: AsyncSession = Depends(get_db)):
"""Get all workouts."""
result = await db.execute(select(Workout))
return result.scalars().all()
@router.get("/{workout_id}", response_model=WorkoutSchema)
async def read_workout(workout_id: int, db: AsyncSession = Depends(get_db)):
"""Get a specific workout by ID."""
workout = await db.get(Workout, workout_id)
if not workout:
raise HTTPException(status_code=404, detail="Workout not found")
return workout
@router.post("/sync")
async def trigger_garmin_sync(
background_tasks: BackgroundTasks,
db: AsyncSession = Depends(get_db)
):
"""Trigger background sync of recent Garmin activities."""
sync_service = WorkoutSyncService(db)
background_tasks.add_task(sync_service.sync_recent_activities, days_back=14)
return {"message": "Garmin sync started"}
@router.get("/sync-status", response_model=WorkoutSyncStatus)
async def get_sync_status(db: AsyncSession = Depends(get_db)):
"""Get the latest sync status."""
result = await db.execute(
select(GarminSyncLog).order_by(GarminSyncLog.created_at.desc()).limit(1)
)
sync_log = result.scalar_one_or_none()
if not sync_log:
return WorkoutSyncStatus(status="never_synced")
return sync_log
@router.post("/{workout_id}/analyze")
async def analyze_workout(
    workout_id: UUID,
background_tasks: BackgroundTasks,
db: AsyncSession = Depends(get_db)
):
"""Trigger AI analysis of a specific workout."""
workout = await db.get(Workout, workout_id)
if not workout:
raise HTTPException(status_code=404, detail="Workout not found")
    background_tasks.add_task(analyze_and_store_workout, workout_id)
    return {"message": "Analysis started", "workout_id": str(workout_id)}
async def analyze_and_store_workout(workout_id: UUID):
    """Background task to analyze a workout and store the results.
    Opens its own session because the request-scoped one is closed before
    background tasks run."""
    async with AsyncSessionLocal() as db:
        try:
            workout = await db.get(Workout, workout_id)
            if not workout:
                return
            # Get the current plan if the workout is associated with one
            plan = None
            if workout.plan_id:
                plan = await db.get(Plan, workout.plan_id)
            # Analyze workout
            ai_service = AIService(db)
            analysis_result = await ai_service.analyze_workout(
                workout, plan.jsonb_plan if plan else None
            )
            # Store analysis
            analysis = Analysis(
                workout_id=workout.id,
                jsonb_feedback=analysis_result.get("feedback", {}),
                suggestions=analysis_result.get("suggestions", {})
            )
            db.add(analysis)
            await db.commit()
        except Exception as e:
            # Log the error but don't crash the background task
            logger.error(f"Error analyzing workout {workout_id}: {e}")
@router.get("/{workout_id}/analyses", response_model=List[AnalysisSchema])
async def read_workout_analyses(workout_id: int, db: AsyncSession = Depends(get_db)):
"""Get all analyses for a specific workout."""
workout = await db.get(Workout, workout_id)
if not workout:
raise HTTPException(status_code=404, detail="Workout not found")
return workout.analyses
@router.post("/analyses/{analysis_id}/approve")
async def approve_analysis(
analysis_id: int,
db: AsyncSession = Depends(get_db)
):
"""Approve analysis suggestions and trigger plan evolution."""
analysis = await db.get(Analysis, analysis_id)
if not analysis:
raise HTTPException(status_code=404, detail="Analysis not found")
analysis.approved = True
# Trigger plan evolution if suggestions exist and workout has a plan
if analysis.suggestions and analysis.workout.plan_id:
evolution_service = PlanEvolutionService(db)
current_plan = await db.get(Plan, analysis.workout.plan_id)
if current_plan:
new_plan = await evolution_service.evolve_plan_from_analysis(
analysis, current_plan
)
await db.commit()
return {"message": "Analysis approved", "new_plan_id": new_plan.id if new_plan else None}
await db.commit()
return {"message": "Analysis approved"}

25
backend/app/schemas/analysis.py Normal file

@@ -0,0 +1,25 @@
from pydantic import BaseModel
from typing import Optional, Dict, Any
from uuid import UUID
class AnalysisBase(BaseModel):
    workout_id: UUID
    analysis_type: str = 'workout_review'
    jsonb_feedback: Optional[Dict[str, Any]] = None
    suggestions: Optional[Dict[str, Any]] = None
    approved: bool = False
class AnalysisCreate(AnalysisBase):
    pass
class Analysis(AnalysisBase):
    id: UUID
    class Config:
        from_attributes = True  # pydantic v2 rename of orm_mode
class AnalysisUpdate(BaseModel):
    approved: bool

25
backend/app/schemas/gpx.py Normal file

@@ -0,0 +1,25 @@
from pydantic import BaseModel
from typing import Optional, List
from uuid import UUID
class GPXData(BaseModel):
    total_distance: float
    elevation_gain: float
    points: List[dict]
class RouteCreate(BaseModel):
    name: str
    description: Optional[str] = None
    total_distance: float
    elevation_gain: float
    gpx_file_path: str
class Route(RouteCreate):
    id: UUID
    class Config:
        from_attributes = True  # pydantic v2 rename of orm_mode

19
backend/app/schemas/plan.py Normal file

@@ -0,0 +1,19 @@
from pydantic import BaseModel
from datetime import datetime
from typing import List, Optional
from uuid import UUID
class PlanBase(BaseModel):
user_id: UUID
start_date: datetime
end_date: datetime
goal: str
class PlanCreate(PlanBase):
rule_ids: List[UUID]
class Plan(PlanBase):
id: UUID
    class Config:
        from_attributes = True  # pydantic v2 rename of orm_mode

30
backend/app/schemas/prompt.py Normal file

@@ -0,0 +1,30 @@
from pydantic import BaseModel
from typing import Optional
from datetime import datetime
from uuid import UUID
class PromptBase(BaseModel):
action_type: str
model: Optional[str] = None
prompt_text: str
version: int = 1
active: bool = True
class PromptCreate(BaseModel):
action_type: str
prompt_text: str
model: Optional[str] = None
class PromptUpdate(BaseModel):
prompt_text: Optional[str] = None
active: Optional[bool] = None
class Prompt(PromptBase):
    id: UUID
    created_at: datetime
    class Config:
        from_attributes = True  # pydantic v2 rename of orm_mode

17
backend/app/schemas/rule.py Normal file

@@ -0,0 +1,17 @@
from pydantic import BaseModel
from typing import Dict, Any
from uuid import UUID
class RuleBase(BaseModel):
    # Mirrors the Rule model: structured rule data lives in jsonb_rules
    name: str
    jsonb_rules: Dict[str, Any]
    user_defined: bool = True
    version: int = 1
class RuleCreate(RuleBase):
    pass
class Rule(RuleBase):
    id: UUID
    class Config:
        from_attributes = True  # pydantic v2 rename of orm_mode

40
backend/app/schemas/workout.py Normal file

@@ -0,0 +1,40 @@
from pydantic import BaseModel
from typing import Optional, Dict, Any
from datetime import datetime
from uuid import UUID
class WorkoutBase(BaseModel):
garmin_activity_id: str
activity_type: Optional[str] = None
start_time: datetime
duration_seconds: Optional[int] = None
distance_m: Optional[float] = None
avg_hr: Optional[int] = None
max_hr: Optional[int] = None
avg_power: Optional[float] = None
max_power: Optional[float] = None
avg_cadence: Optional[float] = None
elevation_gain_m: Optional[float] = None
metrics: Optional[Dict[str, Any]] = None
class WorkoutCreate(WorkoutBase):
    plan_id: Optional[UUID] = None
class Workout(WorkoutBase):
    id: UUID
    plan_id: Optional[UUID] = None
    class Config:
        from_attributes = True  # pydantic v2 rename of orm_mode
class WorkoutSyncStatus(BaseModel):
status: str
last_sync_time: Optional[datetime] = None
activities_synced: int = 0
error_message: Optional[str] = None
    class Config:
        from_attributes = True

130
backend/app/services/ai_service.py Normal file

@@ -0,0 +1,130 @@
import os
import asyncio
from typing import Dict, Any, List, Optional
import httpx
import json
from app.services.prompt_manager import PromptManager
from app.models.workout import Workout
import logging
logger = logging.getLogger(__name__)
class AIService:
"""Service for AI-powered analysis and plan generation."""
def __init__(self, db_session):
self.db = db_session
self.prompt_manager = PromptManager(db_session)
self.api_key = os.getenv("OPENROUTER_API_KEY")
self.model = os.getenv("AI_MODEL", "anthropic/claude-3-sonnet-20240229")
self.base_url = "https://openrouter.ai/api/v1"
async def analyze_workout(self, workout: Workout, plan: Optional[Dict] = None) -> Dict[str, Any]:
"""Analyze a workout using AI and generate feedback."""
prompt_template = await self.prompt_manager.get_active_prompt("workout_analysis")
if not prompt_template:
raise ValueError("No active workout analysis prompt found")
# Build context from workout data
workout_context = {
"activity_type": workout.activity_type,
"duration_minutes": workout.duration_seconds / 60 if workout.duration_seconds else 0,
"distance_km": workout.distance_m / 1000 if workout.distance_m else 0,
"avg_hr": workout.avg_hr,
"avg_power": workout.avg_power,
"elevation_gain": workout.elevation_gain_m,
"planned_workout": plan
}
prompt = prompt_template.format(**workout_context)
response = await self._make_ai_request(prompt)
return self._parse_workout_analysis(response)
async def generate_plan(self, rules: List[Dict], goals: Dict[str, Any]) -> Dict[str, Any]:
"""Generate a training plan using AI."""
prompt_template = await self.prompt_manager.get_active_prompt("plan_generation")
context = {
"rules": rules,
"goals": goals,
"current_fitness_level": goals.get("fitness_level", "intermediate")
}
prompt = prompt_template.format(**context)
response = await self._make_ai_request(prompt)
return self._parse_plan_response(response)
async def parse_rules_from_natural_language(self, natural_language: str) -> Dict[str, Any]:
"""Parse natural language rules into structured format."""
prompt_template = await self.prompt_manager.get_active_prompt("rule_parsing")
prompt = prompt_template.format(user_rules=natural_language)
response = await self._make_ai_request(prompt)
return self._parse_rules_response(response)
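    async def evolve_plan(self, evolution_context: Dict[str, Any]) -> Dict[str, Any]:
        """Evolve an existing plan from approved analysis feedback.
        Minimal sketch for the call made by PlanEvolutionService; it assumes a
        versioned "plan_evolution" prompt exists, mirroring the other
        prompt-driven methods in this class."""
        prompt_template = await self.prompt_manager.get_active_prompt("plan_evolution")
        if not prompt_template:
            raise ValueError("No active plan evolution prompt found")
        prompt = prompt_template.format(**evolution_context)
        response = await self._make_ai_request(prompt)
        return self._parse_plan_response(response)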
async def _make_ai_request(self, prompt: str) -> str:
"""Make async request to OpenRouter API with retry logic."""
async with httpx.AsyncClient() as client:
for attempt in range(3): # Simple retry logic
try:
response = await client.post(
f"{self.base_url}/chat/completions",
headers={
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
},
json={
"model": self.model,
"messages": [{"role": "user", "content": prompt}],
"max_tokens": 2000,
},
timeout=30.0
)
response.raise_for_status()
data = response.json()
return data["choices"][0]["message"]["content"]
except Exception as e:
if attempt == 2: # Last attempt
logger.error(f"AI request failed after 3 attempts: {str(e)}")
raise AIServiceError(f"AI request failed after 3 attempts: {str(e)}")
await asyncio.sleep(2 ** attempt) # Exponential backoff
def _parse_workout_analysis(self, response: str) -> Dict[str, Any]:
"""Parse AI response for workout analysis."""
try:
# Assume AI returns JSON
            clean_response = response.strip()
            if clean_response.startswith("```json"):
                # Strip the opening fence and the closing fence if present
                clean_response = clean_response.removeprefix("```json").removesuffix("```").strip()
return json.loads(clean_response)
except json.JSONDecodeError:
return {"raw_analysis": response, "structured": False}
def _parse_plan_response(self, response: str) -> Dict[str, Any]:
"""Parse AI response for plan generation."""
try:
            clean_response = response.strip()
            if clean_response.startswith("```json"):
                clean_response = clean_response.removeprefix("```json").removesuffix("```").strip()
return json.loads(clean_response)
except json.JSONDecodeError:
return {"raw_plan": response, "structured": False}
def _parse_rules_response(self, response: str) -> Dict[str, Any]:
"""Parse AI response for rule parsing."""
try:
            clean_response = response.strip()
            if clean_response.startswith("```json"):
                clean_response = clean_response.removeprefix("```json").removesuffix("```").strip()
return json.loads(clean_response)
except json.JSONDecodeError:
return {"raw_rules": response, "structured": False}
class AIServiceError(Exception):
"""Raised when AI service requests fail."""
pass

84
backend/app/services/garmin.py Normal file

@@ -0,0 +1,84 @@
import os
import garth
from typing import List, Dict, Any, Optional
from datetime import datetime, timedelta
import logging
logger = logging.getLogger(__name__)
class GarminService:
"""Service for interacting with Garmin Connect API."""
def __init__(self):
self.username = os.getenv("GARMIN_USERNAME")
self.password = os.getenv("GARMIN_PASSWORD")
self.client: Optional[garth.Client] = None
self.session_dir = "/app/data/sessions"
# Ensure session directory exists
os.makedirs(self.session_dir, exist_ok=True)
    async def authenticate(self) -> bool:
        """Authenticate with Garmin Connect and persist the session."""
        if not self.client:
            self.client = garth.Client()
        try:
            # Try to load an existing saved session
            self.client.load(self.session_dir)
            logger.info("Loaded existing Garmin session")
            return True
        except Exception:
            # Fresh authentication required; garth's login/dump are synchronous
            try:
                self.client.login(self.username, self.password)
                self.client.dump(self.session_dir)
                logger.info("Successfully authenticated with Garmin Connect")
                return True
            except Exception as e:
                logger.error(f"Garmin authentication failed: {str(e)}")
                raise GarminAuthError(f"Authentication failed: {str(e)}")
    async def get_activities(self, limit: int = 10, start_date: Optional[datetime] = None) -> List[Dict[str, Any]]:
        """Fetch recent activities from Garmin Connect."""
        if not self.client:
            await self.authenticate()
        if not start_date:
            start_date = datetime.now() - timedelta(days=7)
        try:
            # garth exposes the Connect API via connectapi(); the endpoint and
            # params below are assumed from Garmin's activity list service
            activities = self.client.connectapi(
                "/activitylist-service/activities/search/activities",
                params={"limit": limit, "startDate": start_date.strftime("%Y-%m-%d")},
            )
            logger.info(f"Fetched {len(activities)} activities from Garmin")
            return activities
        except Exception as e:
            logger.error(f"Failed to fetch activities: {str(e)}")
            raise GarminAPIError(f"Failed to fetch activities: {str(e)}")
    async def get_activity_details(self, activity_id: str) -> Dict[str, Any]:
        """Get detailed activity data including metrics."""
        if not self.client:
            await self.authenticate()
        try:
            # Endpoint assumed from Garmin's activity service
            details = self.client.connectapi(f"/activity-service/activity/{activity_id}")
            logger.info(f"Fetched details for activity {activity_id}")
            return details
        except Exception as e:
            logger.error(f"Failed to fetch activity details for {activity_id}: {str(e)}")
            raise GarminAPIError(f"Failed to fetch activity details: {str(e)}")
def is_authenticated(self) -> bool:
"""Check if we have a valid authenticated session."""
return self.client is not None
class GarminAuthError(Exception):
"""Raised when Garmin authentication fails."""
pass
class GarminAPIError(Exception):
"""Raised when Garmin API calls fail."""
pass

62
backend/app/services/gpx.py Normal file

@@ -0,0 +1,62 @@
import os
import uuid
import logging
from fastapi import UploadFile, HTTPException
import gpxpy
from app.config import settings
logger = logging.getLogger(__name__)
async def store_gpx_file(file: UploadFile) -> str:
"""Store uploaded GPX file and return path"""
    try:
        file_ext = os.path.splitext(file.filename or "")[1]
        if file_ext.lower() != '.gpx':
            raise HTTPException(status_code=400, detail="Invalid file type")
        file_name = f"{uuid.uuid4()}{file_ext}"
        file_path = os.path.join(settings.GPX_STORAGE_PATH, file_name)
        # Ensure storage directory exists
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        # Save file
        with open(file_path, "wb") as f:
            f.write(await file.read())
        return file_path
    except HTTPException:
        # Re-raise as-is so the 400 above isn't converted into a 500
        raise
    except Exception as e:
        logger.error(f"Error storing GPX file: {e}")
        raise HTTPException(status_code=500, detail="Error storing file")
async def parse_gpx(file_path: str) -> dict:
"""Parse GPX file and extract key metrics"""
try:
with open(file_path, 'r') as f:
gpx = gpxpy.parse(f)
        total_distance = 0.0
        elevation_gain = 0.0
        points = []
        for track in gpx.tracks:
            for segment in track.segments:
                total_distance += segment.length_3d()
                for i in range(1, len(segment.points)):
                    prev, curr = segment.points[i-1], segment.points[i]
                    # Points without elevation data contribute no gain
                    if prev.elevation is not None and curr.elevation is not None:
                        elevation_gain += max(0, curr.elevation - prev.elevation)
                # Extend rather than assign so points from every segment are kept
                points.extend({
                    'lat': point.latitude,
                    'lon': point.longitude,
                    'ele': point.elevation,
                    'time': point.time.isoformat() if point.time else None
                } for point in segment.points)
        return {
            'total_distance': total_distance,
            'elevation_gain': elevation_gain,
            'points': points
        }
except Exception as e:
logger.error(f"Error parsing GPX file: {e}")
raise HTTPException(status_code=500, detail="Error parsing GPX file")

74
backend/app/services/plan_evolution.py Normal file

@@ -0,0 +1,74 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.services.ai_service import AIService
from app.models.analysis import Analysis
from app.models.plan import Plan
from typing import Optional
from uuid import UUID
import logging
logger = logging.getLogger(__name__)
class PlanEvolutionService:
"""Service for evolving training plans based on workout analysis."""
def __init__(self, db: AsyncSession):
self.db = db
self.ai_service = AIService(db)
async def evolve_plan_from_analysis(
self,
analysis: Analysis,
current_plan: Plan
    ) -> Optional[Plan]:
"""Create a new plan version based on workout analysis."""
if not analysis.approved:
return None
suggestions = analysis.suggestions
if not suggestions:
return None
# Generate new plan incorporating suggestions
evolution_context = {
"current_plan": current_plan.jsonb_plan,
"workout_analysis": analysis.jsonb_feedback,
"suggestions": suggestions,
"evolution_type": "workout_feedback"
}
new_plan_data = await self.ai_service.evolve_plan(evolution_context)
# Create new plan version
new_plan = Plan(
jsonb_plan=new_plan_data,
version=current_plan.version + 1,
parent_plan_id=current_plan.id
)
self.db.add(new_plan)
await self.db.commit()
await self.db.refresh(new_plan)
logger.info(f"Created new plan version {new_plan.version} from analysis {analysis.id}")
return new_plan
    async def get_plan_evolution_history(self, plan_id: UUID) -> list[Plan]:
"""Get the evolution history for a plan."""
result = await self.db.execute(
select(Plan)
.where(
(Plan.id == plan_id) |
(Plan.parent_plan_id == plan_id)
)
.order_by(Plan.version)
)
return result.scalars().all()
async def get_current_active_plan(self) -> Plan:
"""Get the most recent active plan."""
result = await self.db.execute(
select(Plan)
.order_by(Plan.version.desc())
.limit(1)
)
return result.scalar_one_or_none()

92
backend/app/services/prompt_manager.py Normal file

@@ -0,0 +1,92 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, update, func
from app.models.prompt import Prompt
from typing import Optional
from uuid import UUID
import logging
logger = logging.getLogger(__name__)
class PromptManager:
"""Service for managing AI prompts with versioning."""
def __init__(self, db: AsyncSession):
self.db = db
    async def get_active_prompt(self, action_type: str, model: Optional[str] = None) -> Optional[str]:
"""Get the active prompt for a specific action type."""
query = select(Prompt).where(
Prompt.action_type == action_type,
Prompt.active == True
)
if model:
query = query.where(Prompt.model == model)
        result = await self.db.execute(query.order_by(Prompt.version.desc()).limit(1))
prompt = result.scalar_one_or_none()
return prompt.prompt_text if prompt else None
async def create_prompt_version(
self,
action_type: str,
prompt_text: str,
        model: Optional[str] = None
) -> Prompt:
"""Create a new version of a prompt."""
# Deactivate previous versions
await self.db.execute(
update(Prompt)
.where(Prompt.action_type == action_type)
.values(active=False)
)
# Get next version number
result = await self.db.execute(
select(func.max(Prompt.version))
.where(Prompt.action_type == action_type)
)
max_version = result.scalar() or 0
# Create new prompt
new_prompt = Prompt(
action_type=action_type,
model=model,
prompt_text=prompt_text,
version=max_version + 1,
active=True
)
self.db.add(new_prompt)
await self.db.commit()
await self.db.refresh(new_prompt)
logger.info(f"Created new prompt version {new_prompt.version} for {action_type}")
return new_prompt
async def get_prompt_history(self, action_type: str) -> list[Prompt]:
"""Get all versions of prompts for an action type."""
result = await self.db.execute(
select(Prompt)
.where(Prompt.action_type == action_type)
.order_by(Prompt.version.desc())
)
return result.scalars().all()
    async def activate_prompt_version(self, prompt_id: UUID) -> bool:
"""Activate a specific prompt version."""
# First deactivate all prompts for this action type
prompt = await self.db.get(Prompt, prompt_id)
if not prompt:
return False
await self.db.execute(
update(Prompt)
.where(Prompt.action_type == prompt.action_type)
.values(active=False)
)
# Activate the specific version
prompt.active = True
await self.db.commit()
logger.info(f"Activated prompt version {prompt.version} for {prompt.action_type}")
return True

90
backend/app/services/workout_sync.py Normal file

@@ -0,0 +1,90 @@
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from typing import Any, Dict
from app.services.garmin import GarminService, GarminAPIError
from app.models.workout import Workout
from app.models.garmin_sync_log import GarminSyncLog
from datetime import datetime, timedelta
import logging
class WorkoutSyncService:
"""Service for syncing Garmin activities to database."""
def __init__(self, db: AsyncSession):
self.db = db
self.garmin_service = GarminService()
async def sync_recent_activities(self, days_back: int = 7) -> int:
"""Sync recent Garmin activities to database."""
try:
# Create sync log entry
sync_log = GarminSyncLog(status="in_progress")
self.db.add(sync_log)
await self.db.commit()
# Calculate start date
start_date = datetime.now() - timedelta(days=days_back)
# Fetch activities from Garmin
activities = await self.garmin_service.get_activities(
limit=50, start_date=start_date
)
synced_count = 0
for activity in activities:
                if await self.activity_exists(str(activity['activityId'])):
continue
# Parse and create workout
workout_data = await self.parse_activity_data(activity)
workout = Workout(**workout_data)
self.db.add(workout)
synced_count += 1
# Update sync log
sync_log.status = "success"
sync_log.activities_synced = synced_count
sync_log.last_sync_time = datetime.now()
await self.db.commit()
logger.info(f"Successfully synced {synced_count} activities")
return synced_count
except GarminAPIError as e:
sync_log.status = "error"
sync_log.error_message = str(e)
await self.db.commit()
logger.error(f"Garmin API error during sync: {str(e)}")
raise
except Exception as e:
sync_log.status = "error"
sync_log.error_message = str(e)
await self.db.commit()
logger.error(f"Unexpected error during sync: {str(e)}")
raise
async def activity_exists(self, garmin_activity_id: str) -> bool:
"""Check if activity already exists in database."""
result = await self.db.execute(
select(Workout).where(Workout.garmin_activity_id == garmin_activity_id)
)
return result.scalar_one_or_none() is not None
async def parse_activity_data(self, activity: Dict[str, Any]) -> Dict[str, Any]:
"""Parse Garmin activity data into workout model format."""
return {
"garmin_activity_id": activity['activityId'],
"activity_type": activity.get('activityType', {}).get('typeKey'),
"start_time": datetime.fromisoformat(activity['startTimeLocal'].replace('Z', '+00:00')),
"duration_seconds": activity.get('duration'),
"distance_m": activity.get('distance'),
"avg_hr": activity.get('averageHR'),
"max_hr": activity.get('maxHR'),
"avg_power": activity.get('avgPower'),
"max_power": activity.get('maxPower'),
"avg_cadence": activity.get('averageBikingCadenceInRevPerMinute'),
"elevation_gain_m": activity.get('elevationGain'),
"metrics": activity # Store full Garmin data as JSONB
}

5
backend/pytest.ini Normal file

@@ -0,0 +1,5 @@
[pytest]
testpaths = tests
addopts = -p no:warnings --verbose
python_files = test_*.py
log_cli = true

11
backend/requirements.txt Normal file

@@ -0,0 +1,11 @@
fastapi==0.110.0
uvicorn[standard]==0.29.0
python-dotenv==1.0.1
sqlalchemy==2.0.29
asyncpg==0.29.0  # async PostgreSQL driver used by the app and Alembic
psycopg2-binary==2.9.9
alembic==1.13.1
pydantic-settings==2.2.1
python-multipart==0.0.9
gpxpy==1.6.2  # GPX parsing library
garth==0.4.46  # Garmin Connect API client
httpx==0.25.2  # Async HTTP client for OpenRouter API

240
backend/scripts/backup_restore.py Normal file

@@ -0,0 +1,240 @@
#!/usr/bin/env python3
"""
Database backup and restore utilities for containerized deployments.
Ensures safe backup/restore operations with migration compatibility checks.
"""
import sys
import os
import subprocess
from pathlib import Path
from datetime import datetime
from typing import Optional
# Add backend directory to path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))
from app.database import get_database_url
class DatabaseManager:
"""Handles database backup and restore operations."""
def __init__(self, backup_dir: str = "/app/data/backups"):
self.backup_dir = Path(backup_dir)
self.backup_dir.mkdir(parents=True, exist_ok=True)
def get_db_connection_params(self):
"""Extract database connection parameters from URL."""
from urllib.parse import urlparse
db_url = get_database_url()
parsed = urlparse(db_url)
return {
'host': parsed.hostname,
'port': parsed.port or 5432,
'user': parsed.username,
'password': parsed.password,
'database': parsed.path.lstrip('/')
}
def create_backup(self, name: Optional[str] = None) -> str:
"""Create a database backup."""
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_name = name or f"backup_{timestamp}"
backup_file = self.backup_dir / f"{backup_name}.sql"
params = self.get_db_connection_params()
# Use pg_dump for backup
cmd = [
"pg_dump",
"-h", params['host'],
"-p", str(params['port']),
"-U", params['user'],
"-d", params['database'],
"-f", str(backup_file),
"--no-password",
"--format=custom", # Custom format for better compression
"--compress=9"
]
# Set password environment variable
env = os.environ.copy()
env['PGPASSWORD'] = params['password']
try:
print(f"Creating backup: {backup_file}")
result = subprocess.run(cmd, env=env, capture_output=True, text=True)
if result.returncode == 0:
print(f"✅ Backup created successfully: {backup_file}")
return str(backup_file)
else:
print(f"❌ Backup failed: {result.stderr}")
raise Exception(f"Backup failed: {result.stderr}")
except FileNotFoundError:
print("❌ pg_dump not found. Ensure PostgreSQL client tools are installed.")
raise
def restore_backup(self, backup_file: str, confirm: bool = False) -> None:
"""Restore database from backup."""
backup_path = Path(backup_file)
if not backup_path.exists():
raise FileNotFoundError(f"Backup file not found: {backup_file}")
if not confirm:
print(f"⚠️ This will overwrite the current database!")
response = input("Are you sure you want to continue? (yes/no): ")
if response.lower() != 'yes':
print("Restore cancelled.")
return
params = self.get_db_connection_params()
# Drop and recreate database to ensure clean restore
self._recreate_database()
        # Use pg_restore; the target database was just recreated, so restore
        # into it directly (--create would conflict with the existing database)
        cmd = [
            "pg_restore",
            "-h", params['host'],
            "-p", str(params['port']),
            "-U", params['user'],
            "-d", params['database'],
            "--no-password",
            "--clean",
            "--if-exists",
            str(backup_path)
        ]
env = os.environ.copy()
env['PGPASSWORD'] = params['password']
try:
print(f"Restoring from backup: {backup_file}")
result = subprocess.run(cmd, env=env, capture_output=True, text=True)
if result.returncode == 0:
print("✅ Database restored successfully")
else:
print(f"❌ Restore failed: {result.stderr}")
raise Exception(f"Restore failed: {result.stderr}")
except FileNotFoundError:
print("❌ pg_restore not found. Ensure PostgreSQL client tools are installed.")
raise
def _recreate_database(self):
"""Drop and recreate the database."""
params = self.get_db_connection_params()
# Connect to postgres database to drop/recreate target database
postgres_params = params.copy()
postgres_params['database'] = 'postgres'
drop_cmd = [
"psql",
"-h", postgres_params['host'],
"-p", str(postgres_params['port']),
"-U", postgres_params['user'],
"-d", postgres_params['database'],
"-c", f"DROP DATABASE IF EXISTS {params['database']};"
]
create_cmd = [
"psql",
"-h", postgres_params['host'],
"-p", str(postgres_params['port']),
"-U", postgres_params['user'],
"-d", postgres_params['database'],
"-c", f"CREATE DATABASE {params['database']};"
]
env = os.environ.copy()
env['PGPASSWORD'] = params['password']
for cmd in [drop_cmd, create_cmd]:
result = subprocess.run(cmd, env=env, capture_output=True, text=True)
if result.returncode != 0:
print(f"Database recreation step failed: {result.stderr}")
def list_backups(self):
"""List available backup files."""
backups = list(self.backup_dir.glob("*.sql"))
backups.sort(key=lambda x: x.stat().st_mtime, reverse=True)
if not backups:
print("No backup files found.")
return
print("Available backups:")
for backup in backups:
size = backup.stat().st_size / (1024 * 1024) # Size in MB
mtime = datetime.fromtimestamp(backup.stat().st_mtime)
print(".2f")
def cleanup_old_backups(self, keep_days: int = 30):
"""Remove backups older than specified days."""
from datetime import timedelta
cutoff = datetime.now() - timedelta(days=keep_days)
removed = []
for backup in self.backup_dir.glob("*.sql"):
if datetime.fromtimestamp(backup.stat().st_mtime) < cutoff:
backup.unlink()
removed.append(backup.name)
if removed:
print(f"Removed {len(removed)} old backups: {', '.join(removed)}")
else:
print("No old backups to remove.")
def main():
if len(sys.argv) < 2:
print("Usage: python backup_restore.py <command> [options]")
print("Commands:")
print(" backup [name] - Create a new backup")
print(" restore <file> [--yes] - Restore from backup")
print(" list - List available backups")
print(" cleanup [days] - Remove backups older than N days (default: 30)")
sys.exit(1)
manager = DatabaseManager()
command = sys.argv[1]
try:
if command == "backup":
name = sys.argv[2] if len(sys.argv) > 2 else None
manager.create_backup(name)
elif command == "restore":
if len(sys.argv) < 3:
print("Error: Please specify backup file to restore from")
sys.exit(1)
backup_file = sys.argv[2]
confirm = "--yes" in sys.argv
manager.restore_backup(backup_file, confirm)
elif command == "list":
manager.list_backups()
elif command == "cleanup":
days = int(sys.argv[2]) if len(sys.argv) > 2 else 30
manager.cleanup_old_backups(days)
else:
print(f"Unknown command: {command}")
sys.exit(1)
except Exception as e:
print(f"Error: {e}")
sys.exit(1)
if __name__ == "__main__":
main()

259
backend/scripts/migration_checker.py Normal file

@@ -0,0 +1,259 @@
#!/usr/bin/env python3
"""
Migration compatibility and version checker for containerized deployments.
Validates migration integrity and compatibility before deployments.
"""
import sys
import os
from pathlib import Path
from typing import Any, Dict
# Add backend directory to path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))
from alembic.config import Config
from alembic import command
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
from sqlalchemy import create_engine, text
from app.database import get_database_url
class MigrationChecker:
"""Validates migration compatibility and integrity."""
def __init__(self):
self.config = self._get_alembic_config()
self.script = ScriptDirectory.from_config(self.config)
def _get_alembic_config(self):
"""Get Alembic configuration."""
config = Config("alembic.ini")
config.set_main_option("sqlalchemy.url", get_database_url())
return config
def check_migration_files(self) -> Dict[str, bool]:
"""Check integrity of migration files."""
results = {
"files_exist": False,
"proper_ordering": False,
"no_duplicates": False,
"valid_syntax": False
}
try:
# Check if migration directory exists
versions_dir = Path("alembic/versions")
if not versions_dir.exists():
print("❌ Migration versions directory not found")
return results
# Get all migration files
migration_files = list(versions_dir.glob("*.py"))
if not migration_files:
print("⚠️ No migration files found")
results["files_exist"] = True # Empty is valid
return results
results["files_exist"] = True
# Check for duplicate revision numbers
revisions = []
for file_path in migration_files:
with open(file_path, 'r') as f:
content = f.read()
                # Extract this file's own revision; match on the start of the line
                # because "down_revision = " also contains "revision = "
                for line in content.split('\n'):
                    if line.startswith("revision = "):
                        rev = line.split("'")[1]
                        if rev in revisions:
                            print(f"❌ Duplicate revision found: {rev}")
                            return results
                        revisions.append(rev)
                        break
results["no_duplicates"] = True
# Validate migration ordering
try:
# Get ordered revisions from script directory
ordered_revisions = []
for rev in self.script.walk_revisions():
ordered_revisions.append(rev.revision)
# Check if our files match the ordering
if set(revisions) == set(ordered_revisions):
results["proper_ordering"] = True
else:
print("❌ Migration ordering mismatch")
return results
except Exception as e:
print(f"❌ Error checking migration ordering: {e}")
return results
# Basic syntax validation
            for file_path in migration_files:
                try:
                    with open(file_path) as f:
                        compile(f.read(), str(file_path), 'exec')
                except SyntaxError as e:
                    print(f"❌ Syntax error in {file_path}: {e}")
                    return results
results["valid_syntax"] = True
print("✅ All migration files are valid")
except Exception as e:
print(f"❌ Error checking migration files: {e}")
return results
    def check_database_state(self) -> Dict[str, Any]:
"""Check current database migration state."""
results = {
"connected": False,
"current_revision": None,
"head_revision": None,
"up_to_date": False,
"pending_migrations": []
}
try:
            engine = create_engine(get_database_url(sync=True))  # sync driver; asyncpg won't work here
with engine.connect() as conn:
results["connected"] = True
# Get current revision
context = MigrationContext.configure(conn)
current_rev = context.get_current_revision()
results["current_revision"] = current_rev
# Get head revision
head_rev = self.script.get_current_head()
results["head_revision"] = head_rev
# Check if up to date
results["up_to_date"] = current_rev == head_rev
                # Get pending migrations (revision hashes have no meaningful
                # ordering, so walk the graph between head and current instead)
                if not results["up_to_date"]:
                    results["pending_migrations"] = [
                        rev.revision
                        for rev in self.script.iterate_revisions("heads", current_rev)
                    ]
except Exception as e:
print(f"❌ Database connection error: {e}")
return results
def validate_deployment_readiness(self) -> bool:
"""Validate if deployment can proceed safely."""
print("🔍 Checking deployment readiness...")
# Check migration files
file_checks = self.check_migration_files()
all_files_good = all(file_checks.values())
# Check database state
db_checks = self.check_database_state()
db_connected = db_checks["connected"]
if not all_files_good:
print("❌ Migration files have issues")
return False
if not db_connected:
print("❌ Cannot connect to database")
return False
if not db_checks["up_to_date"]:
print(f"⚠️ Database not up to date. Current: {db_checks['current_revision']}, Head: {db_checks['head_revision']}")
print(f"Pending migrations: {db_checks['pending_migrations']}")
# For deployment, we might want to allow this if migrations will be run
print(" This is acceptable if migrations will be run during deployment")
return True
print("✅ Deployment readiness check passed")
return True
def generate_migration_report(self) -> str:
"""Generate a detailed migration status report."""
report = []
report.append("# Migration Status Report")
report.append("")
# File checks
report.append("## Migration Files")
file_checks = self.check_migration_files()
for check, status in file_checks.items():
status_icon = "" if status else ""
report.append(f"- {check}: {status_icon}")
# Database state
report.append("")
report.append("## Database State")
db_checks = self.check_database_state()
for check, value in db_checks.items():
if isinstance(value, list):
value = ", ".join(value) if value else "None"
report.append(f"- {check}: {value}")
# Deployment readiness
report.append("")
report.append("## Deployment Readiness")
ready = self.validate_deployment_readiness()
readiness_icon = "" if ready else ""
report.append(f"- Ready for deployment: {readiness_icon}")
return "\n".join(report)
def main():
if len(sys.argv) < 2:
print("Usage: python migration_checker.py <command>")
print("Commands:")
print(" check-files - Check migration file integrity")
print(" check-db - Check database migration state")
print(" validate-deploy - Validate deployment readiness")
print(" report - Generate detailed migration report")
sys.exit(1)
checker = MigrationChecker()
command = sys.argv[1]
try:
if command == "check-files":
results = checker.check_migration_files()
all_good = all(results.values())
print("✅ Files OK" if all_good else "❌ Files have issues")
sys.exit(0 if all_good else 1)
elif command == "check-db":
results = checker.check_database_state()
print(f"Connected: {'' if results['connected'] else ''}")
print(f"Up to date: {'' if results['up_to_date'] else ''}")
print(f"Current: {results['current_revision']}")
print(f"Head: {results['head_revision']}")
elif command == "validate-deploy":
ready = checker.validate_deployment_readiness()
sys.exit(0 if ready else 1)
elif command == "report":
report = checker.generate_migration_report()
print(report)
else:
print(f"Unknown command: {command}")
sys.exit(1)
except Exception as e:
print(f"Error: {e}")
sys.exit(1)
if __name__ == "__main__":
main()
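# Example invocations (a sketch; assumes the script is run from the backend/
# directory where alembic.ini lives, with the database reachable):
#   python migration_checker.py check-files       # exit code 1 on broken files
#   python migration_checker.py validate-deploy   # CI gate before rollout
#   python migration_checker.py report > migration_report.md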

View File

@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Migration rollback script for containerized deployments.
Provides safe rollback functionality with validation.
"""
import sys
import os
from pathlib import Path
# Add backend directory to path
backend_dir = Path(__file__).parent.parent
sys.path.insert(0, str(backend_dir))
from alembic.config import Config
from alembic import command
from alembic.migration import MigrationContext
from alembic.script import ScriptDirectory
import sqlalchemy as sa
from app.database import get_database_url
def get_alembic_config():
"""Get Alembic configuration."""
config = Config("alembic.ini")
config.set_main_option("sqlalchemy.url", get_database_url())
return config
def get_current_revision():
    """Get current database revision."""
    with sa.create_engine(get_database_url()).connect() as conn:
        context = MigrationContext.configure(conn)
        return context.get_current_revision()
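# Companion sketch: resolve the newest script revision so callers can see how
# far behind the database is before rolling back further. Uses only the
# ScriptDirectory API already imported above; the helper name is illustrative.
def get_head_revision():
    """Get the head revision from the migration scripts."""
    config = get_alembic_config()
    script = ScriptDirectory.from_config(config)
    return script.get_current_head()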
def rollback_migration(revision="-1"):
    """
    Roll back to the specified revision.
    Args:
        revision: Target revision (default: "-1", one step back from current)
    """
try:
print(f"Rolling back to revision: {revision}")
config = get_alembic_config()
command.downgrade(config, revision)
print("Rollback completed successfully")
# Verify rollback
current = get_current_revision()
print(f"Current revision after rollback: {current}")
except Exception as e:
print(f"Rollback failed: {e}")
sys.exit(1)
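# Pre-flight sketch: confirm a concrete target revision exists in the script
# directory before downgrading. Relative identifiers such as "-1" are resolved
# by Alembic itself and pass through unchecked; the helper name is illustrative.
def validate_rollback_target(revision):
    """Return True if the rollback target looks resolvable."""
    if "+" in revision or "-" in revision:
        return True  # relative identifier; let Alembic resolve it
    config = get_alembic_config()
    script = ScriptDirectory.from_config(config)
    try:
        script.get_revision(revision)  # raises if the id is unknown
        return True
    except Exception as e:
        print(f"Unknown revision {revision}: {e}")
        return False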
def list_migrations():
"""List available migrations."""
config = get_alembic_config()
script = ScriptDirectory.from_config(config)
print("Available migrations:")
for rev in script.walk_revisions():
print(f" {rev.revision}: {rev.doc}")
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage: python migration_rollback.py <command> [revision]")
print("Commands:")
print(" rollback [revision] - Rollback to revision (default: head:-1)")
print(" current - Show current revision")
print(" list - List available migrations")
sys.exit(1)
    # Bind the CLI argument to a name that does not shadow the
    # `alembic.command` module imported above.
    cmd = sys.argv[1]
    if cmd == "rollback":
        revision = sys.argv[2] if len(sys.argv) > 2 else "-1"
        rollback_migration(revision)
    elif cmd == "current":
        current = get_current_revision()
        print(f"Current revision: {current}")
    elif cmd == "list":
        list_migrations()
    else:
        print(f"Unknown command: {cmd}")
sys.exit(1)

View File

@@ -0,0 +1 @@
# Empty file to mark tests directory as Python package

36
backend/tests/conftest.py Normal file
View File

@@ -0,0 +1,36 @@
import pytest
from fastapi.testclient import TestClient
from app.main import app
from app.database import get_db, Base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
TEST_DATABASE_URL = "postgresql://postgres:postgres@localhost:5432/test_db"
@pytest.fixture(scope="session")
def test_engine():
engine = create_engine(TEST_DATABASE_URL)
Base.metadata.create_all(bind=engine)
yield engine
Base.metadata.drop_all(bind=engine)
@pytest.fixture
def db_session(test_engine):
connection = test_engine.connect()
transaction = connection.begin()
session = sessionmaker(autocommit=False, autoflush=False, bind=connection)()
yield session
session.close()
transaction.rollback()
connection.close()
@pytest.fixture
def client(db_session):
    def override_get_db():
        try:
            yield db_session
        finally:
            db_session.close()
    app.dependency_overrides[get_db] = override_get_db
    yield TestClient(app)
    # Reset the override so later tests see the real dependency wiring
    app.dependency_overrides.clear()
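# Usage sketch: a test module (e.g. tests/test_smoke.py, a hypothetical file)
# can request the `client` fixture directly; the "/" route is illustrative and
# may not exist in app.main:
#
#     def test_smoke(client):
#         response = client.get("/")
#         assert response.status_code in (200, 404)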

View File

@@ -0,0 +1,78 @@
import pytest
from pathlib import Path
from unittest.mock import AsyncMock, patch
from app.services.garmin import GarminService
from app.models.garmin_sync_log import GarminSyncLog, GarminSyncStatus
from app.models.workout import Workout  # assumed module path for the Workout model
from datetime import datetime, timedelta
@pytest.mark.asyncio
async def test_garmin_authentication_success(db_session):
"""Test successful Garmin Connect authentication"""
with patch('garth.Client') as mock_client:
mock_instance = mock_client.return_value
mock_instance.login = AsyncMock(return_value=True)
service = GarminService(db_session)
result = await service.authenticate("test_user", "test_pass")
assert result is True
mock_instance.login.assert_awaited_once_with("test_user", "test_pass")
@pytest.mark.asyncio
async def test_garmin_authentication_failure(db_session):
"""Test authentication failure handling"""
with patch('garth.Client') as mock_client:
mock_instance = mock_client.return_value
mock_instance.login = AsyncMock(side_effect=Exception("Invalid credentials"))
service = GarminService(db_session)
result = await service.authenticate("bad_user", "wrong_pass")
assert result is False
log_entry = db_session.query(GarminSyncLog).first()
assert log_entry.status == GarminSyncStatus.AUTH_FAILED
@pytest.mark.asyncio
async def test_activity_sync(db_session):
"""Test successful activity synchronization"""
with patch('garth.Client') as mock_client:
mock_instance = mock_client.return_value
mock_instance.connectapi = AsyncMock(return_value=[
{"activityId": 123, "startTime": "2024-01-01T08:00:00"}
])
service = GarminService(db_session)
await service.sync_activities()
# Verify workout created
workout = db_session.query(Workout).first()
assert workout.garmin_activity_id == 123
# Verify sync log updated
log_entry = db_session.query(GarminSyncLog).first()
assert log_entry.status == GarminSyncStatus.COMPLETED
@pytest.mark.asyncio
async def test_rate_limiting_handling(db_session):
"""Test API rate limit error handling"""
with patch('garth.Client') as mock_client:
mock_instance = mock_client.return_value
mock_instance.connectapi = AsyncMock(side_effect=Exception("Rate limit exceeded"))
service = GarminService(db_session)
result = await service.sync_activities()
assert result is False
log_entry = db_session.query(GarminSyncLog).first()
assert log_entry.status == GarminSyncStatus.FAILED
assert "Rate limit" in log_entry.error_message
@pytest.mark.asyncio
async def test_session_persistence(db_session):
"""Test session cookie persistence"""
service = GarminService(db_session)
# Store session
await service.store_session({"token": "test123"})
session = await service.load_session()
assert session == {"token": "test123"}
assert Path("/app/data/sessions/garmin_session.pickle").exists()
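@pytest.mark.asyncio
async def test_session_load_before_store(db_session):
    """Hedged sketch: with no session previously stored, load_session is
    assumed to return None rather than raise; adjust if the service's
    contract differs."""
    service = GarminService(db_session)
    session = await service.load_session()
    assert session is None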