Commit "sync" — mirror of https://github.com/sstent/aicyclingcoach-go.git
(synced 2026-04-04 20:13:01 +00:00)

CL_plan.md — 1389 lines changed (file diff suppressed because it is too large)
[unnamed file in mirror — likely backend/Dockerfile]

@@ -45,21 +45,23 @@ COPY . .
 RUN echo '#!/bin/bash\n\
 set -e\n\
 \n\
-# Run database migrations\n\
+# Run database migrations synchronously\n\
 echo "Running database migrations..."\n\
-alembic upgrade head\n\
+python -m alembic upgrade head\n\
 \n\
 # Verify migration success\n\
 echo "Verifying migration status..."\n\
-alembic current\n\
+python -m alembic current\n\
 \n\
 # Start the application\n\
 echo "Starting application..."\n\
 exec "$@"' > /app/entrypoint.sh && \
     chmod +x /app/entrypoint.sh

-# Create non-root user
-RUN useradd -m appuser && chown -R appuser:appuser /app
+# Create non-root user and logs directory
+RUN useradd -m appuser && \
+    mkdir -p /app/logs && \
+    chown -R appuser:appuser /app
 USER appuser

 # Expose application port
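Note: with this change, every container start runs `alembic upgrade head` (and `alembic current` as a sanity check) before handing off to the real command via `exec "$@"`. That is convenient for a single backend instance, but if several replicas start at once the concurrent migrations can race, so single-instance deployment (as in the compose files later in this commit) is the implicit assumption. Invoking the tools as `python -m alembic` also pins them to the image's interpreter rather than to whatever happens to be on PATH.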
[unnamed file in mirror — alembic.ini]

@@ -1,6 +1,6 @@
 [alembic]
 script_location = alembic
-sqlalchemy.url = postgresql+asyncpg://appuser:password@db:5432/cyclingdb
+sqlalchemy.url = postgresql+asyncpg://postgres:password@db:5432/cycling

 [loggers]
 keys = root
@@ -8,6 +8,9 @@ keys = root
 [handlers]
 keys = console

+[formatters]
+keys = generic
+
 [logger_root]
 level = WARN
 handlers = console
[unnamed file in mirror — alembic/env.py]

@@ -9,8 +9,8 @@ import os
 sys.path.append(os.getcwd())

 # Import base and models
-from app.models import Base
-from app.database import DATABASE_URL
+from app.models.base import Base
+from app.config import settings

 config = context.config
 fileConfig(config.config_file_name)
@@ -30,7 +30,7 @@ def run_migrations_offline():
     with context.begin_transaction():
         context.run_migrations()

-def run_migrations_online():
+async def run_migrations_online():
     """Run migrations in 'online' mode."""
     connectable = AsyncEngine(
         engine_from_config(
@@ -38,16 +38,17 @@ def run_migrations_online():
             prefix="sqlalchemy.",
             poolclass=pool.NullPool,
             future=True,
-            url=DATABASE_URL,
+            url=settings.DATABASE_URL,
         )
     )

     async with connectable.connect() as connection:
         await connection.run_sync(do_run_migrations)

-async def do_run_migrations(connection):
+def do_run_migrations(connection):
     context.configure(connection=connection, target_metadata=target_metadata)
-    await connection.run_sync(context.run_migrations)
+    with context.begin_transaction():
+        context.run_migrations()

 if context.is_offline_mode():
     run_migrations_offline()
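A note on the async Alembic pattern above: `AsyncConnection.run_sync()` takes a plain synchronous callable, which is why `do_run_migrations` loses its `async` and gains the explicit `begin_transaction()` block. The tail of a typical async `env.py` then has to drive the coroutine itself — a minimal sketch of that dispatch (the `asyncio.run` tail is an assumption; the mirrored hunk cuts off before it):

    import asyncio

    # do_run_migrations stays synchronous: Alembic's context.run_migrations()
    # is sync, and run_sync() bridges it onto the async connection.
    def do_run_migrations(connection):
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()

    if context.is_offline_mode():
        run_migrations_offline()
    else:
        # run_migrations_online() is now a coroutine, so something must await it.
        asyncio.run(run_migrations_online())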
[unnamed file in mirror — likely backend/app/config.py]

@@ -1,11 +1,11 @@
-from pydantic_settings import BaseSettings
+from pydantic_settings import BaseSettings, SettingsConfigDict

 class Settings(BaseSettings):
     DATABASE_URL: str
     GPX_STORAGE_PATH: str
     AI_MODEL: str = "openrouter/auto"
+    API_KEY: str

-    class Config:
-        env_file = ".env"
+    model_config = SettingsConfigDict(env_file=".env", extra="ignore")

 settings = Settings()
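The `class Config` → `model_config = SettingsConfigDict(...)` change is the pydantic-settings 2.x idiom, and `extra="ignore"` stops unrelated variables in `.env` (e.g. `POSTGRES_PASSWORD`) from failing validation. A self-contained sketch of the same pattern:

    import os
    from pydantic_settings import BaseSettings, SettingsConfigDict

    class Settings(BaseSettings):
        DATABASE_URL: str
        API_KEY: str
        AI_MODEL: str = "openrouter/auto"

        # pydantic-settings 2.x: configuration lives on model_config,
        # not on a nested `class Config`.
        model_config = SettingsConfigDict(env_file=".env", extra="ignore")

    # Values come from the environment (or .env); unknown keys are ignored.
    os.environ["DATABASE_URL"] = "postgresql+asyncpg://postgres:password@db:5432/cycling"
    os.environ["API_KEY"] = "dev-key"
    settings = Settings()
    print(settings.AI_MODEL)  # -> "openrouter/auto"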
[unnamed file in mirror — likely backend/app/database.py]

@@ -1,7 +1,8 @@
+import os
 from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
 from sqlalchemy.orm import declarative_base, sessionmaker

-DATABASE_URL = "postgresql+asyncpg://appuser:password@db:5432/cyclingdb"
+DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://postgres:password@db:5432/cycling")

 engine = create_async_engine(DATABASE_URL, echo=True)
 AsyncSessionLocal = sessionmaker(
[unnamed file in mirror — likely backend/app/main.py]

@@ -1,6 +1,9 @@
+import logging
+import json
+from datetime import datetime
 from fastapi import FastAPI, Depends, Request, HTTPException
 from fastapi.middleware.cors import CORSMiddleware
-from .database import get_db, get_database_url
+from .database import get_db
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy import text
 from alembic.config import Config
@@ -14,6 +17,45 @@ from .routes import prompts as prompt_routes
 from .routes import dashboard as dashboard_routes
 from .config import settings

+# Configure structured JSON logging
+class StructuredJSONFormatter(logging.Formatter):
+    def format(self, record):
+        log_data = {
+            "timestamp": datetime.utcnow().isoformat(),
+            "level": record.levelname,
+            "message": record.getMessage(),
+            "logger": record.name,
+            "module": record.module,
+            "function": record.funcName,
+            "line": record.lineno,
+            "thread": record.threadName,
+        }
+        if hasattr(record, 'extra'):
+            log_data.update(record.extra)
+        if record.exc_info:
+            log_data["exception"] = self.formatException(record.exc_info)
+        return json.dumps(log_data)
+
+# Set up logging
+logger = logging.getLogger("ai_cycling_coach")
+logger.setLevel(logging.INFO)
+
+# Create console handler with structured JSON format
+console_handler = logging.StreamHandler()
+console_handler.setFormatter(StructuredJSONFormatter())
+logger.addHandler(console_handler)
+
+# Configure rotating file handler
+from logging.handlers import RotatingFileHandler
+file_handler = RotatingFileHandler(
+    filename="/app/logs/app.log",
+    maxBytes=10*1024*1024,  # 10 MB
+    backupCount=5,
+    encoding='utf-8'
+)
+file_handler.setFormatter(StructuredJSONFormatter())
+logger.addHandler(file_handler)
+
 app = FastAPI(
     title="AI Cycling Coach API",
     description="Backend service for AI-assisted cycling training platform",
@@ -49,61 +91,16 @@ app.include_router(workout_routes.router, prefix="/workouts", tags=["workouts"])
 app.include_router(prompt_routes.router, prefix="/prompts", tags=["prompts"])
 app.include_router(dashboard_routes.router, prefix="/api/dashboard", tags=["dashboard"])

-async def check_migration_status():
-    """Check if database migrations are up to date."""
-    try:
-        # Get Alembic configuration
-        config = Config("alembic.ini")
-        config.set_main_option("sqlalchemy.url", get_database_url())
-        script = ScriptDirectory.from_config(config)
-
-        # Get current database revision
-        from sqlalchemy import create_engine
-        engine = create_engine(get_database_url())
-        with engine.connect() as conn:
-            context = MigrationContext.configure(conn)
-            current_rev = context.get_current_revision()
-
-        # Get head revision
-        head_rev = script.get_current_head()
-
-        return {
-            "current_revision": current_rev,
-            "head_revision": head_rev,
-            "migrations_up_to_date": current_rev == head_rev
-        }
-    except Exception as e:
-        return {
-            "error": str(e),
-            "migrations_up_to_date": False
-        }
-
 @app.get("/health")
-async def health_check(db: AsyncSession = Depends(get_db)):
-    """Enhanced health check with migration verification."""
-    health_status = {
+async def health_check():
+    """Simplified health check endpoint."""
+    return {
         "status": "healthy",
         "version": "0.1.0",
-        "timestamp": "2024-01-15T10:30:00Z"  # Should be dynamic
+        "timestamp": datetime.utcnow().isoformat()
     }

-    # Database connection check
-    try:
-        await db.execute(text("SELECT 1"))
-        health_status["database"] = "connected"
-    except Exception as e:
-        health_status["status"] = "unhealthy"
-        health_status["database"] = f"error: {str(e)}"
-
-    # Migration status check
-    migration_info = await check_migration_status()
-    health_status["migrations"] = migration_info
-
-    if not migration_info.get("migrations_up_to_date", False):
-        health_status["status"] = "unhealthy"
-
-    return health_status
-
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=8000)
+    logger.info("Starting AI Cycling Coach API server")
+    uvicorn.run(app, host="0.0.0.0", port=8000, log_config=None)
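One caveat about `StructuredJSONFormatter`: the stdlib's `logger.error(msg, extra={...})` merges the dict's keys directly onto the `LogRecord` as attributes — it does not attach a dict named `extra` — so the `hasattr(record, 'extra')` branch will normally never fire for calls like those in the health-monitor hunk below. A hedged sketch of a formatter that picks up the merged attributes instead (also using timezone-aware time, since `datetime.utcnow()` is deprecated from Python 3.12):

    import json
    import logging
    from datetime import datetime, timezone

    # Attributes every LogRecord carries; anything else arrived via extra=.
    _STANDARD = set(vars(logging.LogRecord("", 0, "", 0, "", (), None))) | {"message", "asctime"}

    class StructuredJSONFormatter(logging.Formatter):
        def format(self, record):
            log_data = {
                "timestamp": datetime.now(timezone.utc).isoformat(),
                "level": record.levelname,
                "message": record.getMessage(),
                "logger": record.name,
            }
            # extra={...} kwargs land directly on the record, not under .extra
            log_data.update({k: v for k, v in vars(record).items() if k not in _STANDARD})
            if record.exc_info:
                log_data["exception"] = self.formatException(record.exc_info)
            return json.dumps(log_data, default=str)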
[unnamed file in mirror — a backend models module]

@@ -1,4 +1,4 @@
-from sqlalchemy import Column, Integer, String, ForeignKey, JSON, Boolean, DateTime
+from sqlalchemy import Column, Integer, String, ForeignKey, JSON, Boolean, DateTime, func
 from sqlalchemy.orm import relationship
 from .base import BaseModel

backend/app/models/plan_rule.py (new file, 12 lines)

@@ -0,0 +1,12 @@
+from sqlalchemy import Column, Integer, ForeignKey
+from sqlalchemy.orm import relationship
+from .base import BaseModel
+
+class PlanRule(BaseModel):
+    __tablename__ = "plan_rules"
+
+    plan_id = Column(Integer, ForeignKey('plans.id'), primary_key=True)
+    rule_id = Column(Integer, ForeignKey('rules.id'), primary_key=True)
+
+    plan = relationship("Plan", back_populates="rules")
+    rule = relationship("Rule", back_populates="plans")
[unnamed file in mirror — likely backend/app/models/rule.py]

@@ -1,5 +1,6 @@
-from sqlalchemy import Column, Integer, ForeignKey, Boolean
+from sqlalchemy import Column, Integer, ForeignKey, Boolean, String
 from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import relationship
 from .base import BaseModel

 class Rule(BaseModel):
[unnamed file in mirror — a backend monitoring routes module]

@@ -1,9 +1,54 @@
 from fastapi import APIRouter
+from fastapi.responses import PlainTextResponse, JSONResponse
 from app.services.health_monitor import HealthMonitor
+from prometheus_client import generate_latest, CONTENT_TYPE_LATEST, Gauge
+from pathlib import Path
+import json

 router = APIRouter()
 monitor = HealthMonitor()

+# Prometheus metrics
+SYNC_QUEUE = Gauge('sync_queue_size', 'Current Garmin sync queue size')
+PENDING_ANALYSES = Gauge('pending_analyses', 'Number of pending workout analyses')
+
 @router.get("/health")
 async def get_health():
     return monitor.check_system_health()
+
+@router.get("/metrics")
+async def prometheus_metrics():
+    # Update metrics with latest values
+    health_data = monitor.check_system_health()
+    SYNC_QUEUE.set(health_data['services'].get('sync_queue_size', 0))
+    PENDING_ANALYSES.set(health_data['services'].get('pending_analyses', 0))
+
+    return PlainTextResponse(
+        content=generate_latest(),
+        media_type=CONTENT_TYPE_LATEST
+    )
+
+@router.get("/dashboard/health", response_class=JSONResponse)
+async def health_dashboard():
+    """Health dashboard endpoint with aggregated monitoring data"""
+    health_data = monitor.check_system_health()
+
+    # Get recent logs (last 100 lines)
+    log_file = Path("/app/logs/app.log")
+    recent_logs = []
+    try:
+        with log_file.open() as f:
+            lines = f.readlines()[-100:]
+            recent_logs = [json.loads(line.strip()) for line in lines]
+    except FileNotFoundError:
+        pass
+
+    return {
+        "system": health_data,
+        "logs": recent_logs,
+        "statistics": {
+            "log_entries": len(recent_logs),
+            "error_count": sum(1 for log in recent_logs if log.get('level') == 'ERROR'),
+            "warning_count": sum(1 for log in recent_logs if log.get('level') == 'WARNING')
+        }
+    }
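The `/metrics` handler refreshes both gauges at scrape time, so gauge freshness equals scrape frequency — there is no background updater. `generate_latest()` with no argument serializes the default global registry, which is exactly where module-level `Gauge(...)` constructors register themselves. A minimal standalone sketch of that behavior (assumes prometheus_client is installed):

    from prometheus_client import Gauge, generate_latest

    # Module-level metrics self-register in the default REGISTRY;
    # constructing the same metric name twice raises ValueError.
    SYNC_QUEUE = Gauge('sync_queue_size', 'Current Garmin sync queue size')

    SYNC_QUEUE.set(7)
    print(generate_latest().decode())  # exposition text includes: sync_queue_size 7.0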
[unnamed file in mirror — likely backend/app/routes/workouts.py]

@@ -8,8 +8,9 @@ from app.models.workout import Workout
 from app.models.analysis import Analysis
 from app.models.garmin_sync_log import GarminSyncLog
 from app.models.plan import Plan
-from app.schemas.workout import Workout as WorkoutSchema, WorkoutSyncStatus
+from app.schemas.workout import Workout as WorkoutSchema, WorkoutSyncStatus, WorkoutMetric
 from app.schemas.analysis import Analysis as AnalysisSchema
+from app.schemas.plan import Plan as PlanSchema
 from app.services.workout_sync import WorkoutSyncService
 from app.services.ai_service import AIService
 from app.services.plan_evolution import PlanEvolutionService
@@ -32,7 +33,7 @@ async def read_workout(workout_id: int, db: AsyncSession = Depends(get_db)):
         raise HTTPException(status_code=404, detail="Workout not found")
     return workout

-@router.get("/{workout_id}/metrics", response_model=list[schemas.WorkoutMetric])
+@router.get("/{workout_id}/metrics", response_model=list[WorkoutMetric])
 async def get_workout_metrics(
     workout_id: int,
     db: AsyncSession = Depends(get_db)
@@ -153,7 +154,7 @@ async def approve_analysis(
     return {"message": "Analysis approved"}


-@router.get("/plans/{plan_id}/evolution", response_model=List[schemas.Plan])
+@router.get("/plans/{plan_id}/evolution", response_model=List[PlanSchema])
 async def get_plan_evolution(
     plan_id: int,
     db: AsyncSession = Depends(get_db)
[unnamed file in mirror — a backend pydantic schemas module]

@@ -1,4 +1,4 @@
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from datetime import datetime
 from typing import List, Optional
 from uuid import UUID
[unnamed file in mirror — likely backend/app/services/health_monitor.py]

@@ -36,38 +36,51 @@ class HealthMonitor:
         return {
             'database': self._check_database(),
             'garmin_sync': self._check_garmin_sync(),
-            'ai_service': self._check_ai_service()
+            'ai_service': self._check_ai_service(),
+            'sync_queue_size': self._get_sync_queue_size(),
+            'pending_analyses': self._count_pending_analyses()
         }

+    def _get_sync_queue_size(self) -> int:
+        """Get number of pending sync operations"""
+        from app.models.garmin_sync_log import GarminSyncLog, SyncStatus
+        return GarminSyncLog.query.filter_by(status=SyncStatus.PENDING).count()
+
+    def _count_pending_analyses(self) -> int:
+        """Count workouts needing analysis"""
+        from app.models.workout import Workout
+        return Workout.query.filter_by(analysis_status='pending').count()
+
     def _check_database(self) -> str:
         try:
             with get_db() as db:
                 db.execute(text("SELECT 1"))
             return "ok"
         except Exception as e:
-            logger.error(f"Database check failed: {str(e)}")
+            logger.error("Database check failed", extra={"component": "database", "error": str(e)})
             return "down"

     def _check_garmin_sync(self) -> str:
         try:
             last_sync = GarminSyncLog.get_latest()
             if last_sync and last_sync.status == SyncStatus.FAILED:
+                logger.warning("Garmin sync has failed status", extra={"component": "garmin_sync", "status": last_sync.status.value})
                 return "warning"
             return "ok"
         except Exception as e:
-            logger.error(f"Garmin sync check failed: {str(e)}")
+            logger.error("Garmin sync check failed", extra={"component": "garmin_sync", "error": str(e)})
             return "down"

     def _check_ai_service(self) -> str:
         try:
             response = requests.get(
                 f"{settings.AI_SERVICE_URL}/ping",
                 timeout=5,
                 headers={"Authorization": f"Bearer {settings.OPENROUTER_API_KEY}"}
             )
             return "ok" if response.ok else "down"
         except Exception as e:
-            logger.error(f"AI service check failed: {str(e)}")
+            logger.error("AI service check failed", extra={"component": "ai_service", "error": str(e)})
             return "down"

     def _log_anomalies(self, metrics: Dict[str, Any]):
@@ -75,6 +88,7 @@ class HealthMonitor:
         for metric, value in metrics.items():
             if metric in self.warning_thresholds and value > self.warning_thresholds[metric]:
                 alerts.append(f"{metric} {value}%")
+                logger.warning("System threshold exceeded", extra={"metric": metric, "value": value, "threshold": self.warning_thresholds[metric]})

         if alerts:
-            logger.warning(f"System thresholds exceeded: {', '.join(alerts)}")
+            logger.warning("System thresholds exceeded", extra={"alerts": alerts})
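Two observations on this hunk. First, `GarminSyncLog.query...` / `Workout.query...` is the Flask-SQLAlchemy query-property style; plain SQLAlchemy models (especially async ones) do not expose `.query`, so as written these helpers would raise `AttributeError` unless such a property is wired up elsewhere. A session-based equivalent, sketched under the assumption of a synchronous `SessionLocal` sessionmaker (hypothetical name):

    from sqlalchemy import func, select

    def _get_sync_queue_size(self) -> int:
        """Count pending sync operations with an explicit session instead of Model.query."""
        from app.models.garmin_sync_log import GarminSyncLog, SyncStatus
        with SessionLocal() as session:  # assumed sync sessionmaker, not shown in this commit
            return session.execute(
                select(func.count())
                .select_from(GarminSyncLog)
                .where(GarminSyncLog.status == SyncStatus.PENDING)
            ).scalar_one()

Second, the structured `extra={...}` arguments added here only surface in the JSON output if the formatter reads merged record attributes, per the note after the main-module hunk above.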
[unnamed file in mirror — a backend service module (sync-related)]

@@ -6,6 +6,7 @@ from app.models.garmin_sync_log import GarminSyncLog
 from app.models.garmin_sync_log import GarminSyncLog
 from datetime import datetime, timedelta
 import logging
+from typing import Dict, Any
 import asyncio

 logger = logging.getLogger(__name__)
[unnamed file in mirror — likely backend/requirements.txt]

@@ -8,4 +8,5 @@ pydantic-settings==2.2.1
 python-multipart==0.0.9
 gpxpy  # Add GPX parsing library
 garth==0.4.46  # Garmin Connect API client
 httpx==0.25.2  # Async HTTP client for OpenRouter API
+asyncpg==0.29.0  # Async PostgreSQL driver
[unnamed file in mirror — backup_restore.py (directory not shown)]

@@ -24,6 +24,9 @@ class DatabaseManager:
     def __init__(self, backup_dir: str = "/app/data/backups"):
         self.backup_dir = Path(backup_dir)
         self.backup_dir.mkdir(parents=True, exist_ok=True)
+        self.gpx_dir = Path("/app/data/gpx")
+        self.manifest_file = self.backup_dir / "gpx_manifest.json"
+        self.encryption_key = os.getenv("BACKUP_ENCRYPTION_KEY").encode()

     def get_db_connection_params(self):
         """Extract database connection parameters from URL."""
@@ -39,15 +42,91 @@ class DatabaseManager:
             'database': parsed.path.lstrip('/')
         }

+    def _backup_gpx_files(self, backup_dir: Path) -> Optional[Path]:
+        """Backup GPX files directory"""
+        gpx_dir = Path("/app/data/gpx")
+        if not gpx_dir.exists():
+            return None
+
+        backup_path = backup_dir / "gpx.tar.gz"
+        with tarfile.open(backup_path, "w:gz") as tar:
+            tar.add(gpx_dir, arcname="gpx")
+        return backup_path
+
+    def _backup_sessions(self, backup_dir: Path) -> Optional[Path]:
+        """Backup Garmin sessions directory"""
+        sessions_dir = Path("/app/data/sessions")
+        if not sessions_dir.exists():
+            return None
+
+        backup_path = backup_dir / "sessions.tar.gz"
+        with tarfile.open(backup_path, "w:gz") as tar:
+            tar.add(sessions_dir, arcname="sessions")
+        return backup_path
+
+    def _generate_checksum(self, file_path: Path) -> str:
+        """Generate SHA256 checksum for a file"""
+        hash_sha256 = hashlib.sha256()
+        with open(file_path, "rb") as f:
+            for chunk in iter(lambda: f.read(4096), b""):
+                hash_sha256.update(chunk)
+        return hash_sha256.hexdigest()
+
+    def _verify_backup_integrity(self, backup_path: Path):
+        """Verify backup file integrity using checksum"""
+        checksum_file = backup_path.with_suffix('.sha256')
+        if not checksum_file.exists():
+            raise FileNotFoundError(f"Checksum file missing for {backup_path.name}")
+
+        with open(checksum_file) as f:
+            expected_checksum = f.read().split()[0]
+
+        actual_checksum = self._generate_checksum(backup_path)
+        if actual_checksum != expected_checksum:
+            raise ValueError(f"Checksum mismatch for {backup_path.name}")
+
     def create_backup(self, name: Optional[str] = None) -> str:
-        """Create a database backup."""
+        """Create a full system backup including database, GPX files, and sessions"""
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-        backup_name = name or f"backup_{timestamp}"
-        backup_file = self.backup_dir / f"{backup_name}.sql"
+        backup_name = name or f"full_backup_{timestamp}"
+        backup_dir = self.backup_dir / backup_name
+        backup_dir.mkdir(parents=True, exist_ok=True)
+
+        try:
+            # Backup database
+            db_backup_path = self._backup_database(backup_dir)
+
+            # Backup GPX files
+            gpx_backup_path = self._backup_gpx_files(backup_dir)
+
+            # Backup sessions
+            sessions_backup_path = self._backup_sessions(backup_dir)
+
+            # Generate checksums for all backup files
+            for file in backup_dir.glob("*"):
+                if file.is_file():
+                    checksum = self._generate_checksum(file)
+                    with open(f"{file}.sha256", "w") as f:
+                        f.write(f"{checksum} {file.name}")
+
+            # Verify backups
+            for file in backup_dir.glob("*"):
+                if file.is_file() and not file.name.endswith('.sha256'):
+                    self._verify_backup_integrity(file)
+
+            print(f"✅ Full backup created successfully: {backup_dir}")
+            return str(backup_dir)
+
+        except Exception as e:
+            shutil.rmtree(backup_dir, ignore_errors=True)
+            print(f"❌ Backup failed: {str(e)}")
+            raise
+
+    def _backup_database(self, backup_dir: Path) -> Path:
+        """Create database backup"""
         params = self.get_db_connection_params()
+        backup_file = backup_dir / "database.dump"

-        # Use pg_dump for backup
         cmd = [
             "pg_dump",
             "-h", params['host'],
@@ -56,28 +135,18 @@ class DatabaseManager:
             "-d", params['database'],
             "-f", str(backup_file),
             "--no-password",
-            "--format=custom",  # Custom format for better compression
+            "--format=custom",
             "--compress=9"
         ]

-        # Set password environment variable
         env = os.environ.copy()
         env['PGPASSWORD'] = params['password']

-        try:
-            print(f"Creating backup: {backup_file}")
-            result = subprocess.run(cmd, env=env, capture_output=True, text=True)
-
-            if result.returncode == 0:
-                print(f"✅ Backup created successfully: {backup_file}")
-                return str(backup_file)
-            else:
-                print(f"❌ Backup failed: {result.stderr}")
-                raise Exception(f"Backup failed: {result.stderr}")
-
-        except FileNotFoundError:
-            print("❌ pg_dump not found. Ensure PostgreSQL client tools are installed.")
-            raise
+        result = subprocess.run(cmd, env=env, capture_output=True, text=True)
+        if result.returncode != 0:
+            raise Exception(f"Database backup failed: {result.stderr}")
+
+        return backup_file

     def restore_backup(self, backup_file: str, confirm: bool = False) -> None:
         """Restore database from backup."""
@@ -128,6 +197,80 @@ class DatabaseManager:
             print("❌ pg_restore not found. Ensure PostgreSQL client tools are installed.")
             raise

+    def backup_gpx_files(self, incremental: bool = True) -> Optional[Path]:
+        """Handle GPX backup creation with incremental/full strategy"""
+        try:
+            if incremental:
+                return self._incremental_gpx_backup()
+            return self._full_gpx_backup()
+        except Exception as e:
+            print(f"GPX backup failed: {str(e)}")
+            return None
+
+    def _full_gpx_backup(self) -> Path:
+        """Create full GPX backup"""
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        backup_path = self.backup_dir / f"gpx_full_{timestamp}"
+        backup_path.mkdir()
+
+        # Copy all GPX files
+        subprocess.run(["rsync", "-a", f"{self.gpx_dir}/", f"{backup_path}/"])
+        self._encrypt_backup(backup_path)
+        return backup_path
+
+    def _incremental_gpx_backup(self) -> Optional[Path]:
+        """Create incremental GPX backup using rsync --link-dest"""
+        last_full = self._find_last_full_backup()
+        if not last_full:
+            return self._full_gpx_backup()
+
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        backup_path = self.backup_dir / f"gpx_inc_{timestamp}"
+        backup_path.mkdir()
+
+        # Use hardlinks to previous backup for incremental
+        subprocess.run([
+            "rsync", "-a",
+            "--link-dest", str(last_full),
+            f"{self.gpx_dir}/",
+            f"{backup_path}/"
+        ])
+        self._encrypt_backup(backup_path)
+        return backup_path
+
+    def _find_last_full_backup(self) -> Optional[Path]:
+        """Find most recent full backup"""
+        full_backups = sorted(self.backup_dir.glob("gpx_full_*"), reverse=True)
+        return full_backups[0] if full_backups else None
+
+    def _encrypt_backup(self, backup_path: Path):
+        """Encrypt backup directory using Fernet (AES-256-CBC with HMAC-SHA256)"""
+        from cryptography.fernet import Fernet
+
+        fernet = Fernet(self.encryption_key)
+
+        for file in backup_path.rglob('*'):
+            if file.is_file():
+                with open(file, 'rb') as f:
+                    data = f.read()
+                encrypted = fernet.encrypt(data)
+                with open(file, 'wb') as f:
+                    f.write(encrypted)
+
+    def decrypt_backup(self, backup_path: Path):
+        """Decrypt backup directory"""
+        from cryptography.fernet import Fernet
+
+        fernet = Fernet(self.encryption_key)
+
+        for file in backup_path.rglob('*'):
+            if file.is_file():
+                with open(file, 'rb') as f:
+                    data = f.read()
+                decrypted = fernet.decrypt(data)
+                with open(file, 'wb') as f:
+                    f.write(decrypted)
+
     def _recreate_database(self):
         """Drop and recreate the database."""
         params = self.get_db_connection_params()
@@ -184,10 +327,11 @@ class DatabaseManager:
         cutoff = datetime.now() - timedelta(days=keep_days)
         removed = []

-        for backup in self.backup_dir.glob("*.sql"):
-            if datetime.fromtimestamp(backup.stat().st_mtime) < cutoff:
-                backup.unlink()
-                removed.append(backup.name)
+        # Clean all backup directories (full_backup_*)
+        for backup_dir in self.backup_dir.glob("full_backup_*"):
+            if backup_dir.is_dir() and datetime.fromtimestamp(backup_dir.stat().st_mtime) < cutoff:
+                shutil.rmtree(backup_dir)
+                removed.append(backup_dir.name)

         if removed:
             print(f"Removed {len(removed)} old backups: {', '.join(removed)}")
@@ -198,10 +342,12 @@ def main():
     if len(sys.argv) < 2:
         print("Usage: python backup_restore.py <command> [options]")
         print("Commands:")
-        print("  backup [name] - Create a new backup")
+        print("  backup [name] - Create a new database backup")
+        print("  gpx-backup [--full] - Create GPX backup (incremental by default)")
         print("  restore <file> [--yes] - Restore from backup")
         print("  list - List available backups")
         print("  cleanup [days] - Remove backups older than N days (default: 30)")
+        print("  decrypt <dir> - Decrypt backup directory")
         sys.exit(1)

     manager = DatabaseManager()
@@ -210,13 +356,21 @@ def main():
     try:
         if command == "backup":
             name = sys.argv[2] if len(sys.argv) > 2 else None
-            manager.create_backup(name)
+            name = sys.argv[2] if len(sys.argv) > 2 else None
+            manager.create_backup(name)
+        elif command == "gpx-backup":
+            if len(sys.argv) > 2 and sys.argv[2] == "--full":
+                manager.backup_gpx_files(incremental=False)
+            else:
+                manager.backup_gpx_files()

         elif command == "restore":
             if len(sys.argv) < 3:
                 print("Error: Please specify backup file to restore from")
                 sys.exit(1)

+            backup_file = sys.argv[2]
+            confirm = "--yes" in sys.argv
             backup_file = sys.argv[2]
             confirm = "--yes" in sys.argv
             manager.restore_backup(backup_file, confirm)
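A few notes on the backup additions. Fernet is AES-128-CBC plus HMAC-SHA256 (the docstring's "AES-256-CBC" overstates the key size) and requires a 32-byte urlsafe-base64 key; separately, `os.getenv("BACKUP_ENCRYPTION_KEY").encode()` in `__init__` raises `AttributeError` whenever the variable is unset, even for `list` or `cleanup` runs. A small sketch of generating and validating such a key:

    import os
    from cryptography.fernet import Fernet

    # One-time: generate a key and store it with the deployment secrets.
    print(Fernet.generate_key().decode())  # 44-char urlsafe base64 of 32 random bytes

    # Startup: fail fast with a clear message instead of an AttributeError.
    key = os.getenv("BACKUP_ENCRYPTION_KEY")
    if not key:
        raise RuntimeError("BACKUP_ENCRYPTION_KEY must be set for backup encryption")
    fernet = Fernet(key.encode())  # raises ValueError if the key is malformed

Two further caveats worth flagging: `_verify_backup_integrity` looks for `backup_path.with_suffix('.sha256')` (e.g. `database.sha256` for `database.dump`) while the checksum writer appends `.sha256` to the full name (`database.dump.sha256`), which looks like it would make every verification fail; and because each GPX backup is encrypted in place immediately after the rsync, the previous backup's files no longer match the plaintext source, so `--link-dest` comparisons essentially never hit and the "incremental" backup degrades to a full copy.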
backend/tests/services/test_ai_service.py (new file, 102 lines)

@@ -0,0 +1,102 @@
+import pytest
+from unittest.mock import AsyncMock, patch, MagicMock
+from app.services.ai_service import AIService, AIServiceError
+from app.models.workout import Workout
+import json
+
+@pytest.mark.asyncio
+async def test_analyze_workout_success():
+    """Test successful workout analysis with valid API response"""
+    mock_db = MagicMock()
+    mock_prompt = MagicMock()
+    mock_prompt.format.return_value = "test prompt"
+
+    ai_service = AIService(mock_db)
+    ai_service.prompt_manager.get_active_prompt = AsyncMock(return_value=mock_prompt)
+
+    test_response = json.dumps({
+        "performance_summary": "Good workout",
+        "suggestions": ["More recovery"]
+    })
+
+    with patch('httpx.AsyncClient.post') as mock_post:
+        mock_post.return_value = AsyncMock(
+            status_code=200,
+            json=lambda: {"choices": [{"message": {"content": test_response}}]}
+        )
+
+        workout = Workout(activity_type="cycling", duration_seconds=3600)
+        result = await ai_service.analyze_workout(workout)
+
+        assert "performance_summary" in result
+        assert len(result["suggestions"]) == 1
+
+@pytest.mark.asyncio
+async def test_generate_plan_success():
+    """Test plan generation with structured response"""
+    mock_db = MagicMock()
+    ai_service = AIService(mock_db)
+    ai_service.prompt_manager.get_active_prompt = AsyncMock(return_value="Plan prompt: {rules} {goals}")
+
+    test_plan = {
+        "weeks": [{"workouts": ["ride"]}],
+        "focus": "endurance"
+    }
+
+    with patch('httpx.AsyncClient.post') as mock_post:
+        mock_post.return_value = AsyncMock(
+            status_code=200,
+            json=lambda: {"choices": [{"message": {"content": json.dumps(test_plan)}}]}
+        )
+
+        result = await ai_service.generate_plan([], {})
+        assert "weeks" in result
+        assert result["focus"] == "endurance"
+
+@pytest.mark.asyncio
+async def test_api_retry_logic():
+    """Test API request retries on failure"""
+    mock_db = MagicMock()
+    ai_service = AIService(mock_db)
+
+    with patch('httpx.AsyncClient.post') as mock_post:
+        mock_post.side_effect = Exception("API failure")
+
+        with pytest.raises(AIServiceError):
+            await ai_service._make_ai_request("test")
+
+        assert mock_post.call_count == 3
+
+@pytest.mark.asyncio
+async def test_invalid_json_handling():
+    """Test graceful handling of invalid JSON responses"""
+    mock_db = MagicMock()
+    ai_service = AIService(mock_db)
+
+    with patch('httpx.AsyncClient.post') as mock_post:
+        mock_post.return_value = AsyncMock(
+            status_code=200,
+            json=lambda: {"choices": [{"message": {"content": "invalid{json"}}]}
+        )
+
+        result = await ai_service.parse_rules_from_natural_language("test")
+        assert "raw_rules" in result
+        assert not result["structured"]
+
+@pytest.mark.asyncio
+async def test_code_block_parsing():
+    """Test extraction of JSON from code blocks"""
+    mock_db = MagicMock()
+    ai_service = AIService(mock_db)
+
+    test_response = "```json\n" + json.dumps({"max_rides": 4}) + "\n```"
+
+    with patch('httpx.AsyncClient.post') as mock_post:
+        mock_post.return_value = AsyncMock(
+            status_code=200,
+            json=lambda: {"choices": [{"message": {"content": test_response}}]}
+        )
+
+        result = await ai_service.evolve_plan({})
+        assert "max_rides" in result
+        assert result["max_rides"] == 4
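The retry test pins `_make_ai_request` to exactly three attempts (`mock_post.call_count == 3`) before surfacing `AIServiceError`. The service internals are not part of this commit, but a minimal loop consistent with that contract looks roughly like this (endpoint URL and payload shape are placeholders, not the project's actual values):

    import httpx

    class AIServiceError(Exception):
        pass

    async def _make_ai_request(prompt: str, attempts: int = 3) -> dict:
        """Try the chat-completion call up to `attempts` times, then raise AIServiceError."""
        last_exc = None
        for _ in range(attempts):
            try:
                async with httpx.AsyncClient() as client:
                    response = await client.post(
                        "https://openrouter.ai/api/v1/chat/completions",  # placeholder endpoint
                        json={"messages": [{"role": "user", "content": prompt}]},
                    )
                    response.raise_for_status()
                    return response.json()
            except Exception as exc:  # the tests simulate generic failures
                last_exc = exc
        raise AIServiceError(str(last_exc))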
backend/tests/services/test_plan_evolution.py (new file, 56 lines)

@@ -0,0 +1,56 @@
+import pytest
+from unittest.mock import AsyncMock, MagicMock
+from app.services.plan_evolution import PlanEvolutionService
+from app.models.plan import Plan
+from app.models.analysis import Analysis
+from datetime import datetime
+
+@pytest.mark.asyncio
+async def test_evolve_plan_with_valid_analysis():
+    """Test plan evolution with approved analysis and suggestions"""
+    mock_db = AsyncMock()
+    mock_plan = Plan(
+        id=1,
+        version=1,
+        jsonb_plan={"weeks": []},
+        parent_plan_id=None
+    )
+    mock_analysis = Analysis(
+        approved=True,
+        jsonb_feedback={"suggestions": ["More recovery"]}
+    )
+
+    service = PlanEvolutionService(mock_db)
+    service.ai_service.evolve_plan = AsyncMock(return_value={"weeks": [{"recovery": True}]})
+
+    result = await service.evolve_plan_from_analysis(mock_analysis, mock_plan)
+
+    assert result.version == 2
+    assert result.parent_plan_id == 1
+    mock_db.add.assert_called_once()
+    mock_db.commit.assert_awaited_once()
+
+@pytest.mark.asyncio
+async def test_evolution_skipped_for_unapproved_analysis():
+    """Test plan evolution is skipped for unapproved analysis"""
+    mock_db = AsyncMock()
+    mock_analysis = Analysis(approved=False)
+
+    service = PlanEvolutionService(mock_db)
+    result = await service.evolve_plan_from_analysis(mock_analysis, MagicMock())
+
+    assert result is None
+
+@pytest.mark.asyncio
+async def test_evolution_history_retrieval():
+    """Test getting plan evolution history"""
+    mock_db = AsyncMock()
+    mock_db.execute.return_value.scalars.return_value = [
+        Plan(version=1), Plan(version=2)
+    ]
+
+    service = PlanEvolutionService(mock_db)
+    history = await service.get_plan_evolution_history(1)
+
+    assert len(history) == 2
+    assert history[0].version == 1
backend/tests/services/test_workflow_sync.py (new file, 81 lines)

@@ -0,0 +1,81 @@
+import pytest
+from unittest.mock import AsyncMock, MagicMock, patch
+from app.services.workout_sync import WorkoutSyncService
+from app.models.workout import Workout
+from app.models.garmin_sync_log import GarminSyncLog
+from datetime import datetime, timedelta
+import asyncio
+
+@pytest.mark.asyncio
+async def test_successful_sync():
+    """Test successful sync of new activities"""
+    mock_db = AsyncMock()
+    mock_garmin = MagicMock()
+    mock_garmin.get_activities.return_value = [{'activityId': '123'}]
+    mock_garmin.get_activity_details.return_value = {'metrics': 'data'}
+
+    service = WorkoutSyncService(mock_db)
+    service.garmin_service = mock_garmin
+
+    result = await service.sync_recent_activities()
+
+    assert result == 1
+    mock_db.add.assert_called()
+    mock_db.commit.assert_awaited()
+
+@pytest.mark.asyncio
+async def test_duplicate_activity_handling():
+    """Test skipping duplicate activities"""
+    mock_db = AsyncMock()
+    mock_db.execute.return_value.scalar_one_or_none.return_value = True
+    mock_garmin = MagicMock()
+    mock_garmin.get_activities.return_value = [{'activityId': '123'}]
+
+    service = WorkoutSyncService(mock_db)
+    service.garmin_service = mock_garmin
+
+    result = await service.sync_recent_activities()
+    assert result == 0
+
+@pytest.mark.asyncio
+async def test_activity_detail_retry_logic():
+    """Test retry logic for activity details"""
+    mock_db = AsyncMock()
+    mock_garmin = MagicMock()
+    mock_garmin.get_activities.return_value = [{'activityId': '123'}]
+    mock_garmin.get_activity_details.side_effect = [Exception(), {'metrics': 'data'}]
+
+    service = WorkoutSyncService(mock_db)
+    service.garmin_service = mock_garmin
+
+    result = await service.sync_recent_activities()
+    assert mock_garmin.get_activity_details.call_count == 2
+    assert result == 1
+
+@pytest.mark.asyncio
+async def test_auth_error_handling():
+    """Test authentication error handling"""
+    mock_db = AsyncMock()
+    mock_garmin = MagicMock()
+    mock_garmin.get_activities.side_effect = Exception("Auth failed")
+
+    service = WorkoutSyncService(mock_db)
+    service.garmin_service = mock_garmin
+
+    with pytest.raises(Exception):
+        await service.sync_recent_activities()
+
+    sync_log = mock_db.add.call_args[0][0]
+    assert sync_log.status == "auth_error"
+
+@pytest.mark.asyncio
+async def test_get_sync_status():
+    """Test retrieval of latest sync status"""
+    mock_db = AsyncMock()
+    mock_log = GarminSyncLog(status="success")
+    mock_db.execute.return_value.scalar_one_or_none.return_value = mock_log
+
+    service = WorkoutSyncService(mock_db)
+    result = await service.get_latest_sync_status()
+
+    assert result.status == "success"
[unnamed file in mirror — a docker-compose file]

@@ -11,6 +11,11 @@ services:
       - ./data/gpx:/app/data/gpx
       - ./data/sessions:/app/data/sessions
       - ./data/logs:/app/logs
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "5"
     environment:
       - DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@db:5432/cycling
       - API_KEY=${API_KEY}
@@ -33,12 +38,23 @@ services:
       dockerfile: Dockerfile
     restart: unless-stopped
     ports:
-      - "3000:3000"
+      - "80:80"
     environment:
-      - REACT_APP_API_URL=http://localhost:8000
+      - REACT_APP_API_URL=http://backend:8000
+      - NODE_ENV=production
+    healthcheck:
+      test: ["CMD", "curl", "-f", "http://localhost:80/healthz"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
     depends_on:
       backend:
         condition: service_healthy
+    deploy:
+      resources:
+        limits:
+          memory: 512M
+          cpus: '0.5'

   db:
     image: postgres:15-alpine
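Note: the added `logging` block caps each container's json-file log at 5 × 10 MB on the Docker side, independent of the application-level `RotatingFileHandler` writing into ./data/logs. The frontend healthcheck targets the `/healthz` endpoint defined by the new nginx.conf later in this commit, and `REACT_APP_API_URL=http://backend:8000` only resolves on the compose network — a browser outside it cannot reach the `backend` hostname — so the value is presumably consumed at build/prerender time rather than from client-side code. The `deploy.resources` limits, for their part, are honored by Swarm and by recent Compose versions, not by older plain `docker-compose up`.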
[unnamed file in mirror — a second docker-compose file (dev variant?)]

@@ -8,7 +8,8 @@ services:
     ports:
       - "8000:8000"
     environment:
-      - DATABASE_URL=postgresql://postgres:password@db:5432/cycling
+      - DATABASE_URL=postgresql+asyncpg://postgres:password@db:5432/cycling
+      - GPX_STORAGE_PATH=/app/data/gpx
       - GARMIN_USERNAME=${GARMIN_USERNAME}
       - GARMIN_PASSWORD=${GARMIN_PASSWORD}
       - OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
@@ -27,9 +28,9 @@ services:
   frontend:
     build: ./frontend
     ports:
-      - "3000:3000"
+      - "8888:80"
     environment:
-      - REACT_APP_API_URL=http://localhost:8000
+      - REACT_APP_API_URL=http://backend:8000
       - REACT_APP_API_KEY=${API_KEY}

   db:
frontend/.dockerignore (new file, 11 lines)

@@ -0,0 +1,11 @@
+node_modules
+.next
+Dockerfile
+.dockerignore
+.git
+.gitignore
+coverage
+.env
+.env.local
+.vscode
+*.log
[unnamed file in mirror — likely frontend/Dockerfile]

@@ -1,39 +1,60 @@
-# Build stage
-FROM node:20-alpine AS build
+# Stage 1: Build application
+FROM node:20-alpine AS builder

-# Set working directory
 WORKDIR /app

-# Copy package.json and package-lock.json
-COPY package*.json ./
+# Copy package manifests first for optimal caching
+COPY package.json package-lock.json* ./

-# Install all dependencies including devDependencies
-RUN npm install --include=dev
+# Clean cache and install dependencies
+RUN npm cache clean --force && \
+    export NODE_OPTIONS="--max-old-space-size=1024" && \
+    npm install --include=dev

-# Copy source code
+# Copy source files
 COPY . .

-# Build application
-RUN npm run build
+# Build application with production settings
+RUN export NODE_OPTIONS="--max-old-space-size=1024" && \
+    npm run build

-# Production stage
-FROM node:20-alpine AS production
+# Stage 2: Production runtime
+FROM nginx:1.25-alpine

-# Set working directory
-WORKDIR /app
+# Install curl for healthchecks
+RUN apk add --no-cache curl

-# Copy build artifacts and dependencies
-COPY --from=build /app/package*.json ./
-COPY --from=build /app/.next ./.next
-COPY --from=build /app/node_modules ./node_modules
-COPY --from=build /app/public ./public
+# Create necessary directories and set permissions
+RUN mkdir -p /var/cache/nginx/client_temp && \
+    mkdir -p /var/run/nginx && \
+    chown -R nginx:nginx /usr/share/nginx/html && \
+    chown -R nginx:nginx /var/cache/nginx && \
+    chown -R nginx:nginx /var/run/nginx && \
+    chmod -R 755 /usr/share/nginx/html

-# Create non-root user
-RUN addgroup -S appgroup && adduser -S appuser -G appgroup
-USER appuser
+# Copy build artifacts
+COPY --from=builder /app/.next /usr/share/nginx/html/_next

-# Expose application port
-EXPOSE 3000
+# Copy nginx configuration
+COPY nginx.conf /etc/nginx/nginx.conf

-# Run application
-CMD ["npm", "start"]
+# Copy Next.js routes manifest for proper routing
+COPY --from=builder /app/.next/routes-manifest.json /usr/share/nginx/html/_next/
+
+# Copy main HTML files to root
+COPY --from=builder /app/.next/server/pages/index.html /usr/share/nginx/html/index.html
+COPY --from=builder /app/.next/server/pages/404.html /usr/share/nginx/html/404.html
+
+# Modify nginx config to use custom PID path
+RUN sed -i 's|pid /var/run/nginx.pid;|pid /var/run/nginx/nginx.pid;|' /etc/nginx/nginx.conf
+
+# Healthcheck
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl --fail http://localhost:80 || exit 1
+
+# Run as root to avoid permission issues
+# USER nginx
+
+EXPOSE 80
+
+CMD ["nginx", "-g", "daemon off;"]
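Note: serving a Next.js app from nginx by copying `.next/server/pages/*.html` only works for pages that are fully static at build time; anything that needs the Next.js server (API routes, SSR, dynamic routing beyond the copied manifest) is simply absent from the image. That constraint is what motivates the `typeof window === 'undefined'` build-time guards added to the React pages later in this commit. The commented-out `USER nginx` line also trades away the non-root posture the old Node image had, in exchange for sidestepping the permission issues the comment mentions.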
frontend/jest.config.js (new file, 15 lines)

@@ -0,0 +1,15 @@
+module.exports = {
+  collectCoverage: true,
+  coverageDirectory: "coverage",
+  coverageReporters: ["text", "lcov"],
+  coveragePathIgnorePatterns: [
+    "/node_modules/",
+    "/.next/",
+    "/__tests__/",
+    "jest.config.js"
+  ],
+  testEnvironment: "jest-environment-jsdom",
+  moduleNameMapper: {
+    "^@/(.*)$": "<rootDir>/src/$1"
+  }
+};
frontend/nginx.conf (new file, 45 lines)

@@ -0,0 +1,45 @@
+worker_processes auto;
+
+events {
+    worker_connections 1024;
+}
+
+http {
+    include /etc/nginx/mime.types;
+    default_type application/octet-stream;
+
+    sendfile on;
+    keepalive_timeout 65;
+    gzip on;
+    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;
+
+    server {
+        listen 80;
+        server_name localhost;
+
+        location / {
+            root /usr/share/nginx/html;
+            index index.html;
+            try_files $uri $uri/ /index.html;
+
+            # Cache control for static assets
+            location ~* \.(js|css|png|jpg|jpeg|gif|ico|svg)$ {
+                expires 1y;
+                add_header Cache-Control "public, immutable";
+            }
+        }
+
+        # Next.js specific routes
+        location /_next/ {
+            alias /usr/share/nginx/html/_next/;
+            expires 365d;
+            add_header Cache-Control "public, max-age=31536000, immutable";
+        }
+
+        # Health check endpoint
+        location /healthz {
+            access_log off;
+            return 200 'ok';
+        }
+    }
+}
[unnamed file in mirror — frontend/package.json]

@@ -8,14 +8,19 @@
     "start": "next start",
     "lint": "next lint",
     "test": "jest",
-    "test:watch": "jest --watch"
+    "test:watch": "jest --watch",
+    "test:coverage": "jest --coverage"
   },
   "dependencies": {
     "@emotion/react": "^11.14.0",
     "@emotion/styled": "^11.14.1",
+    "axios": "^1.7.2",
+    "date-fns": "^3.6.0",
     "next": "14.2.3",
     "react": "18.2.0",
     "react-dom": "18.2.0",
+    "react-router-dom": "^6.22.3",
+    "react-toastify": "^10.0.4",
     "recharts": "2.8.0"
   },
   "devDependencies": {
frontend/src/components/__tests__/LoadingSpinner.test.jsx (new file, 26 lines)

@@ -0,0 +1,26 @@
+import { render, screen } from '@testing-library/react';
+import LoadingSpinner from '../LoadingSpinner';
+
+describe('LoadingSpinner Component', () => {
+  test('renders spinner with animation', () => {
+    render(<LoadingSpinner />);
+
+    // Check for the spinner container
+    const spinnerContainer = screen.getByRole('status');
+    expect(spinnerContainer).toBeInTheDocument();
+
+    // Verify animation classes
+    const spinnerElement = screen.getByTestId('loading-spinner');
+    expect(spinnerElement).toHaveClass('animate-spin');
+    expect(spinnerElement).toHaveClass('rounded-full');
+
+    // Check accessibility attributes
+    expect(spinnerElement).toHaveAttribute('aria-live', 'polite');
+    expect(spinnerElement).toHaveAttribute('aria-busy', 'true');
+  });
+
+  test('matches snapshot', () => {
+    const { asFragment } = render(<LoadingSpinner />);
+    expect(asFragment()).toMatchSnapshot();
+  });
+});
[unnamed file in mirror — likely frontend/src/context/AuthContext.jsx]

@@ -44,8 +44,19 @@ export const AuthProvider = ({ children }) => {

 export const useAuth = () => {
   const context = useContext(AuthContext);
+
+  // Return safe defaults during build time
+  if (typeof window === 'undefined') {
+    return {
+      apiKey: null,
+      authFetch: () => {},
+      loading: false
+    };
+  }
+
   if (!context) {
     throw new Error('useAuth must be used within an AuthProvider');
   }

   return context;
 };
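Note: `typeof window === 'undefined'` is true while Next.js prerenders pages, so `useAuth` returns inert defaults instead of throwing during the static build. One wrinkle: the stub `authFetch: () => {}` returns `undefined` rather than a Promise, so any prerendered code path that awaits it must tolerate an undefined response — the page-level guards below return placeholder markup before any fetch runs, which sidesteps that.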
[unnamed file in mirror — likely the frontend Dashboard page component]

@@ -7,6 +7,7 @@ import LoadingSpinner from '../components/LoadingSpinner';

 const Dashboard = () => {
   const { apiKey, loading: apiLoading } = useAuth();
+  const isBuildTime = typeof window === 'undefined';
   const [recentWorkouts, setRecentWorkouts] = useState([]);
   const [currentPlan, setCurrentPlan] = useState(null);
   const [stats, setStats] = useState({ totalWorkouts: 0, totalDistance: 0 });
@@ -18,16 +19,16 @@ const Dashboard = () => {
   const fetchDashboardData = async () => {
     try {
       const [workoutsRes, planRes, statsRes, healthRes] = await Promise.all([
-        fetch('/api/workouts?limit=3', {
+        fetch(`${process.env.REACT_APP_API_URL}/api/workouts?limit=3`, {
           headers: { 'X-API-Key': apiKey }
         }),
-        fetch('/api/plans/active', {
+        fetch(`${process.env.REACT_APP_API_URL}/api/plans/active`, {
           headers: { 'X-API-Key': apiKey }
         }),
-        fetch('/api/stats', {
+        fetch(`${process.env.REACT_APP_API_URL}/api/stats`, {
           headers: { 'X-API-Key': apiKey }
         }),
-        fetch('/api/health', {
+        fetch(`${process.env.REACT_APP_API_URL}/api/health`, {
           headers: { 'X-API-Key': apiKey }
         })
       ]);
@@ -61,6 +62,17 @@ const Dashboard = () => {
     fetchDashboardData();
   }, [apiKey]);

+  if (isBuildTime) {
+    return (
+      <div className="p-6 max-w-7xl mx-auto">
+        <h1 className="text-3xl font-bold">Training Dashboard</h1>
+        <div className="bg-white p-6 rounded-lg shadow-md">
+          <p className="text-gray-600">Loading dashboard data...</p>
+        </div>
+      </div>
+    );
+  }
+
   if (localLoading || apiLoading) return <LoadingSpinner />;
   if (error) return <div className="p-6 text-red-500">{error}</div>;

[unnamed file in mirror — likely the frontend Plans page component]

@@ -9,8 +9,11 @@ const Plans = () => {
   const [selectedPlan, setSelectedPlan] = useState(null);
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState('');

+  const isBuildTime = typeof window === 'undefined';
+
   useEffect(() => {
+    if (isBuildTime) return;
     const fetchPlans = async () => {
       try {
         const response = await axios.get('/api/plans', {
@@ -30,6 +33,17 @@ const Plans = () => {
     fetchPlans();
   }, [apiKey]);

+  if (typeof window === 'undefined') {
+    return (
+      <div className="p-6 max-w-7xl mx-auto">
+        <h1 className="text-3xl font-bold mb-8">Training Plans</h1>
+        <div className="bg-white p-6 rounded-lg shadow-md">
+          <p className="text-gray-600">Loading training plans...</p>
+        </div>
+      </div>
+    );
+  }
+
   if (loading) return <div className="p-6 text-center">Loading plans...</div>;
   if (error) return <div className="p-6 text-red-600">{error}</div>;

[unnamed file in mirror — likely the frontend Routes page component]

@@ -2,6 +2,18 @@ import { useAuth } from '../context/AuthContext';

 const RoutesPage = () => {
   const { apiKey } = useAuth();
+
+  // Handle build-time case where apiKey is undefined
+  if (typeof window === 'undefined') {
+    return (
+      <div className="p-6 max-w-7xl mx-auto">
+        <h1 className="text-3xl font-bold mb-8">Routes</h1>
+        <div className="bg-white p-6 rounded-lg shadow-md">
+          <p className="text-gray-600">Loading route management...</p>
+        </div>
+      </div>
+    );
+  }

   return (
     <div className="p-6 max-w-7xl mx-auto">
[unnamed file in mirror — likely the frontend Workouts page component]

@@ -9,8 +9,11 @@ const Workouts = () => {
   const [selectedWorkout, setSelectedWorkout] = useState(null);
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState('');

+  const isBuildTime = typeof window === 'undefined';
+
   useEffect(() => {
+    if (isBuildTime) return;
     const fetchWorkouts = async () => {
       try {
         const response = await axios.get('/api/workouts', {
@@ -27,6 +30,17 @@ const Workouts = () => {
     fetchWorkouts();
   }, [apiKey]);

+  if (isBuildTime) {
+    return (
+      <div className="p-6 max-w-7xl mx-auto">
+        <h1 className="text-3xl font-bold mb-8">Workouts</h1>
+        <div className="bg-white p-6 rounded-lg shadow-md">
+          <p className="text-gray-600">Loading workout data...</p>
+        </div>
+      </div>
+    );
+  }
+
   if (loading) return <div className="p-6 text-center">Loading workouts...</div>;
   if (error) return <div className="p-6 text-red-600">{error}</div>;
