feat: Initial implementation of FitTrack Report Generator

This commit introduces the initial version of the FitTrack Report Generator, a FastAPI application for analyzing workout files.

Key features include:
- Parsing of FIT, TCX, and GPX workout files.
- Analysis of power, heart rate, speed, and elevation data.
- Generation of summary reports and charts.
- REST API for single and batch workout analysis.
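As a quick sketch of how the single-workout endpoint can be called (assuming a local server on port 8000 and the `requests` library; the file name is illustrative):

import requests

# Upload one FIT file to the single-workout endpoint.
with open("ride.fit", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/api/analyze/workout",
        files={"file": ("ride.fit", f)},
        data={"ftp_value": "250"},  # optional form field; user_id may also be supplied
    )
resp.raise_for_status()
print(resp.json()["metrics"])  # summary metrics for the workout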

The project structure has been set up with a `src` directory for core logic, an `api` directory for the FastAPI application, and a `tests` directory for unit, integration, and contract tests.

The development workflow is configured to use Docker and modern Python tooling.
commit 9e0bd322d3 (parent 6643a64ff0)
Date: 2025-10-11 09:54:13 -07:00
152 changed files with 25695 additions and 49 deletions


api/main.py (new file, 19 lines)

@@ -0,0 +1,19 @@
from fastapi import FastAPI

from api.routers import analysis
from src.core.logger import logger

app = FastAPI()
app.include_router(analysis.router, prefix="/api")


@app.on_event("startup")
async def startup_event():
    logger.info("FitTrack Report Generator API starting up...")


@app.on_event("shutdown")
async def shutdown_event():
    logger.info("FitTrack Report Generator API shutting down...")


@app.get("/")
def read_root():
    return {"message": "Welcome to FitTrack Report Generator API!"}


api/routers/analysis.py (new file, 364 lines)

@@ -0,0 +1,364 @@
from fastapi import APIRouter, UploadFile, File, Form, HTTPException, Depends, status
from fastapi.responses import StreamingResponse
from typing import Optional
from uuid import UUID, uuid4
from datetime import datetime
import io

from src.core.file_parser import FitParser, TcxParser, GpxParser
from src.core.workout_analyzer import WorkoutAnalyzer
from src.core.report_generator import ReportGenerator
from src.core.batch_processor import BatchProcessor  # assumed module path; used by the batch endpoint below
from src.db.session import get_db
from src.db.models import User, WorkoutAnalysis
from api.schemas import ErrorResponse
from sqlalchemy.orm import Session
from src.core.logger import logger

router = APIRouter()
@router.post("/analyze/workout", response_model=dict, responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def analyze_single_workout(
file: UploadFile = File(...),
user_id: Optional[UUID] = Form(None),
ftp_value: Optional[float] = Form(None),
db: Session = Depends(get_db)
):
logger.info("Received request to analyze single workout", filename=file.filename, user_id=user_id)
file_content = await file.read()
file_extension = file.filename.split(".")[-1].lower()
parser = None
if file_extension == "fit":
parser = FitParser()
elif file_extension == "tcx":
parser = TcxParser()
elif file_extension == "gpx":
parser = GpxParser()
else:
logger.warning("Unsupported file type received", filename=file.filename, file_extension=file_extension)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="UNSUPPORTED_FILE_TYPE",
message="The provided file type is not supported.",
details={
"filename": file.filename,
"extension": file_extension
}
).dict()
)
    try:
        workout_data = parser.parse(io.BytesIO(file_content))
        logger.info("File parsed successfully", filename=file.filename)
    except Exception as e:
        logger.error("Error parsing file", filename=file.filename, error=str(e), exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ErrorResponse(
                code="FILE_PARSING_ERROR",
                message=f"Error parsing workout file: {e}",
                details={"filename": file.filename},
            ).dict(),
        )
    try:
        # Fetch the user's FTP from the DB if it was not provided in the request
        effective_ftp = ftp_value
        if user_id and not effective_ftp:
            user = db.query(User).filter(User.id == user_id).first()
            if user and user.ftp_value:
                effective_ftp = user.ftp_value
                logger.info("Using FTP from user profile", user_id=user_id, ftp_value=effective_ftp)

        analyzer = WorkoutAnalyzer(workout_data)
        analyzer.analyze_power_data(ftp=effective_ftp or 0)  # falls back to 0 when no FTP is known
        analyzer.analyze_heart_rate_data(max_hr=180)  # TODO: Get max_hr from user settings
        analyzer.analyze_speed_data(max_speed=50)  # TODO: Get max_speed from user settings
        analyzer.analyze_elevation_data()
        summary_metrics = analyzer.calculate_summary_metrics()

        analysis_id = uuid4()
        logger.info("Workout analysis completed", filename=file.filename, analysis_id=analysis_id)

        # Generate report (placeholder)
        report_generator = ReportGenerator(workout_data)
        html_report_content = report_generator.generate_html_report()
        # TODO: Save report to a file and get path
        report_path = "/path/to/report.html"  # Placeholder
        # Generate charts (placeholder)
        chart_paths = {}  # Placeholder

        # Store the analysis in the DB
        new_analysis = WorkoutAnalysis(
            id=analysis_id,
            user_id=user_id,
            file_name=file.filename,
            analysis_date=datetime.utcnow(),
            status="completed",
            summary_metrics=summary_metrics,  # must be JSONB-compatible
            report_path=report_path,
            chart_paths=chart_paths,  # must be JSONB-compatible
        )
        db.add(new_analysis)
        db.commit()
        db.refresh(new_analysis)
        logger.info("Workout analysis saved to DB", analysis_id=analysis_id, filename=file.filename)

        return {
            "analysis_id": analysis_id,
            "user_id": user_id,
            "file_name": file.filename,
            "analysis_date": new_analysis.analysis_date.isoformat() + "Z",
            "status": "completed",
            "metrics": summary_metrics,
            "report_url": f"/api/analysis/{analysis_id}/report",  # TODO: Implement report retrieval endpoint
            "chart_urls": chart_paths,  # TODO: Implement chart retrieval endpoint
        }
    except Exception as e:
        logger.error("Unexpected error during workout analysis or DB operation", filename=file.filename, error=str(e), exc_info=True)
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ErrorResponse(
                code="INTERNAL_SERVER_ERROR",
                message=f"An unexpected error occurred during workout analysis: {e}",
            ).dict(),
        )
@router.get("/analysis/{analysis_id}/summary", response_model=dict, responses={
status.HTTP_404_NOT_FOUND: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def get_analysis_summary(
analysis_id: UUID,
db: Session = Depends(get_db)
):
logger.info("Received request for analysis summary", analysis_id=analysis_id)
try:
analysis = db.query(WorkoutAnalysis).filter(WorkoutAnalysis.id == analysis_id).first()
if not analysis:
logger.warning("Analysis not found", analysis_id=analysis_id)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=ErrorResponse(
code="ANALYSIS_NOT_FOUND",
message=f"Analysis with ID {analysis_id} not found."
).dict()
)
logger.info("Analysis summary retrieved successfully", analysis_id=analysis_id)
return analysis.summary_metrics
except HTTPException as he:
raise he
except Exception as e:
logger.error("Unexpected error retrieving analysis summary", analysis_id=analysis_id, error=str(e), exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="INTERNAL_SERVER_ERROR",
message=f"An unexpected error occurred while retrieving analysis summary: {e}"
).dict()
)
@router.get("/analysis/{analysis_id}/charts", responses={
status.HTTP_200_OK: {
"content": {"image/png": {}},
"description": "Returns the chart image."
},
status.HTTP_404_NOT_FOUND: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def get_analysis_charts(
analysis_id: UUID,
chart_type: str,
db: Session = Depends(get_db)
):
logger.info("Received request for chart", analysis_id=analysis_id, chart_type=chart_type)
analysis = db.query(WorkoutAnalysis).filter(WorkoutAnalysis.id == analysis_id).first()
if not analysis:
logger.warning("Analysis not found for chart request", analysis_id=analysis_id)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=ErrorResponse(
code="ANALYSIS_NOT_FOUND",
message=f"Analysis with ID {analysis_id} not found."
).dict()
)
if chart_type not in analysis.chart_paths:
logger.warning("Chart type not found in analysis", analysis_id=analysis_id, chart_type=chart_type)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=ErrorResponse(
code="CHART_NOT_FOUND",
message=f"Chart type {chart_type} not found for analysis ID {analysis_id}."
).dict()
)
chart_path = analysis.chart_paths[chart_type]
try:
with open(chart_path, "rb") as f:
logger.info("Chart file read successfully", analysis_id=analysis_id, chart_type=chart_type, chart_path=chart_path)
return StreamingResponse(io.BytesIO(f.read()), media_type="image/png")
except FileNotFoundError:
logger.error("Chart file not found", analysis_id=analysis_id, chart_type=chart_type, chart_path=chart_path, exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="CHART_FILE_ERROR",
message=f"Chart file not found at {chart_path}."
).dict()
)
except Exception as e:
logger.error("Error retrieving chart", analysis_id=analysis_id, chart_type=chart_type, error=str(e), exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="CHART_RETRIEVAL_ERROR",
message=f"Error retrieving chart: {e}"
).dict()
)
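Side note on the chart response: reading the file and wrapping it in `io.BytesIO` buffers the entire image in memory, while `fastapi.responses.FileResponse` streams from disk. A minimal sketch of the alternative, with `chart_response` as a hypothetical helper:

from fastapi.responses import FileResponse

def chart_response(chart_path: str) -> FileResponse:
    # Hypothetical helper: FileResponse streams the file from disk,
    # avoiding the in-memory copy made by io.BytesIO(f.read()).
    return FileResponse(chart_path, media_type="image/png")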
@router.post("/analyze/batch", response_model=dict, responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def analyze_batch_workouts(
zip_file: UploadFile = File(...),
user_id: Optional[UUID] = Form(None),
ftp_value: Optional[float] = Form(None),
db: Session = Depends(get_db)
):
logger.info("Received request to analyze batch workouts", filename=zip_file.filename, user_id=user_id)
if zip_file.content_type != "application/zip":
logger.warning("Invalid file type for batch analysis", filename=zip_file.filename, content_type=zip_file.content_type)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="INVALID_FILE_TYPE",
message="Only ZIP files are supported for batch analysis."
).dict()
)
zip_content = await zip_file.read()
if not zip_content:
logger.warning("Empty ZIP file received for batch analysis", filename=zip_file.filename)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="EMPTY_ZIP_FILE",
message="The provided ZIP file is empty."
).dict()
)
try:
batch_processor = BatchProcessor(db_session=db)
results = batch_processor.process_zip_file(zip_content, user_id, ftp_value)
batch_id = uuid4()
total_files = len(results)
failed_files = sum(1 for r in results if r["status"] == "failed")
status_message = "completed"
if failed_files > 0:
status_message = "completed_with_errors"
if failed_files == total_files:
status_message = "failed"
logger.info("Batch analysis completed", batch_id=batch_id, total_files=total_files, failed_files=failed_files, status=status_message)
return {
"batch_id": batch_id,
"status": status_message,
"total_files": total_files,
"results": results
}
except Exception as e:
logger.error("Unexpected error during batch processing", filename=zip_file.filename, error=str(e), exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="INTERNAL_SERVER_ERROR",
message=f"An unexpected error occurred during batch processing: {e}"
).dict()
)
@router.post("/analyze/batch", response_model=dict, responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def analyze_batch_workouts(
zip_file: UploadFile = File(...),
user_id: Optional[UUID] = Form(None),
ftp_value: Optional[float] = Form(None),
db: Session = Depends(get_db)
):
if zip_file.content_type != "application/zip":
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="INVALID_FILE_TYPE",
message="Only ZIP files are supported for batch analysis."
).dict()
)
zip_content = await zip_file.read()
if not zip_content:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="EMPTY_ZIP_FILE",
message="The provided ZIP file is empty."
).dict()
)
try:
batch_processor = BatchProcessor(db_session=db)
results = batch_processor.process_zip_file(zip_content, user_id, ftp_value)
batch_id = uuid4()
total_files = len(results)
failed_files = sum(1 for r in results if r["status"] == "failed")
status_message = "completed"
if failed_files > 0:
status_message = "completed_with_errors"
if failed_files == total_files:
status_message = "failed"
return {
"batch_id": batch_id,
"status": status_message,
"total_files": total_files,
"results": results
}
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="INTERNAL_SERVER_ERROR",
message=f"An unexpected error occurred during batch processing: {e}"
).dict()
)
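For completeness, a sketch of a client call to the batch endpoint (same assumptions as the earlier example: local server on port 8000, `requests` installed, illustrative file name):

import requests

# Submit a ZIP of workout files for batch analysis; the explicit
# content type matters because the endpoint checks for application/zip.
with open("workouts.zip", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/api/analyze/batch",
        files={"zip_file": ("workouts.zip", f, "application/zip")},
    )
resp.raise_for_status()
body = resp.json()
print(body["status"], body["total_files"])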

api/schemas.py (new file, 7 lines)

@@ -0,0 +1,7 @@
from pydantic import BaseModel
from typing import Optional, Dict, Any


class ErrorResponse(BaseModel):
    code: str
    message: str
    details: Optional[Dict[str, Any]] = None
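As used throughout the routers, a sketch of how an error payload serializes (the `.dict()` calls imply Pydantic v1; under v2 this would be `.model_dump()`):

from api.schemas import ErrorResponse

err = ErrorResponse(
    code="UNSUPPORTED_FILE_TYPE",
    message="The provided file type is not supported.",
    details={"filename": "ride.xyz", "extension": "xyz"},  # illustrative values
)
print(err.dict())
# {'code': 'UNSUPPORTED_FILE_TYPE', 'message': 'The provided file type is not supported.',
#  'details': {'filename': 'ride.xyz', 'extension': 'xyz'}}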