2025-10-12 06:38:44 -07:00
parent 9e0bd322d3
commit 3886dcb9ab
158 changed files with 2022 additions and 9699 deletions


@@ -6,14 +6,17 @@ app = FastAPI()
app.include_router(analysis.router, prefix="/api")


@app.on_event("startup")
async def startup_event():
    logger.info("FitTrack Report Generator API starting up...")


@app.on_event("shutdown")
async def shutdown_event():
    logger.info("FitTrack Report Generator API shutting down...")


@app.get("/")
def read_root():
    return {"message": "Welcome to FitTrack Report Generator API!"}


@@ -5,30 +5,40 @@ from uuid import UUID, uuid4
from datetime import datetime
import pandas as pd
import io
import httpx
from src.core.file_parser import FitParser, TcxParser, GpxParser
from src.core.workout_data import WorkoutData, WorkoutMetadata
from src.core.workout_analyzer import WorkoutAnalyzer
from src.core.report_generator import ReportGenerator
from src.core.chart_generator import ChartGenerator
from src.db.session import get_db
from src.db.models import User, WorkoutAnalysis
from api.schemas import ErrorResponse
from sqlalchemy.orm import Session
from src.core.logger import logger
from src.clients.centraldb_client import CentralDBClient
from src.core.cache import cache

# BatchProcessor is used by the batch route below but is not imported in the
# shown hunk; this module path is an assumption
from src.core.batch_processor import BatchProcessor
router = APIRouter()
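The summary and chart routes below lean on `src.core.cache` exposing a module-level `cache` with plain `get`/`set`. The real implementation is not shown in this diff; a minimal stand-in that matches the call sites, assuming no TTL or eviction is needed:

class SimpleCache:
    """In-process key/value store matching the cache.get/cache.set call sites."""

    def __init__(self) -> None:
        self._store: dict = {}

    def get(self, key: str):
        # returns None on a miss, like dict.get
        return self._store.get(key)

    def set(self, key: str, value) -> None:
        self._store[key] = value

cache = SimpleCache()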
@router.post("/analyze/workout", response_model=dict, responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
@router.post(
"/analyze/workout",
response_model=dict,
responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse},
},
)
async def analyze_single_workout(
    file: UploadFile = File(...),
    user_id: Optional[UUID] = Form(None),
    ftp_value: Optional[float] = Form(None),
    db: Session = Depends(get_db),
):
    logger.info(f"Received request to analyze single workout for file: {file.filename}, user_id: {user_id}")
    file_content = await file.read()
    file_extension = file.filename.split(".")[-1].lower()
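One caveat in the extension sniffing above: `filename.split(".")[-1]` returns the whole name when there is no dot, and raises `AttributeError` when `filename` is `None`. A slightly more defensive variant, assuming the same lowercasing rule:

from pathlib import Path

# suffix is "" when the name has no extension, so unknown names fall into the 400 branch
file_extension = Path(file.filename or "").suffix.lstrip(".").lower()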
@@ -40,45 +50,28 @@ async def analyze_single_workout(
elif file_extension == "gpx":
parser = GpxParser()
else:
logger.warning("Unsupported file type received", filename=file.filename, file_extension=file_extension)
logger.warning(f"Unsupported file type received for file: {file.filename}, extension: {file_extension}")
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="UNSUPPORTED_FILE_TYPE",
message="The provided file type is not supported.",
details={
"filename": file.filename,
"extension": file_extension
}
).dict()
details={"filename": file.filename, "extension": file_extension},
).dict(),
)
    try:
        workout_data = parser.parse(io.BytesIO(file_content))
        logger.info(f"File parsed successfully: {file.filename}")
    except Exception as e:
        logger.error(f"Error parsing file: {file.filename}, error: {e}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ErrorResponse(
                code="FILE_PARSING_ERROR",
                message=f"Error parsing workout file: {e}",
                details={"filename": file.filename},
            ).dict(),
        )
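The unsupported-type and parse-error raises share one shape: an HTTPException wrapping a serialized ErrorResponse. A small helper could fold them together; a sketch, with `raise_api_error` being an illustrative name rather than something from this repo:

def raise_api_error(code: str, message: str, status_code: int, **details) -> None:
    raise HTTPException(
        status_code=status_code,
        detail=ErrorResponse(
            code=code, message=message, details=details or None
        ).dict(),
    )

# usage, mirroring the parse-error branch above:
# raise_api_error("FILE_PARSING_ERROR", f"Error parsing workout file: {e}",
#                 status.HTTP_400_BAD_REQUEST, filename=file.filename)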
    try:
@@ -88,45 +81,50 @@ async def analyze_single_workout(
        user = db.query(User).filter(User.id == user_id).first()
        if user and user.ftp_value:
            effective_ftp = user.ftp_value
            logger.info(f"Using FTP from user profile for user_id: {user_id}, ftp_value: {effective_ftp}")
        # Initialize WorkoutAnalyzer
        analyzer = WorkoutAnalyzer(workout_data)
        # Perform analysis
        analyzer.analyze_power_data(
            ftp=effective_ftp if effective_ftp else 0
        )  # Need to fetch user FTP if not provided
        analyzer.analyze_heart_rate_data(
            max_hr=180
        )  # TODO: Get max_hr from user settings
        analyzer.analyze_speed_data(
            max_speed=50
        )  # TODO: Get max_speed from user settings
        analyzer.analyze_elevation_data()
        summary_metrics = analyzer.calculate_summary_metrics()
        # Generate report (placeholder)
        report_generator = ReportGenerator(workout_data)
        html_report_content = report_generator.generate_html_report()
        # TODO: Save report to a file and get path
        report_path = "/path/to/report.html"  # Placeholder
        # Generate charts (placeholder)
        chart_paths = {}  # Placeholder
        # Store analysis in DB
        analysis_id = uuid4()
        logger.info(f"Workout analysis completed for file: {file.filename}, analysis_id: {analysis_id}")
        new_analysis = WorkoutAnalysis(
            id=analysis_id,
            user_id=user_id,
            file_name=file.filename,
            analysis_date=datetime.utcnow(),
            status="completed",
            summary_metrics=summary_metrics,  # This needs to be JSONB compatible
            report_path=report_path,
            chart_paths=chart_paths,  # This needs to be JSONB compatible
        )
        db.add(new_analysis)
        db.commit()
        db.refresh(new_analysis)
        logger.info(f"Workout analysis saved to DB for analysis_id: {analysis_id}, filename: {file.filename}")
        return {
            "analysis_id": analysis_id,
@@ -135,230 +133,217 @@ async def analyze_single_workout(
"analysis_date": new_analysis.analysis_date.isoformat() + "Z",
"status": "completed",
"metrics": summary_metrics,
"report_url": f"/api/analysis/{analysis_id}/report", # TODO: Implement report retrieval endpoint
"chart_urls": chart_paths # TODO: Implement chart retrieval endpoint
"report_url": f"/api/analysis/{analysis_id}/report", # TODO: Implement report retrieval endpoint
"chart_urls": chart_paths, # TODO: Implement chart retrieval endpoint
}
    except Exception as e:
        logger.error(f"Unexpected error during workout analysis or DB operation for file: {file.filename}, error: {e}", exc_info=True)
        db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ErrorResponse(
                code="INTERNAL_SERVER_ERROR",
                message=f"An unexpected error occurred during workout analysis: {e}",
            ).dict(),
        )
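For reference, the endpoint above is exercised end to end with a multipart upload. A rough test sketch, assuming the app object from main.py is importable as `app` (the import path is an assumption):

from fastapi.testclient import TestClient

from main import app  # assumed import path

client = TestClient(app)

def test_unknown_extension_is_rejected():
    resp = client.post(
        "/api/analyze/workout",
        files={"file": ("ride.xyz", b"not a workout", "application/octet-stream")},
    )
    assert resp.status_code == 400
    # HTTPException puts the ErrorResponse payload under "detail"
    assert resp.json()["detail"]["code"] == "UNSUPPORTED_FILE_TYPE"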
@router.get("/analysis/{analysis_id}/summary", response_model=dict, responses={
status.HTTP_404_NOT_FOUND: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def get_analysis_summary(
analysis_id: UUID,
db: Session = Depends(get_db)
):
logger.info("Received request for analysis summary", analysis_id=analysis_id)
@router.get(
"/analysis/{analysis_id}/summary",
response_model=dict,
responses={
status.HTTP_404_NOT_FOUND: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse},
},
)
async def get_analysis_summary(analysis_id: UUID, db: Session = Depends(get_db)):
logger.info(f"Received request for analysis summary for analysis_id: {analysis_id}")
    # Check cache first
    cached_summary = cache.get(str(analysis_id))
    if cached_summary:
        logger.info(f"Analysis summary found in cache for analysis_id: {analysis_id}")
        return cached_summary
    try:
        # If not in cache, check local DB
        analysis = (
            db.query(WorkoutAnalysis).filter(WorkoutAnalysis.id == analysis_id).first()
        )
        if analysis:
            logger.info(f"Analysis summary found in local DB for analysis_id: {analysis_id}")
            cache.set(str(analysis_id), analysis.summary_metrics)
            return analysis.summary_metrics
        # If not in local DB, check CentralDB
        centraldb_client = CentralDBClient()
        try:
            artifact = await centraldb_client.get_analysis_artifact(str(analysis_id))
            logger.info(f"Analysis artifact found in CentralDB for analysis_id: {analysis_id}")
            summary_metrics = artifact["data"]
            cache.set(str(analysis_id), summary_metrics)
            # Also store it in the local DB for future requests
            new_analysis = WorkoutAnalysis(
                id=analysis_id,
                summary_metrics=summary_metrics,
                # ... other fields might be needed depending on the model
            )
            db.add(new_analysis)
            db.commit()
            return summary_metrics
        except httpx.HTTPStatusError as e:
            if e.response.status_code != 404:
                raise e  # Re-raise if it's not a 'not found' error
            # If not in CentralDB, download FIT file, analyze, and store
            logger.info(f"Analysis not found for analysis_id: {analysis_id}, starting new analysis from FIT file")
            fit_file_content = await centraldb_client.download_fit_file(str(analysis_id))
            parser = FitParser()
            workout_data = parser.parse(io.BytesIO(fit_file_content))
            analyzer = WorkoutAnalyzer(workout_data)
            analyzer.analyze_power_data()
            analyzer.analyze_heart_rate_data()
            analyzer.analyze_speed_data()
            analyzer.analyze_elevation_data()
            summary_metrics = analyzer.calculate_summary_metrics()
            # Store in CentralDB
            await centraldb_client.create_analysis_artifact(
                str(analysis_id), data=summary_metrics
            )
            logger.info(f"New analysis artifact stored in CentralDB for analysis_id: {analysis_id}")
            # Store in local DB and cache
            new_analysis = WorkoutAnalysis(
                id=analysis_id,
                summary_metrics=summary_metrics,
                # ... other fields
            )
            db.add(new_analysis)
            db.commit()
            cache.set(str(analysis_id), summary_metrics)
            logger.info(f"New analysis stored locally and cached for analysis_id: {analysis_id}")
            return summary_metrics
    except HTTPException as he:
        raise he
    except Exception as e:
        logger.error(f"Unexpected error retrieving analysis summary for analysis_id: {analysis_id}, error: {e}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ErrorResponse(
                code="INTERNAL_SERVER_ERROR",
                message=f"An unexpected error occurred while retrieving analysis summary: {e}",
            ).dict(),
        )
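Stripped of logging and error handling, get_analysis_summary is a three-tier read-through lookup: in-process cache, then local DB, then CentralDB (with a rebuild-from-FIT fallback not repeated here). A condensed sketch of the happy paths, reusing the names from the route:

async def read_through_summary(analysis_id: UUID, db: Session) -> dict:
    key = str(analysis_id)
    if (hit := cache.get(key)) is not None:  # tier 1: in-process cache
        return hit
    row = db.query(WorkoutAnalysis).filter(WorkoutAnalysis.id == analysis_id).first()
    if row is not None:  # tier 2: local DB
        cache.set(key, row.summary_metrics)
        return row.summary_metrics
    client = CentralDBClient()  # tier 3: CentralDB artifact store
    artifact = await client.get_analysis_artifact(key)
    cache.set(key, artifact["data"])
    return artifact["data"]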
@router.get("/analysis/{analysis_id}/charts", responses={
status.HTTP_200_OK: {
"content": {"image/png": {}},
"description": "Returns the chart image."
@router.get(
"/analysis/{analysis_id}/charts",
responses={
status.HTTP_200_OK: {
"content": {"image/png": {}},
"description": "Returns the chart image.",
},
status.HTTP_404_NOT_FOUND: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse},
},
status.HTTP_404_NOT_FOUND: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
)
async def get_analysis_charts(
analysis_id: UUID,
chart_type: str,
db: Session = Depends(get_db)
analysis_id: UUID, chart_type: str, db: Session = Depends(get_db)
):
logger.info("Received request for chart", analysis_id=analysis_id, chart_type=chart_type)
analysis = db.query(WorkoutAnalysis).filter(WorkoutAnalysis.id == analysis_id).first()
if not analysis:
logger.warning("Analysis not found for chart request", analysis_id=analysis_id)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=ErrorResponse(
code="ANALYSIS_NOT_FOUND",
message=f"Analysis with ID {analysis_id} not found."
).dict()
)
logger.info(f"Received request for chart for analysis_id: {analysis_id}, chart_type: {chart_type}")
if chart_type not in analysis.chart_paths:
logger.warning("Chart type not found in analysis", analysis_id=analysis_id, chart_type=chart_type)
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=ErrorResponse(
code="CHART_NOT_FOUND",
message=f"Chart type {chart_type} not found for analysis ID {analysis_id}."
).dict()
)
chart_path = analysis.chart_paths[chart_type]
try:
with open(chart_path, "rb") as f:
logger.info("Chart file read successfully", analysis_id=analysis_id, chart_type=chart_type, chart_path=chart_path)
return StreamingResponse(io.BytesIO(f.read()), media_type="image/png")
except FileNotFoundError:
logger.error("Chart file not found", analysis_id=analysis_id, chart_type=chart_type, chart_path=chart_path, exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="CHART_FILE_ERROR",
message=f"Chart file not found at {chart_path}."
).dict()
)
except Exception as e:
logger.error("Error retrieving chart", analysis_id=analysis_id, chart_type=chart_type, error=str(e), exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="CHART_RETRIEVAL_ERROR",
message=f"Error retrieving chart: {e}"
).dict()
)
@router.post("/analyze/batch", response_model=dict, responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def analyze_batch_workouts(
zip_file: UploadFile = File(...),
user_id: Optional[UUID] = Form(None),
ftp_value: Optional[float] = Form(None),
db: Session = Depends(get_db)
):
logger.info("Received request to analyze batch workouts", filename=zip_file.filename, user_id=user_id)
if zip_file.content_type != "application/zip":
logger.warning("Invalid file type for batch analysis", filename=zip_file.filename, content_type=zip_file.content_type)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="INVALID_FILE_TYPE",
message="Only ZIP files are supported for batch analysis."
).dict()
)
zip_content = await zip_file.read()
if not zip_content:
logger.warning("Empty ZIP file received for batch analysis", filename=zip_file.filename)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="EMPTY_ZIP_FILE",
message="The provided ZIP file is empty."
).dict()
)
# Check cache first
cache_key = f"{analysis_id}_{chart_type}"
cached_chart = cache.get(cache_key)
if cached_chart:
logger.info(f"Chart found in cache for analysis_id: {analysis_id}, chart_type: {chart_type}")
return StreamingResponse(io.BytesIO(cached_chart), media_type="image/png")
try:
batch_processor = BatchProcessor(db_session=db)
results = batch_processor.process_zip_file(zip_content, user_id, ftp_value)
centraldb_client = CentralDBClient()
try:
chart_content = await centraldb_client.retrieve_chart(
str(analysis_id), chart_type
)
logger.info(f"Chart found in CentralDB for analysis_id: {analysis_id}, chart_type: {chart_type}")
cache.set(cache_key, chart_content)
return StreamingResponse(io.BytesIO(chart_content), media_type="image/png")
except httpx.HTTPStatusError as e:
if e.response.status_code != 404:
raise
batch_id = uuid4()
total_files = len(results)
failed_files = sum(1 for r in results if r["status"] == "failed")
status_message = "completed"
if failed_files > 0:
status_message = "completed_with_errors"
if failed_files == total_files:
status_message = "failed"
logger.info("Batch analysis completed", batch_id=batch_id, total_files=total_files, failed_files=failed_files, status=status_message)
logger.info(f"Chart not found for analysis_id: {analysis_id}, starting new analysis from FIT file")
fit_file_content = await centraldb_client.download_fit_file(str(analysis_id))
return {
"batch_id": batch_id,
"status": status_message,
"total_files": total_files,
"results": results
}
parser = FitParser()
workout_data = parser.parse(io.BytesIO(fit_file_content))
analyzer = WorkoutAnalyzer(workout_data)
analyzer.analyze_power_data()
analyzer.analyze_heart_rate_data()
analyzer.analyze_speed_data()
analyzer.analyze_elevation_data()
summary_metrics = analyzer.calculate_summary_metrics()
chart_generator = ChartGenerator(workout_data)
if chart_type == "power_curve":
chart_content = chart_generator.generate_power_curve_chart()
elif chart_type == "elevation_profile":
chart_content = chart_generator.generate_elevation_profile_chart()
elif chart_type == "zone_distribution_power":
chart_content = chart_generator.generate_zone_distribution_chart("power")
elif chart_type == "zone_distribution_heart_rate":
chart_content = chart_generator.generate_zone_distribution_chart("heart_rate")
elif chart_type == "zone_distribution_speed":
chart_content = chart_generator.generate_zone_distribution_chart("speed")
else:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="INVALID_CHART_TYPE",
message=f"Invalid chart type: {chart_type}",
).dict(),
)
await centraldb_client.upload_chart(str(analysis_id), chart_type, chart_content)
logger.info(f"New chart stored in CentralDB for analysis_id: {analysis_id}, chart_type: {chart_type}")
try:
await centraldb_client.get_analysis_artifact(str(analysis_id))
except httpx.HTTPStatusError as e:
if e.response.status_code == 404:
await centraldb_client.create_analysis_artifact(
str(analysis_id), data=summary_metrics
)
logger.info(f"New analysis artifact stored in CentralDB for analysis_id: {analysis_id}")
cache.set(cache_key, chart_content)
logger.info(f"New chart cached for analysis_id: {analysis_id}, chart_type: {chart_type}")
return StreamingResponse(io.BytesIO(chart_content), media_type="image/png")
except httpx.HTTPStatusError as e:
raise HTTPException(
status_code=e.response.status_code,
detail=ErrorResponse(
code="CHART_RETRIEVAL_ERROR", message=f"Error retrieving chart: {e}"
).dict(),
)
except HTTPException as he:
raise he
except Exception as e:
logger.error("Unexpected error during batch processing", filename=zip_file.filename, error=str(e), exc_info=True)
logger.error(f"Unexpected error retrieving chart for analysis_id: {analysis_id}, chart_type: {chart_type}, error: {e}", exc_info=True)
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ErrorResponse(
code="INTERNAL_SERVER_ERROR",
message=f"An unexpected error occurred during batch processing: {e}"
).dict()
)
@router.post("/analyze/batch", response_model=dict, responses={
status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse}
})
async def analyze_batch_workouts(
zip_file: UploadFile = File(...),
user_id: Optional[UUID] = Form(None),
ftp_value: Optional[float] = Form(None),
db: Session = Depends(get_db)
):
if zip_file.content_type != "application/zip":
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=ErrorResponse(
code="INVALID_FILE_TYPE",
message="Only ZIP files are supported for batch analysis."
).dict()
code="INTERNAL_SERVER_ERROR", message=f"An unexpected error occurred: {e}"
).dict(),
)
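The if/elif ladder that picks a ChartGenerator method is a natural fit for a dispatch table, which also gives the INVALID_CHART_TYPE check a single source of truth. A sketch using the method names exactly as they appear above:

CHART_BUILDERS = {
    "power_curve": lambda g: g.generate_power_curve_chart(),
    "elevation_profile": lambda g: g.generate_elevation_profile_chart(),
    "zone_distribution_power": lambda g: g.generate_zone_distribution_chart("power"),
    "zone_distribution_heart_rate": lambda g: g.generate_zone_distribution_chart("heart_rate"),
    "zone_distribution_speed": lambda g: g.generate_zone_distribution_chart("speed"),
}

def build_chart(generator: ChartGenerator, chart_type: str) -> bytes:
    builder = CHART_BUILDERS.get(chart_type)
    if builder is None:
        raise ValueError(f"Invalid chart type: {chart_type}")
    return builder(generator)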
@router.post(
    "/analyze/batch",
    response_model=dict,
    responses={
        status.HTTP_400_BAD_REQUEST: {"model": ErrorResponse},
        status.HTTP_500_INTERNAL_SERVER_ERROR: {"model": ErrorResponse},
    },
)
async def analyze_batch_workouts(
    zip_file: UploadFile = File(...),
    user_id: Optional[UUID] = Form(None),
    ftp_value: Optional[float] = Form(None),
    db: Session = Depends(get_db),
):
    logger.info(f"Received request to analyze batch workouts for file: {zip_file.filename}, user_id: {user_id}")
    if zip_file.content_type != "application/zip":
        logger.warning(f"Invalid file type for batch analysis: {zip_file.filename}, content_type: {zip_file.content_type}")
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ErrorResponse(
                code="INVALID_FILE_TYPE",
                message="Only ZIP files are supported for batch analysis.",
            ).dict(),
        )
    zip_content = await zip_file.read()
    if not zip_content:
        logger.warning(f"Empty ZIP file received for batch analysis: {zip_file.filename}")
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ErrorResponse(
                code="EMPTY_ZIP_FILE",
                message="The provided ZIP file is empty.",
            ).dict(),
        )
    try:
        batch_processor = BatchProcessor(db_session=db)
        results = batch_processor.process_zip_file(zip_content, user_id, ftp_value)
        batch_id = uuid4()
        total_files = len(results)
        failed_files = sum(1 for r in results if r["status"] == "failed")
        status_message = "completed"
        if failed_files > 0:
            status_message = "completed_with_errors"
            if failed_files == total_files:
                status_message = "failed"
        logger.info(f"Batch analysis completed for batch_id: {batch_id}, total_files: {total_files}, failed_files: {failed_files}, status: {status_message}")
        return {
            "batch_id": batch_id,
            "status": status_message,
            "total_files": total_files,
            "results": results,
        }
    except Exception as e:
        logger.error(f"Unexpected error during batch processing for file: {zip_file.filename}, error: {e}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=ErrorResponse(
                code="INTERNAL_SERVER_ERROR",
                message=f"An unexpected error occurred during batch processing: {e}",
            ).dict(),
        )
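The status rollup in the batch route reduces to a small pure function, which makes the three outcomes easy to verify in isolation:

def batch_status(total_files: int, failed_files: int) -> str:
    if failed_files == 0:
        return "completed"
    if failed_files == total_files:
        return "failed"
    return "completed_with_errors"

assert batch_status(5, 0) == "completed"
assert batch_status(5, 2) == "completed_with_errors"
assert batch_status(5, 5) == "failed"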


@@ -1,6 +1,7 @@
from pydantic import BaseModel
from typing import Optional, Dict, Any


class ErrorResponse(BaseModel):
    code: str
    message: str
    # the routes above pass details={...}; the typing imports make the field's
    # shape clear even though this hunk was truncated
    details: Optional[Dict[str, Any]] = None
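The routes serialize this model with `.dict()`, which is Pydantic v1 API; on Pydantic v2 the equivalent call is `.model_dump()`. How a populated instance round-trips:

err = ErrorResponse(
    code="UNSUPPORTED_FILE_TYPE",
    message="The provided file type is not supported.",
    details={"filename": "ride.xyz", "extension": "xyz"},
)
payload = err.dict()
# {"code": "UNSUPPORTED_FILE_TYPE", "message": "The provided file type is not supported.",
#  "details": {"filename": "ride.xyz", "extension": "xyz"}}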