Changed to DB-backed streams (ActivityStream table) for FIT stream export

This commit is contained in:
2026-01-14 05:39:16 -08:00
parent 362f4cb5aa
commit 45dbc32295
99 changed files with 2118 additions and 1684 deletions

View File

@@ -46,7 +46,7 @@ class EffortAnalysisData(BaseModel):
class ComparisonResponse(BaseModel):
    """Response payload for POST /segments/efforts/compare.

    Attributes:
        efforts: Per-effort analysis data for every compared effort.
        winners: metric_key -> effort_id of the winning effort for that
            metric; None when no effort wins (e.g. metric missing for all).
    """
    efforts: List[EffortAnalysisData]
    # Optional because a metric may have no winner; the stale
    # Dict[str, int] duplicate annotation is removed — only this
    # (last) annotation takes effect in a class body anyway.
    winners: Dict[str, Optional[int]]  # metric_key -> effort_id of winner
@router.post("/segments/efforts/compare", response_model=ComparisonResponse)
def compare_efforts(effort_ids: List[int] = Body(...), db: Session = Depends(get_db)):
@@ -162,58 +162,94 @@ def export_analysis(effort_ids: List[int] = Body(...), db: Session = Depends(get
# Or simplistic: Fetch inside loop (N+1 query, but export is rare/manual action).
effort = db.query(SegmentEffort).get(e_dict['effort_id'])
if effort and effort.activity and effort.activity.file_content:
if effort and effort.activity:
try:
act = effort.activity
raw_data = extract_activity_data(act.file_content, act.file_type or 'fit')
streams = {}
# Slice by time
# Timestamps in raw_data['timestamps']
timestamps = raw_data.get('timestamps', [])
start_time = effort.start_time
end_time = effort.end_time
# Normalize start/end to match stream timestamps timezone
if timestamps and timestamps[0]:
stream_tz = timestamps[0].tzinfo
# 1. Use ActivityStream table (Preferred)
if act.streams and act.streams.time_offset:
ast = act.streams
base_time = act.start_time
# Helper to align
# Reconstruct absolute timestamps
full_timestamps = [base_time + __import__('datetime').timedelta(seconds=t) for t in ast.time_offset]
# Alignment helper
def align_tz(dt, target_tz):
    """Align a datetime's timezone with the stream timestamps' timezone.

    Effort start/end times and reconstructed stream timestamps may differ in
    awareness (naive vs aware); comparing them directly would raise TypeError,
    so normalize dt to target_tz first.

    Args:
        dt: datetime to normalize (naive or aware).
        target_tz: tzinfo of the stream timestamps, or None if they are naive.

    Returns:
        dt expressed compatibly with target_tz.
    """
    # Fix: the previous body contained two interleaved copies of this logic
    # (a merge artifact); this is the single equivalent implementation.
    if not target_tz:
        return dt.replace(tzinfo=None)  # streams are naive -> strip tz
    if dt.tzinfo == target_tz:
        return dt
    if dt.tzinfo is None:
        return dt.replace(tzinfo=target_tz)  # assume same reference frame
    return dt.astimezone(target_tz)
start_time = align_tz(start_time, stream_tz)
end_time = align_tz(end_time, stream_tz)
# Simple list comprehension to find indices
indices = [i for i, t in enumerate(timestamps)
if t and start_time <= t <= end_time]
streams = {}
if indices:
first = indices[0]
last = indices[-1] + 1
start_time = align_tz(effort.start_time, full_timestamps[0].tzinfo if full_timestamps else None)
end_time = align_tz(effort.end_time, full_timestamps[0].tzinfo if full_timestamps else None)
# Find slice indices
# Since sorted, can optimize, but simple loop fine for export
indices = [i for i, t in enumerate(full_timestamps) if start_time <= t <= end_time]
# Keys to extract
keys = ['heart_rate', 'power', 'speed', 'cadence', 'temperature']
for k in keys:
if k in raw_data:
streams[k] = raw_data[k][first:last]
# Points/Elevation
if 'points' in raw_data:
sliced_points = raw_data['points'][first:last]
streams['latlng'] = [[p[1], p[0]] for p in sliced_points] # lat, lon
streams['elevation'] = [p[2] if len(p) > 2 else None for p in sliced_points]
if indices:
first = indices[0]
last = indices[-1] + 1
streams['timestamps'] = [t.isoformat() if t else None for t in raw_data['timestamps'][first:last]]
# Extract slices
streams['timestamps'] = [t.isoformat() for t in full_timestamps[first:last]]
if ast.heart_rate: streams['heart_rate'] = ast.heart_rate[first:last]
if ast.power: streams['power'] = ast.power[first:last]
if ast.speed: streams['speed'] = ast.speed[first:last]
if ast.cadence: streams['cadence'] = ast.cadence[first:last]
if ast.temperature: streams['temperature'] = ast.temperature[first:last]
if ast.elevation: streams['elevation'] = ast.elevation[first:last]
# LatLng - reconstruct from parallel arrays
if ast.latitude and ast.longitude:
lats = ast.latitude[first:last]
lngs = ast.longitude[first:last]
# Zip only up to shortest length to avoid errors
min_len = min(len(lats), len(lngs))
streams['latlng'] = [[lats[i], lngs[i]] for i in range(min_len)]
# 2. Fallback to File Parsing
elif act.file_content:
raw_data = extract_activity_data(act.file_content, act.file_type or 'fit')
# Slice by time
timestamps = raw_data.get('timestamps', [])
start_time = effort.start_time
end_time = effort.end_time
if timestamps and timestamps[0]:
stream_tz = timestamps[0].tzinfo
def align_tz_fallback(dt, target_tz):
    """Make dt comparable with parsed-file timestamps carrying target_tz.

    Handles every naive/aware combination so that later `<=` comparisons
    against the stream timestamps never raise TypeError.
    """
    if dt.tzinfo == target_tz:
        # Already in the same awareness/zone; nothing to do.
        return dt
    if dt.tzinfo is None:
        # Naive effort time: assume it shares the stream's reference frame.
        return dt.replace(tzinfo=target_tz)
    if target_tz is None:
        # Aware effort time vs naive streams: drop the zone info.
        return dt.replace(tzinfo=None)
    # Both aware, different zones: convert properly.
    return dt.astimezone(target_tz)
start_time = align_tz_fallback(start_time, stream_tz)
end_time = align_tz_fallback(end_time, stream_tz)
indices = [i for i, t in enumerate(timestamps) if t and start_time <= t <= end_time]
if indices:
first = indices[0]
last = indices[-1] + 1
keys = ['heart_rate', 'power', 'speed', 'cadence', 'temperature']
for k in keys:
if k in raw_data:
streams[k] = raw_data[k][first:last]
if 'points' in raw_data:
sliced_points = raw_data['points'][first:last]
streams['latlng'] = [[p[1], p[0]] for p in sliced_points] # lat, lon
streams['elevation'] = [p[2] if len(p) > 2 else None for p in sliced_points]
streams['timestamps'] = [t.isoformat() if t else None for t in raw_data['timestamps'][first:last]]
e_dict['streams'] = streams
except Exception as e:
print(f"Error extracting streams for effort {effort.id}: {e}")