many updates
This commit is contained in:
73
FitnessSync/scratch/check_stormchaser_generic.py
Normal file
73
FitnessSync/scratch/check_stormchaser_generic.py
Normal file
@@ -0,0 +1,73 @@
|
||||
|
||||
# Scratch script: count rides that fell back to the "GenericBike" setup
# since the earliest Stormchaser purchase date.
import sys
import os

# Make the backend package importable when run inside the app container.
sys.path.append('/app/backend')

import logging

from src.services.postgresql_manager import PostgreSQLManager
from src.utils.config import config
from src.models.activity import Activity
from src.models.bike_setup import BikeSetup
from sqlalchemy import or_, and_

# Keep library chatter quiet; the script prints its own findings.
logging.basicConfig(level=logging.WARNING)
|
||||
|
||||
def analyze():
    """Report how many rides were tagged to the GenericBike setup since the
    earliest Stormchaser purchase date, then sample match-confidence stats
    on recent Stormchaser-tagged rides."""
    print("Connecting to database...")
    manager = PostgreSQLManager(config.DATABASE_URL)
    session = manager.SessionLocal()

    try:
        # 1. All bike setups whose name or frame mentions "Stormchaser".
        pattern = '%Stormchaser%'
        stormchasers = (
            session.query(BikeSetup)
            .filter(or_(BikeSetup.name.ilike(pattern), BikeSetup.frame.ilike(pattern)))
            .order_by(BikeSetup.purchase_date.asc())
            .all()
        )

        # Earliest recorded purchase date among them (None when absent).
        reference_date = min(
            (b.purchase_date for b in stormchasers if b.purchase_date),
            default=None,
        )
        if not reference_date:
            print("No Stormchaser purchase date found.")
            return

        print(f"Using Reference Date: {reference_date}")

        # 2. Locate the fallback "GenericBike" setup.
        generic = session.query(BikeSetup).filter(BikeSetup.name == 'GenericBike').first()
        if not generic:
            print("GenericBike setup not found.")
            return

        # 3. Count activities tagged Generic since the reference date.
        count = (
            session.query(Activity)
            .filter(
                Activity.bike_setup_id == generic.id,
                Activity.start_time >= reference_date,
            )
            .count()
        )
        print(f"\nResult: {count} rides tagged as Generic since {reference_date}")

        # Sample confidence stats on the 10 most recent Stormchaser rides.
        print("\n[Smart Match Stats Sample (Stormchaser)]")
        stormchaser_ids = [b.id for b in stormchasers]
        recent_rides = (
            session.query(Activity)
            .filter(
                Activity.bike_setup_id.in_(stormchaser_ids),
                Activity.start_time >= reference_date,
            )
            .order_by(Activity.start_time.desc())
            .limit(10)
            .all()
        )
        for r in recent_rides:
            print(f"ID: {r.id} | Conf: {r.bike_match_confidence} | Type: {r.activity_type}")

    except Exception as e:
        print(f"Error during analysis: {e}")
    finally:
        session.close()


if __name__ == "__main__":
    analyze()
|
||||
138
FitnessSync/scratch/debug_match.py
Normal file
138
FitnessSync/scratch/debug_match.py
Normal file
@@ -0,0 +1,138 @@
|
||||
|
||||
# Scratch script: trace why SegmentMatcher does (not) match one known
# activity against the "Etiwanda" segment.
import logging
from sqlalchemy.orm import Session  # NOTE(review): appears unused in this file
from src.services.postgresql_manager import PostgreSQLManager
from src.utils.config import config
from src.models.activity import Activity
from src.models.segment import Segment
from src.services.segment_matcher import SegmentMatcher
from src.services.parsers import extract_points_from_file

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
||||
|
||||
def debug_matching():
    """Trace SegmentMatcher against a hard-coded activity/segment pair.

    Runs the real matcher first, then replays a simplified version of its
    entry/corridor logic step by step so a failed match can be explained.
    Prints findings; returns nothing.
    """
    db_manager = PostgreSQLManager(config.DATABASE_URL)

    with db_manager.get_db_session() as db:
        # 1. Fetch Segment
        segment = db.query(Segment).filter(Segment.name.like("%Etiwanda%")).first()
        if not segment:
            print("ERROR: Segment 'Etiwanda Climb' not found.")
            return

        print(f"DEBUG: Found Segment ID {segment.id}: {segment.name}")
        print(f"DEBUG: Segment Distance: {segment.distance}")

        # 2. Fetch Activity: try primary key first, then Garmin id.
        act_id = 21072264737
        activity = db.query(Activity).filter(Activity.id == act_id).first()
        if not activity:
            activity = db.query(Activity).filter(Activity.garmin_activity_id == str(act_id)).first()

        if not activity:
            print(f"ERROR: Activity {act_id} not found.")
            return

        print(f"DEBUG: Found Activity ID {activity.id} (Garmin: {activity.garmin_activity_id})")

        # 3. Extract GPS points from the stored raw file.
        if not activity.file_content:
            print("ERROR: Activity has no file content")
            return

        points = extract_points_from_file(activity.file_content, activity.file_type)
        print(f"DEBUG: Extracted {len(points)} points from activity.")

        # 4. Trigger the real matcher.
        matcher = SegmentMatcher(db)

        import json
        # Segment points may be stored as a JSON string or already decoded.
        seg_points = json.loads(segment.points) if isinstance(segment.points, str) else segment.points

        # FIX: this line was accidentally duplicated in the original.
        print(f"DEBUG: Segment has {len(seg_points)} points.")

        efforts = matcher.match_activity(activity, points)

        if efforts:
            print(f"SUCCESS: match_activity returned {len(efforts)} efforts.")
            for e in efforts:
                print(f"  - Segment {e.segment_id} (Duration: {e.elapsed_time}s)")
        else:
            print("FAILURE: match_activity returned NO efforts.")

        # --- Deep Trace: replay the matcher's heuristics manually ---
        print("\n--- DEEP TRACE ---")
        ENTRY_RADIUS = 25.0      # metres to the start/end node to count as "reached"
        CORRIDOR_RADIUS = 35.0   # metres off the segment path before disqualifying
        # (perpendicular_distance was imported here but never used; removed)
        from src.utils.geo import haversine_distance

        start_node = seg_points[0]
        end_node = seg_points[-1]

        # Check Start Proximity: every track index within ENTRY_RADIUS of the
        # segment start is a candidate effort start.
        start_candidates = []
        min_start_dist = float('inf')

        for i, p in enumerate(points):
            dist = haversine_distance(p[1], p[0], start_node[1], start_node[0])
            if dist < min_start_dist: min_start_dist = dist
            if dist <= ENTRY_RADIUS:
                start_candidates.append(i)

        print(f"Min distance to Start Node: {min_start_dist:.2f}m")
        print(f"Start Candidates: {start_candidates}")

        if not start_candidates:
            print("FAIL: Start node never reached within 25m.")
            return

        # Trace every candidate start forward through the track.
        for start_idx in start_candidates:
            print(f"\nChecking candidate starting at index {start_idx}...")

            effort_accum_dist = 0.0
            completed = False
            max_deviation = 0.0

            for j in range(start_idx + 1, len(points)):
                p = points[j]
                prev_p = points[j-1]

                # Accumulate distance
                step_dist = haversine_distance(p[1], p[0], prev_p[1], prev_p[0])
                effort_accum_dist += step_dist

                # Check deviation from the segment corridor.
                dev = matcher._min_dist_to_segment_path(p, seg_points)
                if dev > max_deviation: max_deviation = dev

                if dev > CORRIDOR_RADIUS:
                    print(f"  DEVIATION at index {j}! Dist {dev:.2f}m > {CORRIDOR_RADIUS}m. AccumDist: {effort_accum_dist:.2f}m")
                    break

                # Check completion: near the end node AND covered >=80% of
                # the segment's length (guards against shortcuts).
                d_end = haversine_distance(p[1], p[0], end_node[1], end_node[0])
                if d_end <= ENTRY_RADIUS:
                    if effort_accum_dist >= 0.8 * segment.distance:
                        print(f"  COMPLETION possible at index {j}. d_end={d_end:.2f}m, dist={effort_accum_dist:.2f}m")
                        completed = True
                        # Don't break immediately, could get closer?
                        # But logic breaks on completion check?
                        # Logic: returns first valid end.

            if not completed:
                print(f"  Candidate ended without completion. Max Deviation: {max_deviation:.2f}m. Total Dist: {effort_accum_dist:.2f}m vs Target {segment.distance:.2f}m")


if __name__ == "__main__":
    debug_matching()
|
||||
168
FitnessSync/scratch/debug_specific_matches.py
Normal file
168
FitnessSync/scratch/debug_specific_matches.py
Normal file
@@ -0,0 +1,168 @@
|
||||
|
||||
# Scratch script: inspect speed/cadence streams of specific activities and
# compare the observed gear ratio against each configured bike setup.
import sys
import os
import statistics
import logging
# Make the backend package importable when run inside the app container.
sys.path.append('/app/backend')

from src.services.postgresql_manager import PostgreSQLManager
from src.utils.config import config
from src.models.activity import Activity
from src.models.bike_setup import BikeSetup
from src.services.parsers import extract_activity_data
from src.services.bike_matching import WHEEL_CIRCUMFERENCE_M

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
||||
|
||||
# Re-implement logic to capture samples
|
||||
# Re-implement logic to capture samples
def analyze_streams_debug(speed_stream, cadence_stream, window_size=10):
    """Scan paired speed/cadence streams for steady-state windows and derive
    a gear ratio (wheel revs per pedal rev) from each.

    Returns a tuple ``(samples, median_ratio)``: up to 10 sample dicts plus
    the median ratio over every steady window, or ``([], 0.0)`` when the
    streams are unusable or no window qualifies.
    """
    streams_usable = (
        speed_stream
        and cadence_stream
        and len(speed_stream) == len(cadence_stream)
    )
    if not streams_usable:
        print(" - Streams missing or mismatched length")
        return [], 0.0

    all_ratios = []
    kept_samples = []
    # Tally why windows were rejected, for the failure report below.
    rejected = {'none': 0, 'threshold': 0, 'variance': 0}

    total = len(speed_stream)
    # Slide the window forward 5 samples at a time.
    for start in range(0, total - window_size, 5):
        spd_win = speed_stream[start:start + window_size]
        cad_win = cadence_stream[start:start + window_size]

        # Any dropout in either stream disqualifies the window.
        if any(v is None for v in spd_win) or any(c is None for c in cad_win):
            rejected['none'] += 1
            continue

        # Must be actively pedalling (>55 rpm) and moving (>2.5 m/s) throughout.
        if not (all(c > 55 for c in cad_win) and all(v > 2.5 for v in spd_win)):
            rejected['threshold'] += 1
            continue

        try:
            cad_sd = statistics.stdev(cad_win)
            spd_sd = statistics.stdev(spd_win)
        except statistics.StatisticsError:
            continue

        # "Steady" = low variance in both cadence and speed.
        if cad_sd < 5 and spd_sd < 0.5:
            mean_spd = statistics.mean(spd_win)
            mean_cad = statistics.mean(cad_win)
            ratio = (mean_spd * 60) / (mean_cad * WHEEL_CIRCUMFERENCE_M)
            all_ratios.append(ratio)

            # Keep at most 10 illustrative samples for the report.
            if len(kept_samples) < 10:
                kept_samples.append({
                    'time_idx': start,
                    'avg_spd': mean_spd,
                    'avg_cad': mean_cad,
                    'ratio': ratio,
                    'cad_std': cad_sd
                })
        else:
            rejected['variance'] += 1

    if not all_ratios:
        print(f" - No steady segments. Rejections: {rejected}")
        return [], 0.0

    return kept_samples, statistics.median(all_ratios)
|
||||
|
||||
def main():
    """Dump gear-ratio diagnostics for a hard-coded set of Garmin activities,
    plus one control activity, comparing observed ratios to each bike setup."""
    target_ids = ['21072264737', '18469350198', '18349164690']

    db = PostgreSQLManager(config.DATABASE_URL).SessionLocal()

    # Load all setups
    setups = db.query(BikeSetup).all()
    print(f"Loaded {len(setups)} bike setups.")
    for s in setups:
        # rear_cog == 0 means gearing not configured; skip to avoid /0.
        if s.rear_cog == 0:
            print(f" - {s.name or s.frame}: No gears configured (skipped)")
            continue
        # NOTE(review): assumes chainring > 0 whenever rear_cog > 0; a
        # chainring of 0 would print a 0.000 ratio here — confirm upstream.
        mech_ratio = s.chainring / s.rear_cog
        print(f" - {s.name or s.frame}: {s.chainring}/{s.rear_cog} = {mech_ratio:.3f} (Active: {s.purchase_date} to {s.retirement_date})")

    print("\n" + "="*80)

    # Add a control activity to verify script works
    # NOTE(review): the separator is printed twice in a row here — looks
    # accidental, but is harmless; confirm before removing.
    print("\n" + "="*80)
    print("CONTROL CHECK: Finding a random activity WITH cadence to verify script logic...")
    control_activity = db.query(Activity).filter(Activity.avg_cadence > 0, Activity.file_content != None).first()
    if control_activity:
        target_ids.append(control_activity.garmin_activity_id)
    else:
        print("No control activity found!")

    for gid in target_ids:
        print(f"\nAnalyzing Activity Garmin ID: {gid}")
        activity = db.query(Activity).filter(Activity.garmin_activity_id == str(gid)).first()

        if not activity:
            print(" - Not found in DB")
            continue

        print(f" - Type: {activity.activity_type}")
        print(f" - Date: {activity.start_time}")
        # Conditional expressions choose the whole print argument, so a
        # missing value falls back to the "None" string rather than crashing.
        print(f" - Global Avg Speed: {activity.avg_speed:.2f} m/s" if activity.avg_speed else " - Global Avg Speed: None")
        print(f" - Global Avg Cadence: {activity.avg_cadence:.1f} rpm" if activity.avg_cadence else " - Global Avg Cadence: None")

        if not activity.file_content:
            print(" - No file content available")
            continue

        data = extract_activity_data(activity.file_content, activity.file_type)
        speeds = data.get('speed') or []
        cadences = data.get('cadence') or []

        # Check if actual data exists
        valid_speeds = [x for x in speeds if x is not None]
        valid_cadences = [x for x in cadences if x is not None]

        if len(valid_cadences) < 10:
            print(" - CRITICAL: No cadence data stream found in file.")
            print(" - Result: IMPOSSIBLE TO MATCH GEAR RATIO.")
            continue

        samples, observed_ratio = analyze_streams_debug(speeds, cadences)

        print(f" - Steady Segments Found: {len(samples) if samples else 0}")
        print(f" - Observed Ratio (Median): {observed_ratio:.3f}")

        if samples:
            print(" - First 10 Steady Samples:")
            for s in samples:
                print(f"   - T={s['time_idx']}s | Spd={s['avg_spd']:.1f} | Cad={s['avg_cad']:.1f} | R={s['ratio']:.3f} (std_cad={s['cad_std']:.1f})")

        # ... logic continues ...

        # Compare the observed ratio against every setup's mechanical ratio,
        # flagging date-incompatible setups.
        print("\n - Matching Against Setups:")
        if observed_ratio > 0:
            for bike in setups:
                if bike.rear_cog == 0:
                    continue
                # Date Check: was this setup owned when the ride happened?
                active = True
                if bike.purchase_date and activity.start_time.date() < bike.purchase_date:
                    active = False
                if bike.retirement_date and activity.start_time.date() > bike.retirement_date:
                    active = False

                status_str = "ACTIVE" if active else "INACTIVE"

                mech_ratio = bike.chainring / bike.rear_cog
                diff = abs(observed_ratio - mech_ratio)
                # Confidence = 1 - relative error, floored at 0.
                error_pct = diff / mech_ratio
                confidence = max(0.0, 1.0 - error_pct)

                marker = "<<< BEST MATCH" if confidence > 0.9 else ""
                if not active: marker = "(Date Mismatch)"

                print(f"   - {bike.name or bike.frame} ({bike.chainring}/{bike.rear_cog}): Mech={mech_ratio:.3f} | Diff={diff:.3f} | Conf={confidence:.3f} [{status_str}] {marker}")
        else:
            print(" - Could not calculate valid observed ratio from streams.")

    db.close()


if __name__ == "__main__":
    main()
|
||||
28
FitnessSync/scratch/test_save.py
Normal file
28
FitnessSync/scratch/test_save.py
Normal file
@@ -0,0 +1,28 @@
|
||||
|
||||
# Scratch script: smoke-test the save_custom segment endpoint.
import requests
import json  # NOTE(review): appears unused; requests serializes json= itself

# Local backend endpoint that persists a user-defined segment.
URL = "http://localhost:8000/api/segments/save_custom"
|
||||
|
||||
def test_save_custom():
    """POST a small hard-coded segment to the save_custom endpoint and print
    the HTTP status and raw response body."""
    body = {
        "name": "Test Segment Discovered",
        "description": "Created via API test",
        "activity_type": "cycling",
        # [lon, lat, elevation] triples for a short three-point track.
        "points": [
            [151.2093, -33.8688, 10],
            [151.2100, -33.8690, 15],
            [151.2110, -33.8700, 20]
        ]
    }

    try:
        response = requests.post(URL, json=body)
        print(f"Status: {response.status_code}")
        print(f"Response: {response.text}")
    except Exception as e:
        # Best-effort smoke test: report the failure instead of crashing.
        print(f"Error: {e}")


if __name__ == "__main__":
    test_save_custom()
|
||||
146
FitnessSync/scratch/trace_turns.py
Normal file
146
FitnessSync/scratch/trace_turns.py
Normal file
@@ -0,0 +1,146 @@
|
||||
|
||||
# Scratch script: replay the segment-discovery turn/pause split heuristics
# over one activity's GPS track and print a step-by-step trace.
import sys
import os
import math  # NOTE(review): appears unused in this file
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import logging

# Make the backend package importable when run inside the app container.
sys.path.append('/app/backend')

from src.services.discovery import SegmentDiscoveryService  # NOTE(review): unused here
from src.models.activity import Activity
from src.utils.geo import calculate_bearing, haversine_distance
from src.services.parsers import extract_activity_data

logging.basicConfig(level=logging.INFO)
|
||||
|
||||
def trace_activity(activity_id):
    """Trace turn/pause segment-splitting heuristics over one activity.

    Args:
        activity_id: Garmin activity id (matched against the stored string).

    Prints per-point turn/pause events, summary stats, then re-runs turn
    detection on RDP-simplified tracks at several tolerances.
    """
    DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://postgres:password@localhost:5433/fitbit_garmin_sync")

    engine = create_engine(DATABASE_URL)
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    db = SessionLocal()

    # FIX: the session was never closed (early returns leaked it); a
    # try/finally guarantees cleanup on every exit path.
    try:
        # 1. Fetch Activity
        act = db.query(Activity).filter(Activity.garmin_activity_id == str(activity_id)).first()
        if not act:
            print(f"Activity {activity_id} NOT FOUND.")
            return

        print(f"Tracing Activity {act.id} ({act.garmin_activity_id})")

        # 2. Extract Data
        # Parser returns a dict: {'points': [], 'timestamps': [], ...}
        result = extract_activity_data(act.file_content, act.file_type)
        points = result['points']
        timestamps = result['timestamps']

        # Filter points (same logic as service): keep only fixes with both coords.
        clean_points = []
        clean_ts = []
        for i in range(len(points)):
            if points[i][0] is not None and points[i][1] is not None:
                clean_points.append(points[i])
                clean_ts.append(timestamps[i])

        print(f"Clean points: {len(clean_points)}")

        # FIX: guard against a track with no usable GPS fixes, which
        # previously raised IndexError on clean_points[0].
        if not clean_points:
            print("No clean GPS points; nothing to trace.")
            return

        # 3. Simulate Analysis Loop
        current_segment = [clean_points[0]]
        last_bearing = None

        segments_found = 0

        skipped_dist = 0
        max_dist = 0
        max_time_diff = 0
        max_bearing_diff = 0

        for i in range(1, len(clean_points)):
            p1 = clean_points[i-1]
            p2 = clean_points[i]

            # Point-to-point distance; GPS jitter under 2 m is ignored.
            dist = haversine_distance(p1[1], p1[0], p2[1], p2[0])
            max_dist = max(max_dist, dist)

            if dist < 2.0:
                skipped_dist += 1
                continue

            bearing = calculate_bearing(p1[1], p1[0], p2[1], p2[0])

            is_turn = False
            diff = 0
            if last_bearing is not None:
                # Shortest angular difference between consecutive bearings.
                diff = abs(bearing - last_bearing)
                if diff > 180: diff = 360 - diff
                max_bearing_diff = max(max_bearing_diff, diff)

                if diff > 60:
                    is_turn = True
                    print(f"TURN DETECTED at index {i}: Bearing {last_bearing:.1f} -> {bearing:.1f} (Diff: {diff:.1f})")
                elif diff > 10:  # Log even smaller turns to see noise
                    print(f"  Minor Turn at index {i}: Bearing {last_bearing:.1f} -> {bearing:.1f} (Diff: {diff:.1f})")

            last_bearing = bearing

            t1 = clean_ts[i-1]
            t2 = clean_ts[i]
            time_diff = (t2 - t1).total_seconds()
            max_time_diff = max(max_time_diff, time_diff)

            # A gap over 10 s between fixes is treated as a recording pause.
            is_pause = time_diff > 10
            if is_pause:
                print(f"PAUSE DETECTED at index {i}: {time_diff}s")

            if is_pause or is_turn:
                # Close out the running segment if it has enough points.
                if len(current_segment) > 10:
                    segments_found += 1
                    print(f"  -> Segment Created (Points: {len(current_segment)})")

                current_segment = [p2]
            else:
                current_segment.append(p2)

        print(f"\nStats:")
        print(f"  Total Points: {len(clean_points)}")
        # FIX: label said "< 5m" but the skip threshold above is 2.0 m.
        print(f"  Skipped (dist < 2m): {skipped_dist}")
        print(f"  Max Point-to-Point Dist: {max_dist:.2f}m")
        print(f"  Max Time Diff: {max_time_diff}s")
        print(f"  Max Bearing Diff: {max_bearing_diff:.1f}")
        print(f"  Segments Found (from splits): {segments_found}")

        from src.utils.geo import ramer_douglas_peucker

        # Re-run turn detection on simplified tracks at several tolerances.
        for eps in [2.0, 5.0, 10.0, 15.0]:
            # FIX: label typo "Riper RDP Trace" corrected.
            print(f"\n--- RDP Trace (epsilon={eps}m) ---")
            simplified = ramer_douglas_peucker(clean_points, eps)
            print(f"Simplified points: {len(simplified)} (from {len(clean_points)})")

            last_bearing = None
            turns_found = 0

            for i in range(1, len(simplified)):
                p1 = simplified[i-1]
                p2 = simplified[i]

                bearing = calculate_bearing(p1[1], p1[0], p2[1], p2[0])

                if last_bearing is not None:
                    diff = abs(bearing - last_bearing)
                    if diff > 180: diff = 360 - diff

                    if diff > 60:
                        print(f"TURN: {last_bearing:.1f} -> {bearing:.1f} (Diff: {diff:.1f})")
                        turns_found += 1
                    elif diff > 45:
                        print(f"  Minor Turn (>45): {last_bearing:.1f} -> {bearing:.1f} (Diff: {diff:.1f})")

                last_bearing = bearing

            print(f"Total Turns > 60: {turns_found}")
    finally:
        db.close()


if __name__ == "__main__":
    trace_activity(21465710074)
|
||||
19
FitnessSync/scratch/trigger_scan.py
Normal file
19
FitnessSync/scratch/trigger_scan.py
Normal file
@@ -0,0 +1,19 @@
|
||||
|
||||
# Scratch script: manually kick off a segment scan for one activity.
import requests
import json  # NOTE(review): appears unused in this file

# Activity ID 31 matches Garmin ID 21072264737
ACT_ID = 21072264737
URL = f"http://localhost:8000/api/segments/scan/{ACT_ID}"
|
||||
|
||||
def trigger():
    """POST to the segment-scan endpoint for the hard-coded activity and
    print the raw HTTP result."""
    print(f"Triggering scan for Activity {ACT_ID}...")
    try:
        response = requests.post(URL)
        print(f"Status: {response.status_code}")
        print(f"Response: {response.text}")
    except Exception as e:
        # Best-effort trigger: report the failure instead of crashing.
        print(f"Error: {e}")


if __name__ == "__main__":
    trigger()
|
||||
Reference in New Issue
Block a user