added activity view

This commit is contained in:
2026-01-09 09:59:36 -08:00
parent c45e41b6a9
commit 55e37fbca8
168 changed files with 8799 additions and 2426 deletions

View File

@@ -0,0 +1,19 @@
"""Debug helper: report whether Fitbit credentials exist on the Configuration row."""
from src.models.config import Configuration
from src.services.postgresql_manager import PostgreSQLManager
import os

print(f"CWD: {os.getcwd()}")
try:
    session_ctx = PostgreSQLManager().get_db_session()
    with session_ctx as session:
        # Assumes a singleton Configuration row; take the first one found.
        cfg = session.query(Configuration).first()
        if cfg is None:
            print("No Configuration record found.")
        else:
            present = lambda value: '[PRESENT]' if value else '[MISSING]'
            print(f"Config ID: {cfg.id}")
            print(f"Fitbit Client ID: {present(cfg.fitbit_client_id)}")
            print(f"Fitbit Client Secret: {present(cfg.fitbit_client_secret)}")
            print(f"Fitbit Redirect URI: {cfg.fitbit_redirect_uri}")
except Exception as e:
    print(f"Error: {e}")

View File

@@ -0,0 +1,27 @@
"""Probe which MFA-related keyword arguments garth.login accepts."""
import garth
import inspect

try:
    # Inspect sso.login directly if the submodule is importable.
    from garth import sso
    print(f"sso.login signature: {inspect.signature(sso.login)}")
except ImportError:
    print("Could not import garth.sso")

# Dummy-credential calls: a TypeError means the keyword is unsupported; any
# other exception means the argument was accepted and the login itself failed.
print("Testing return_on_mfa=True...")
try:
    garth.login("dummy", "dummy", return_on_mfa=True)
except TypeError as e:
    print(f"return_on_mfa failed: {e}")
except Exception as e:
    print(f"return_on_mfa result: {type(e).__name__} (This is good, it accepted the arg)")

print("Testing prompt_mfa=True...")
try:
    garth.login("dummy", "dummy", prompt_mfa=True)
except TypeError as e:
    print(f"prompt_mfa failed: {e}")
except Exception as e:
    print(f"prompt_mfa result: {type(e).__name__}")

View File

@@ -0,0 +1,17 @@
"""Debug helper: verify a Fitbit APIToken row exists and has token values."""
from src.models.api_token import APIToken
from src.services.postgresql_manager import PostgreSQLManager
import os

print(f"CWD: {os.getcwd()}")
try:
    with PostgreSQLManager().get_db_session() as session:
        tok = session.query(APIToken).filter_by(token_type="fitbit").first()
        if tok is None:
            print("No Fitbit token found in DB")
        else:
            print(f"Token Found: ID={tok.id}")
            print(f"Access Token: {'[PRESENT]' if tok.access_token else '[MISSING]'}")
            print(f"Refresh Token: {'[PRESENT]' if tok.refresh_token else '[MISSING]'}")
except Exception as e:
    print(f"Error: {e}")

View File

@@ -0,0 +1,80 @@
"""Debug script: load stored Garmin OAuth tokens from Postgres, attach them to
the global garth client, and verify the session by hitting the profile API."""
import os
import sys
import json
import traceback  # hoisted: was imported separately inside three except blocks

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
import garth
from garth.auth_tokens import OAuth1Token, OAuth2Token

# Setup DB connection (docker-compose internal hostname/port).
DATABASE_URL = "postgresql://postgres:password@db:5432/fitbit_garmin_sync"
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def debug_connection():
    """Hydrate garth.client from DB-stored tokens, then test two API call paths.

    Prints diagnostics throughout; never raises (errors are printed with
    tracebacks instead). Closes the DB session in all cases.
    """
    print(f"Garth version: {garth.__version__}")
    print(f"Type of garth.client: {type(garth.client)}")
    print(f"Content of garth.http.client: {garth.http.client}")
    db = SessionLocal()
    try:
        # Fetch the persisted token JSON blobs for the garmin record.
        result = db.execute(text("SELECT garth_oauth1_token, garth_oauth2_token FROM api_tokens WHERE token_type='garmin'"))
        row = result.fetchone()
        if not row:
            print("No tokens found.")
            return
        oauth1_json, oauth2_json = row
        print(f"Loaded OAuth1 JSON type: {type(oauth1_json)}")
        oauth1_data = json.loads(oauth1_json)
        oauth2_data = json.loads(oauth2_json)
        print("Instantiating tokens...")
        oauth1 = OAuth1Token(**oauth1_data)
        oauth2 = OAuth2Token(**oauth2_data)
        print(f"OAuth1Token object: {oauth1}")
        # Assign onto the module-global client so both call paths below use them.
        garth.client.oauth1_token = oauth1
        garth.client.oauth2_token = oauth2
        print("Tokens assigned.")
        print(f"garth.client.oauth1_token type: {type(garth.client.oauth1_token)}")
        # Path 1: low-level connectapi call.
        print("Attempting garth.client.connectapi('/userprofile-service/socialProfile')...")
        try:
            profile_direct = garth.client.connectapi("/userprofile-service/socialProfile")
            print("Direct connectapi success!")
            print(f"Profile keys: {profile_direct.keys()}")
        except Exception as e:
            print(f"Direct connectapi failed: {e}")
            traceback.print_exc()
        # Path 2: high-level UserProfile wrapper.
        print("Attempting garth.UserProfile.get()...")
        try:
            profile = garth.UserProfile.get()
            print("UserProfile.get() success!")
        except Exception as e:
            print(f"UserProfile.get() failed: {e}")
            traceback.print_exc()
    except Exception as e:
        print(f"General error: {e}")
        traceback.print_exc()
    finally:
        db.close()


if __name__ == "__main__":
    debug_connection()

View File

@@ -0,0 +1,87 @@
"""Inspect the raw file_content blob stored for one Garmin activity to work
out what format it was saved in (ZIP / FIT / XML / JSON / Base64-wrapped)."""
import os
import psycopg2
import binascii
import base64

# Connection settings from docker-compose.yml.
# This script is intended to run INSIDE the app container
# (`docker compose exec app python inspect_activity.py`), so the internal
# hostname 'db' and port 5432 are used. From the host you would instead use
# localhost:5433 (the exposed port).
DB_HOST = "db"
DB_NAME = "fitbit_garmin_sync"
DB_USER = "postgres"
DB_PASSWORD = "password"
ACTIVITY_ID = "21342551924"

try:
    conn = psycopg2.connect(
        host=DB_HOST,
        database=DB_NAME,
        user=DB_USER,
        password=DB_PASSWORD
    )
    print("Connected to database.")
    cur = conn.cursor()
    query = "SELECT file_type, file_content FROM activities WHERE garmin_activity_id = %s"
    cur.execute(query, (ACTIVITY_ID,))
    row = cur.fetchone()
    if row:
        file_type, content = row
        print(f"Stored file_type: {file_type}")
        if content is None:
            print("Activity found, but file_content is NULL.")
        else:
            # psycopg2 returns BYTEA as memoryview; normalize to bytes.
            if isinstance(content, memoryview):
                content = bytes(content)
            print(f"Content Type: {type(content)}")
            print(f"Content Length: {len(content)} bytes")
            print(f"First 50 bytes (hex): {binascii.hexlify(content[:50])}")
            print(f"First 50 bytes (repr): {repr(content[:50])}")
            # Known magic numbers / signatures.
            if content.startswith(b'PK'):
                print("Signature matches ZIP file.")
            elif content.startswith(b'\x0e\x10') or b'.FIT' in content[:20]:
                print("Signature might be FIT file.")
            elif content.startswith(b'<?xml'):
                print("Signature matches XML (TCX/GPX).")
            elif content.strip().startswith(b'{'):
                print("Signature matches JSON.")
            # Check whether the blob is Base64-wrapped binary.
            try:
                s = content.decode('utf-8')
                try:
                    # validate=True: without it, b64decode silently discards
                    # non-alphabet characters and would report almost any
                    # UTF-8 text as "valid Base64".
                    decoded = base64.b64decode(s, validate=True)
                    print("This looks like Base64 encoded data.")
                    print(f"Decoded first 20 bytes: {decoded[:20]}")
                    if decoded.startswith(b'PK'):
                        print("Decoded data matches ZIP signature.")
                except Exception as e:
                    print(f"Not valid Base64: {e}")
            except UnicodeDecodeError:
                print("Not UTF-8 text, likely binary.")
    else:
        print(f"No activity found with garmin_activity_id {ACTIVITY_ID}")
    cur.close()
    conn.close()
except Exception as e:
    print(f"Error: {e}")

View File

@@ -0,0 +1,45 @@
"""List the metadata keys present on a single Garmin activity payload."""
import logging
import sys
import os

# Adjust path to find backend modules.
sys.path.append(os.path.abspath("/home/sstent/Projects/FitTrack2/FitnessSync/backend"))

from src.services.garmin.client import GarminClient
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from src.models import Base
from src.services.postgresql_manager import PostgreSQLManager

# Mock/Setup DB connection to load tokens.
DATABASE_URL = "postgresql://user:password@localhost:5432/fitness_sync"
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(bind=engine)
db = SessionLocal()
try:
    client = GarminClient()
    if not client.load_tokens(db):
        print("Failed to load tokens.")
    else:
        print("Tokens loaded.")
        # Pull activities from a recent window and dump the first one's keys.
        activities = client.get_activities('2025-12-01', '2026-01-01')
        if not activities:
            print("No activities found in range.")
        else:
            first = activities[0]
            print("\n--- Available Keys in Activity Metadata ---")
            for key in sorted(first.keys()):
                shown = str(first[key])
                if len(shown) > 50:
                    # Truncate long values for readability.
                    shown = shown[:50] + "..."
                print(f"{key}: {shown}")
            print(f"\nTotal keys: {len(first.keys())}")
except Exception as e:
    print(f"Error: {e}")
finally:
    db.close()

View File

@@ -0,0 +1,44 @@
"""Dump the raw stored Garmin OAuth token JSON for manual inspection."""
import os
import sys

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

# Add backend directory to path so the app config is importable; raw SQL is
# used below so the ORM models themselves are not needed.
sys.path.append('/app/backend')
os.chdir('/app/backend')

from src.config import get_settings

settings = get_settings()
engine = create_engine(settings.DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

SEPARATOR = "-" * 40


def inspect_tokens():
    """Print the garmin api_tokens row (raw OAuth1/OAuth2 JSON) or a notice."""
    db = SessionLocal()
    try:
        row = db.execute(
            text("SELECT token_type, garth_oauth1_token, garth_oauth2_token, updated_at FROM api_tokens WHERE token_type='garmin'")
        ).fetchone()
        if not row:
            print("No Garmin token record found.")
            return
        token_type, oauth1_raw, oauth2_raw, updated_at = row
        print(f"Token Type: {token_type}")
        print(f"Updated At: {updated_at}")
        print(SEPARATOR)
        print("OAuth1 Token Raw:")
        print(oauth1_raw)
        print(SEPARATOR)
        print("OAuth2 Token Raw:")
        print(oauth2_raw)
        print(SEPARATOR)
    except Exception as e:
        print(f"Error inspecting tokens: {e}")
    finally:
        db.close()


if __name__ == "__main__":
    inspect_tokens()

View File

@@ -0,0 +1,41 @@
"""Standalone variant: dump the stored Garmin OAuth token JSON using a
hard-coded database URL (no app config import required)."""
import os
import sys

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

# Hardcoded from docker-compose.yml (service-internal host/port):
# DATABASE_URL=postgresql://postgres:password@db:5432/fitbit_garmin_sync
DATABASE_URL = "postgresql://postgres:password@db:5432/fitbit_garmin_sync"
engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)


def inspect_tokens():
    """Print the garmin api_tokens row; raw SQL avoids needing model definitions."""
    session = SessionLocal()
    try:
        query = text("SELECT token_type, garth_oauth1_token, garth_oauth2_token, updated_at FROM api_tokens WHERE token_type='garmin'")
        record = session.execute(query).fetchone()
        if record is None:
            print("No Garmin token record found.")
            return
        print(f"Token Type: {record[0]}")
        print(f"Updated At: {record[3]}")
        for label, value in (("OAuth1 Token Raw:", record[1]), ("OAuth2 Token Raw:", record[2])):
            print("-" * 40)
            print(label)
            print(value)
        print("-" * 40)
    except Exception as e:
        print(f"Error inspecting tokens: {e}")
    finally:
        session.close()


if __name__ == "__main__":
    inspect_tokens()

View File

@@ -0,0 +1,129 @@
"""Debug script: find a WeightRecord whose BMI is NULL, then query the Fitbit
API for that date to see whether the API actually returns a BMI value.

NOTE(review): reconstructed from a whitespace-stripped listing — the nesting of
the manual-refresh retry relative to the first fetch attempt is inferred.
"""
import os
import datetime
from dotenv import load_dotenv

# Load env vars before importing app modules that may read them.
load_dotenv()

from src.services.postgresql_manager import PostgreSQLManager
from src.services.fitbit_client import FitbitClient
from src.models.api_token import APIToken
from src.models.weight_record import WeightRecord

# Default DB URL for running from the host (exposed port 5433).
if not os.environ.get('DATABASE_URL'):
    os.environ['DATABASE_URL'] = 'postgresql://postgres:password@localhost:5433/fitbit_garmin_sync'


def _report_logs(logs, date_str):
    """Print fetched weight logs and whether a 'bmi' field is present.

    Extracted: this reporting block was duplicated verbatim in two places.
    """
    print(f"Fetched {len(logs)} logs for {date_str}")
    if logs:
        print("First log payload:", logs[0])
        if 'bmi' in logs[0]:
            print(f"BMI in response: {logs[0]['bmi']}")
        else:
            print("BMI NOT found in response.")


def main():
    print("Connecting to DB...")
    db = PostgreSQLManager()
    with db.get_db_session() as session:
        # Find a record that is missing its BMI.
        null_record = session.query(WeightRecord).filter(WeightRecord.bmi == None).first()
        if not null_record:
            print("No NULL BMI records found.")
            return
        date_str = null_record.date.strftime('%Y-%m-%d')
        print(f"Checking Fitbit API for date: {date_str} (Existing BMI: {null_record.bmi})")

        # Init Client from the stored token row.
        token_record = session.query(APIToken).filter_by(token_type='fitbit').first()
        if not token_record:
            print("No Fitbit token found.")
            return

        from src.utils.config import config
        from src.models.config import Configuration
        client_id = config.FITBIT_CLIENT_ID
        client_secret = config.FITBIT_CLIENT_SECRET
        # Fallback: credentials may live on the DB Configuration row instead.
        if not client_id or not client_secret:
            print("Config missing credentials, checking DB...")
            db_config = session.query(Configuration).first()
            if db_config:
                client_id = db_config.fitbit_client_id
                client_secret = db_config.fitbit_client_secret
                print("Loaded credentials from DB Configuration.")
        if not client_id or not client_secret:
            print("ERROR: Could not find Fitbit credentials in Config or DB.")
            return

        def refresh_cb(token):
            # In the real app the refreshed token would be persisted here.
            print("DEBUG: Token refreshed!", flush=True)

        client = FitbitClient(
            client_id=client_id,
            client_secret=client_secret,
            redirect_uri=os.environ.get('FITBIT_REDIRECT_URI', 'http://localhost:8000/api/setup/fitbit/callback'),
            access_token=token_record.access_token,
            refresh_token=token_record.refresh_token,
            refresh_cb=refresh_cb
        )

        # The fitbit library refreshes an expired token lazily on the first
        # request, so just making the call should trigger it.
        try:
            logs = client.get_weight_logs(date_str, date_str)
            _report_logs(logs, date_str)
        except Exception as e:
            print(f"Error fetching logs: {e}")
            # The library may not have auto-refreshed; refresh manually against
            # Fitbit's token endpoint and retry once.
            try:
                print("Attempting manual refresh...")
                new_token = client.fitbit.client.refresh_token(
                    'https://api.fitbit.com/oauth2/token',
                    refresh_token=token_record.refresh_token,
                    auth=(client_id, client_secret)
                )
                print("Manual refresh success:", new_token.keys())
                client.fitbit.client.token = new_token
                logs = client.get_weight_logs(date_str)
                if logs:
                    print("Retry Payload:", logs[0])
            except Exception as re:
                print(f"Manual refresh failed: {re}")

        # Final attempt after any refresh, reporting BMI presence again.
        try:
            logs = client.get_weight_logs(date_str)
            _report_logs(logs, date_str)
        except Exception as e:
            print(f"Error fetching logs: {e}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,39 @@
"""Summarize BMI completeness across WeightRecord rows (null / zero / valid)."""
import os
from src.services.postgresql_manager import PostgreSQLManager
from src.models.weight_record import WeightRecord
from sqlalchemy import func

# Default to the host-exposed Postgres port unless the env already says otherwise.
if not os.environ.get('DATABASE_URL'):
    os.environ['DATABASE_URL'] = 'postgresql://postgres:password@localhost:5433/fitbit_garmin_sync'


def main():
    print("Connecting to DB...")
    manager = PostgreSQLManager()
    with manager.get_db_session() as session:
        base = session.query(WeightRecord)
        # Insertion order matters: printed in the same order as computed.
        counts = {
            "Total Records": base.count(),
            "Null BMI": base.filter(WeightRecord.bmi == None).count(),
            "Zero BMI": base.filter(WeightRecord.bmi == 0).count(),
            "Valid BMI": base.filter(WeightRecord.bmi > 0).count(),
        }
        for label, value in counts.items():
            print(f"{label}: {value}")
        if counts["Valid BMI"] > 0:
            sample = base.filter(WeightRecord.bmi > 0).first()
            print(f"Sample Valid: Date={sample.date}, Weight={sample.weight}, BMI={sample.bmi}")
        if counts["Null BMI"] > 0:
            sample = base.filter(WeightRecord.bmi == None).first()
            print(f"Sample Null: Date={sample.date}, Weight={sample.weight}, BMI={sample.bmi}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,21 @@
"""Introspect the installed garminconnect package: version, Garmin class
attributes, and the ActivityDownloadFormat enum if present."""
try:
    import garminconnect
    print(f"garminconnect version: {getattr(garminconnect, '__version__', 'unknown')}")
    from garminconnect import Garmin

    print("\n--- Garmin Class Attributes ---")
    for name in (a for a in dir(Garmin) if not a.startswith("__")):
        print(name)

    if not hasattr(Garmin, 'ActivityDownloadFormat'):
        print("\nGarmin.ActivityDownloadFormat not found.")
    else:
        print("\n--- Garmin.ActivityDownloadFormat Attributes ---")
        fmt = Garmin.ActivityDownloadFormat
        for name in (a for a in dir(fmt) if not a.startswith("__")):
            print(f"{name}: {getattr(fmt, name)}")
except Exception as e:
    print(f"Error: {e}")

View File

@@ -0,0 +1,14 @@
"""Check which session attribute name the installed garth Client exposes."""
import garth
from garth.http import Client

print("Garth version:", garth.__version__)
probe = Client()
print("Client dir:", dir(probe))
# The attribute name differs across garth versions; report whichever exists.
for candidate in ("sess", "session", "_session"):
    if hasattr(probe, candidate):
        print(f"Has .{candidate}")

View File

@@ -0,0 +1,49 @@
"""Compare record counts between the legacy WeightRecord table and the
HealthMetric table (Fitbit weight entries), and show the latest few rows."""
from backend.src.services.postgresql_manager import PostgreSQLManager
from backend.src.utils.config import config
from backend.src.models.weight_record import WeightRecord
from backend.src.models.health_metric import HealthMetric
from sqlalchemy import func
import sys


def check_count():
    """Print counts plus the five most recent Fitbit weight HealthMetric rows."""
    print("DEBUG: Connecting to DB...", flush=True)
    try:
        db_manager = PostgreSQLManager(config.DATABASE_URL)
        print("DEBUG: Session factory created.", flush=True)
        with db_manager.get_db_session() as session:
            print("DEBUG: Session active.", flush=True)
            # Legacy table count.
            try:
                legacy_total = session.query(func.count(WeightRecord.id)).scalar()
                print(f"Total WeightRecord (Legacy?) records: {legacy_total}")
            except Exception as e:
                print(f"Error querying WeightRecord: {e}")
            # New-style HealthMetric count (fitbit weight only).
            try:
                metric_total = session.query(func.count(HealthMetric.id)).filter(
                    HealthMetric.metric_type == 'weight',
                    HealthMetric.source == 'fitbit'
                ).scalar()
                print(f"Total HealthMetric (Fitbit Weight) records: {metric_total}")
            except Exception as e:
                print(f"Error querying HealthMetric: {e}")
            # Show the most recent entries for a quick sanity check.
            print("\nLatest 5 HealthMetric (Fitbit Weight) records:")
            recent = session.query(HealthMetric).filter(
                HealthMetric.metric_type == 'weight',
                HealthMetric.source == 'fitbit'
            ).order_by(HealthMetric.date.desc()).limit(5).all()
            for rec in recent:
                print(f" - {rec.date}: {rec.metric_value} {rec.unit}")
    except Exception as e:
        print(f"CRITICAL ERROR: {e}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    check_count()

View File

@@ -0,0 +1,19 @@
"""One-shot maintenance script: drop the scheduled_jobs table and recreate
the schema via PostgreSQLManager.init_db()."""
from src.services.postgresql_manager import PostgreSQLManager
from src.models.base import Base
from src.models.scheduled_job import ScheduledJob
from src.utils.config import config
import logging
from sqlalchemy import text

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    manager = PostgreSQLManager(config.DATABASE_URL)
    with manager.get_db_session() as session:
        print("Dropping scheduled_jobs table...")
        session.execute(text("DROP TABLE IF EXISTS scheduled_jobs"))
        session.commit()
        print("Re-creating tables...")
        manager.init_db()
        print("Done.")

View File

@@ -0,0 +1,30 @@
"""Smoke-test the /api/metrics/query endpoint and report how many records
come back for a wide date range."""
import requests
import json


def test_api():
    """Query Fitbit weight metrics over 2025 and print a short preview."""
    endpoint = "http://localhost:8000/api/metrics/query"
    query = {
        "metric_type": "weight",
        "source": "fitbit",
        "start_date": "2025-01-01",
        "end_date": "2026-01-02",
        "limit": 1000,  # deliberately large to see if the API caps results
    }
    try:
        resp = requests.get(endpoint, params=query)
        resp.raise_for_status()
        records = resp.json()
        print(f"Status: {resp.status_code}")
        print(f"Records returned: {len(records)}")
        if len(records) < 10:
            print("Data preview:", json.dumps(records, indent=2))
        else:
            print("First record:", json.dumps(records[0], indent=2))
            print("Last record:", json.dumps(records[-1], indent=2))
    except Exception as e:
        print(f"Error: {e}")


if __name__ == "__main__":
    test_api()

View File

@@ -0,0 +1,89 @@
"""End-to-end exercise of the job-control API: trigger a test job, then
pause, resume, and cancel it, finally confirming it lands in job history."""
import requests
import time

BASE_URL = "http://localhost:8000/api"


def get_job(job_id):
    """Return the active-job dict for job_id, or None if absent or unreachable."""
    try:
        res = requests.get(f"{BASE_URL}/jobs/active", timeout=5)
        active = res.json()
        return next((j for j in active if j['id'] == job_id), None)
    except Exception:
        # Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
        return None


def main():
    print("Triggering test job...")
    try:
        res = requests.post(f"{BASE_URL}/status/test-job", timeout=5)
        job_id = res.json()['job_id']
        print(f"Job ID: {job_id}")
    except Exception as e:
        print(f"FAILURE: Could not trigger job: {e}")
        return

    time.sleep(2)
    job = get_job(job_id)
    if not job:
        print("FAILURE: Job not active")
        return
    print(f"Initial Progress: {job['progress']}%")

    # PAUSE
    print("Pausing job...")
    requests.post(f"{BASE_URL}/jobs/{job_id}/pause")
    time.sleep(1)
    job = get_job(job_id)
    if not job:
        # Guard: the original crashed with TypeError here if the job finished
        # or the poll failed between requests.
        print("FAILURE: Job disappeared after pause")
        return
    print(f"Status after pause: {job['status']}")
    if job['status'] != 'paused':
        print("FAILURE: Status is not 'paused'")
    prog_at_pause = job['progress']
    time.sleep(3)
    job = get_job(job_id)
    if not job:
        print("FAILURE: Job disappeared while paused")
        return
    print(f"Progress after 3s pause: {job['progress']}%")
    if job['progress'] != prog_at_pause:
        print("FAILURE: Job continued running while paused!")
    else:
        print("SUCCESS: Job paused correctly.")

    # RESUME
    print("Resuming job...")
    requests.post(f"{BASE_URL}/jobs/{job_id}/resume")
    time.sleep(3)
    job = get_job(job_id)
    if not job:
        print("FAILURE: Job disappeared after resume")
        return
    print(f"Status after resume: {job['status']}")
    print(f"Progress after resume: {job['progress']}%")
    if job['progress'] > prog_at_pause:
        print("SUCCESS: Job resumed and progress advanced.")
    else:
        print("FAILURE: Job didn't advance after resume.")

    # CANCEL
    print("Cancelling job...")
    requests.post(f"{BASE_URL}/jobs/{job_id}/cancel")
    time.sleep(2)
    # Job should now be completed/cancelled.
    # Wait for retention cleanup (10s) + buffer before checking history.
    print("Waiting for retention cleanup (12s)...")
    time.sleep(12)

    print("Checking History...")
    res = requests.get(f"{BASE_URL}/jobs/history")
    history = res.json()
    my_job = next((j for j in history if j['id'] == job_id), None)
    if my_job:
        print(f"SUCCESS: Job found in history. Status: {my_job['status']}")
    else:
        print("FAILURE: Job not found in history.")


if __name__ == "__main__":
    main()