mirror of
https://github.com/sstent/foodplanner.git
synced 2026-04-04 20:13:52 +00:00
Compare commits
2 Commits
09653d7415
...
6972c9b8f9
| Author | SHA1 | Date | |
|---|---|---|---|
| 6972c9b8f9 | |||
| 9fa3380730 |
63
MIGRATION.md
Normal file
63
MIGRATION.md
Normal file
@@ -0,0 +1,63 @@
|
||||
# Database Migration Guide
|
||||
|
||||
This guide outlines the offline workflow to migrate your `meal_planner` data from SQLite to PostgreSQL.
|
||||
|
||||
## Prerequisites
|
||||
- Docker Compose installed.
|
||||
- The application running (or capable of running) via `docker-compose`.
|
||||
|
||||
## Migration Steps
|
||||
|
||||
### 1. Backup your SQLite Database
|
||||
First, create a safety copy of your current database.
|
||||
```bash
|
||||
cp data/meal_planner.db meal_planner_backup.db
|
||||
```
|
||||
|
||||
### 2. Stop the Application
|
||||
Stop the running application container to ensure no new data is written.
|
||||
```bash
|
||||
docker-compose stop foodtracker
|
||||
```
|
||||
|
||||
### 3. Start PostgreSQL
|
||||
Ensure the new PostgreSQL service is running.
|
||||
```bash
|
||||
docker-compose up -d postgres
|
||||
```
|
||||
|
||||
### 4. Run the Migration
|
||||
Use a temporary container to run the migration script. We mount your backup file and connect to the postgres service.
|
||||
```bash
|
||||
# Syntax: python migrate_to_postgres.py --sqlite-path <path_to_db> --pg-url <postgres_url>
|
||||
|
||||
docker-compose run --rm \
|
||||
-v $(pwd)/meal_planner_backup.db:/backup.db \
|
||||
-v $(pwd)/migrate_to_postgres.py:/app/migrate_to_postgres.py \
|
||||
foodtracker \
|
||||
python migrate_to_postgres.py \
|
||||
--sqlite-path /backup.db \
|
||||
--pg-url postgresql://user:password@postgres/meal_planner
|
||||
```
|
||||
|
||||
### 5. Update Configuration
|
||||
Edit `docker-compose.yml` to switch the active database.
|
||||
1. Comment out the SQLite `DATABASE_URL`.
|
||||
2. Uncomment the PostgreSQL `DATABASE_URL`.
|
||||
|
||||
```yaml
|
||||
environment:
|
||||
# - DATABASE_URL=sqlite:////app/data/meal_planner.db
|
||||
- DATABASE_URL=postgresql://user:password@postgres/meal_planner
|
||||
```
|
||||
|
||||
### 6. Restart the Application
|
||||
Rebuild and start the application to use the new database.
|
||||
```bash
|
||||
docker-compose up -d --build foodtracker
|
||||
```
|
||||
|
||||
## Verification
|
||||
1. Log in to the application.
|
||||
2. Verify your Foods, Meals, and Plans are present.
|
||||
3. Check `docker logs foodplanner-foodtracker-1` to ensure no database connection errors.
|
||||
55
alembic/versions/e1c2d8d5c1a8_add_fitbit_tables.py
Normal file
55
alembic/versions/e1c2d8d5c1a8_add_fitbit_tables.py
Normal file
@@ -0,0 +1,55 @@
|
||||
"""add fitbit tables

Revision ID: e1c2d8d5c1a8
Revises: 31fdce040eea
Create Date: 2026-01-12 12:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
# NOTE: the docstring previously claimed "Revises: 4522e2de4143" while
# down_revision pointed at '31fdce040eea'. Alembic only reads down_revision,
# so the docstring has been corrected to match the effective parent revision.
revision: str = 'e1c2d8d5c1a8'
down_revision: Union[str, None] = '31fdce040eea'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Apply the migration: create fitbit_config and weight_logs plus indexes."""
    # Create fitbit_config table
    # Single-row table holding Fitbit OAuth app credentials plus the current
    # token pair; expires_at is a POSIX timestamp (float).
    op.create_table('fitbit_config',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(), nullable=True),
        sa.Column('client_secret', sa.String(), nullable=True),
        sa.Column('redirect_uri', sa.String(), nullable=True),
        sa.Column('access_token', sa.String(), nullable=True),
        sa.Column('refresh_token', sa.String(), nullable=True),
        sa.Column('expires_at', sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_fitbit_config_id'), 'fitbit_config', ['id'], unique=False)

    # Create weight_logs table
    op.create_table('weight_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('date', sa.Date(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.Column('source', sa.String(), nullable=True),
        sa.Column('fitbit_log_id', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weight_logs_date'), 'weight_logs', ['date'], unique=False)
    # Unique index on the Fitbit log id prevents importing the same entry twice.
    op.create_index(op.f('ix_weight_logs_fitbit_log_id'), 'weight_logs', ['fitbit_log_id'], unique=True)
    op.create_index(op.f('ix_weight_logs_id'), 'weight_logs', ['id'], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Revert the migration: drop indexes before their tables, in reverse order."""
    op.drop_index(op.f('ix_weight_logs_id'), table_name='weight_logs')
    op.drop_index(op.f('ix_weight_logs_fitbit_log_id'), table_name='weight_logs')
    op.drop_index(op.f('ix_weight_logs_date'), table_name='weight_logs')
    op.drop_table('weight_logs')
    op.drop_index(op.f('ix_fitbit_config_id'), table_name='fitbit_config')
    op.drop_table('fitbit_config')
|
||||
@@ -11,6 +11,7 @@ from app.api.routes import (
|
||||
templates,
|
||||
tracker,
|
||||
weekly_menu,
|
||||
fitbit,
|
||||
)
|
||||
|
||||
api_router = APIRouter()
|
||||
@@ -20,6 +21,7 @@ api_router.include_router(meals.router, tags=["meals"])
|
||||
api_router.include_router(templates.router, tags=["templates"])
|
||||
api_router.include_router(charts.router, tags=["charts"])
|
||||
api_router.include_router(admin.router, tags=["admin"])
|
||||
api_router.include_router(fitbit.router, tags=["fitbit"])
|
||||
api_router.include_router(weekly_menu.router, tags=["weekly_menu"])
|
||||
api_router.include_router(plans.router, tags=["plans"])
|
||||
api_router.include_router(export.router, tags=["export"])
|
||||
|
||||
@@ -187,6 +187,45 @@ async def restore_backup(request: Request, backup_file: str = Form(...)):
|
||||
# You might want to add some user-facing error feedback here
|
||||
pass
|
||||
|
||||
# Redirect back to the backups page
|
||||
# Redirect back to the backups page
|
||||
from fastapi.responses import RedirectResponse
|
||||
return RedirectResponse(url="/admin/backups", status_code=303)
|
||||
return RedirectResponse(url="/admin/backups", status_code=303)
|
||||
|
||||
@router.get("/admin/config-status", response_class=HTMLResponse)
async def admin_config_status_page(request: Request):
    """Display current system configuration and database status."""
    from urllib.parse import urlparse

    # Analyze DATABASE_URL securely
    db_url = DATABASE_URL
    masked_url = db_url
    db_host = "Unknown"
    db_type = "Unknown"

    try:
        # Simple parsing logic to avoid exposing credentials if urlparse fails or acts unexpectedly
        if "sqlite" in db_url:
            db_type = "SQLite"
            db_host = db_url.replace("sqlite:///", "")
            masked_url = "sqlite:///" + db_host
        elif "postgresql" in db_url:
            db_type = "PostgreSQL"
            parsed = urlparse(db_url)
            db_host = parsed.hostname
            # Mask password
            if parsed.password:
                masked_url = db_url.replace(parsed.password, "******")
    except Exception as e:
        logging.error(f"Error parsing database URL: {e}")
        masked_url = "Error parsing URL"

    config_data = {
        # NOTE(review): the unmasked URL (credentials included) is handed to the
        # template; the visible template only substring-tests it, but consider
        # passing only the masked form — confirm no template renders this raw.
        "database_url": db_url,
        "database_url_masked": masked_url,
        "database_type": db_type,
        "database_host": db_host,
        # NOTE(review): hard-coded True — the page will always show "Debug Mode".
        # Wire this to a real settings flag if one exists.
        "debug": True
    }

    return templates.TemplateResponse(request, "admin/config.html", {"request": request, "config": config_data})
|
||||
@@ -3,7 +3,7 @@ from starlette.responses import HTMLResponse
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import date, timedelta
|
||||
from typing import List
|
||||
from app.database import get_db, TrackedDay, TrackedMeal, calculate_day_nutrition_tracked
|
||||
from app.database import get_db, TrackedDay, TrackedMeal, calculate_day_nutrition_tracked, WeightLog
|
||||
|
||||
router = APIRouter(tags=["charts"])
|
||||
|
||||
@@ -37,17 +37,101 @@ async def get_charts_data(
|
||||
).order_by(TrackedDay.date.desc()).all()
|
||||
|
||||
chart_data = []
|
||||
for tracked_day in tracked_days:
|
||||
tracked_meals = db.query(TrackedMeal).filter(
|
||||
TrackedMeal.tracked_day_id == tracked_day.id
|
||||
# Fetch all tracked days and weight logs for the period
|
||||
tracked_days_map = {
|
||||
d.date: d for d in db.query(TrackedDay).filter(
|
||||
TrackedDay.person == person,
|
||||
TrackedDay.date >= start_date,
|
||||
TrackedDay.date <= end_date
|
||||
).all()
|
||||
day_totals = calculate_day_nutrition_tracked(tracked_meals, db)
|
||||
chart_data.append({
|
||||
"date": tracked_day.date.isoformat(),
|
||||
"calories": round(day_totals.get("calories", 0), 2),
|
||||
"protein": round(day_totals.get("protein", 0), 2),
|
||||
"fat": round(day_totals.get("fat", 0), 2),
|
||||
"net_carbs": round(day_totals.get("net_carbs", 0), 2)
|
||||
})
|
||||
}
|
||||
|
||||
# Sort logs desc
|
||||
weight_logs_map = {
|
||||
w.date: w for w in db.query(WeightLog).filter(
|
||||
WeightLog.date >= start_date,
|
||||
WeightLog.date <= end_date
|
||||
).order_by(WeightLog.date.desc()).all()
|
||||
}
|
||||
|
||||
# Get last weight BEFORE start_date (for initial carry forward)
|
||||
last_historical_weight_log = db.query(WeightLog).filter(
|
||||
WeightLog.date < start_date
|
||||
).order_by(WeightLog.date.desc()).first()
|
||||
|
||||
last_historical_weight_val = last_historical_weight_log.weight * 2.20462 if last_historical_weight_log else None
|
||||
|
||||
# Find the most recent weight available (either in range or history)
|
||||
# This is for "Today" (end_date)
|
||||
latest_weight_val = last_historical_weight_val
|
||||
|
||||
# Check if we have newer weights in the map
|
||||
# Values in weight_logs_map are WeightLog objects.
|
||||
# Find the one with max date <= end_date. Since map key is date, we can check.
|
||||
# But filtering the map is tedious. Let's just iterate.
|
||||
# Actually, we already have `weight_logs_map` (in range).
|
||||
# If the range has weights, the newest one is the "latest" known weight relevant to the end of chart.
|
||||
if weight_logs_map:
|
||||
# Get max date
|
||||
max_date = max(weight_logs_map.keys())
|
||||
latest_weight_val = weight_logs_map[max_date].weight * 2.20462
|
||||
|
||||
chart_data = []
|
||||
|
||||
# Iterate dates. Note: i=0 is end_date (Today), i=days-1 is start_date (Oldest)
|
||||
for i in range(days):
|
||||
current_date = end_date - timedelta(days=i)
|
||||
|
||||
tracked_day = tracked_days_map.get(current_date)
|
||||
weight_log = weight_logs_map.get(current_date)
|
||||
|
||||
calories = 0
|
||||
protein = 0
|
||||
fat = 0
|
||||
net_carbs = 0
|
||||
|
||||
# Calculate nutrition
|
||||
if tracked_day:
|
||||
tracked_meals = db.query(TrackedMeal).filter(
|
||||
TrackedMeal.tracked_day_id == tracked_day.id
|
||||
).all()
|
||||
day_totals = calculate_day_nutrition_tracked(tracked_meals, db)
|
||||
calories = round(day_totals.get("calories", 0), 2)
|
||||
protein = round(day_totals.get("protein", 0), 2)
|
||||
fat = round(day_totals.get("fat", 0), 2)
|
||||
net_carbs = round(day_totals.get("net_carbs", 0), 2)
|
||||
|
||||
weight_lbs = None
|
||||
is_real = False
|
||||
|
||||
if weight_log:
|
||||
weight_lbs = round(weight_log.weight * 2.20462, 2)
|
||||
is_real = True
|
||||
|
||||
# Logic for Start and End Points (to ensure line connects across view)
|
||||
|
||||
# If this is the Oldest date in view (start_date) and no real weight
|
||||
if i == days - 1 and weight_lbs is None:
|
||||
# Use historical weight if available (to start the line)
|
||||
if last_historical_weight_val is not None:
|
||||
weight_lbs = round(last_historical_weight_val, 2)
|
||||
# is_real remains False (inferred)
|
||||
|
||||
# If this is the Newest date in view (end_date/Today) and no real weight
|
||||
if i == 0 and weight_lbs is None:
|
||||
# Use latest known weight (to end the line)
|
||||
if latest_weight_val is not None:
|
||||
weight_lbs = round(latest_weight_val, 2)
|
||||
# is_real remains False (inferred)
|
||||
|
||||
chart_data.append({
|
||||
"date": current_date.isoformat(),
|
||||
"calories": calories,
|
||||
"protein": protein,
|
||||
"fat": fat,
|
||||
"net_carbs": net_carbs,
|
||||
"weight_lbs": weight_lbs,
|
||||
"weight_is_real": is_real
|
||||
})
|
||||
|
||||
return chart_data
|
||||
283
app/api/routes/fitbit.py
Normal file
283
app/api/routes/fitbit.py
Normal file
@@ -0,0 +1,283 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Form
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse, JSONResponse
|
||||
from sqlalchemy.orm import Session
|
||||
import requests
|
||||
import base64
|
||||
import json
|
||||
import datetime
|
||||
from datetime import date
|
||||
from typing import Optional
|
||||
|
||||
from app.database import get_db, FitbitConfig, WeightLog
|
||||
from main import templates
|
||||
|
||||
from urllib.parse import quote
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# --- Helpers ---
|
||||
|
||||
def get_config(db: Session) -> FitbitConfig:
    """Return the singleton FitbitConfig row, creating an empty one on first use."""
    existing = db.query(FitbitConfig).first()
    if existing is not None:
        return existing

    # First access: persist a fresh, empty config row.
    fresh = FitbitConfig()
    db.add(fresh)
    db.commit()
    db.refresh(fresh)
    return fresh
|
||||
|
||||
def refresh_tokens(db: Session, config: FitbitConfig):
    """Exchange the stored refresh token for a new access/refresh token pair.

    Returns the new access token on success, or None when no refresh token is
    stored or the Fitbit token endpoint rejects the request / errors out.
    On success the rotated token pair is committed to the database.
    """
    if not config.refresh_token:
        return None

    # Fitbit's token endpoint requires HTTP Basic auth with
    # "client_id:client_secret" base64-encoded.
    token_url = "https://api.fitbit.com/oauth2/token"
    auth_str = f"{config.client_id}:{config.client_secret}"
    b64_auth = base64.b64encode(auth_str.encode()).decode()

    headers = {
        "Authorization": f"Basic {b64_auth}",
        "Content-Type": "application/x-www-form-urlencoded"
    }

    data = {
        "grant_type": "refresh_token",
        "refresh_token": config.refresh_token
    }

    try:
        response = requests.post(token_url, headers=headers, data=data)
        if response.status_code == 200:
            tokens = response.json()
            # Fitbit rotates refresh tokens: persist both new values.
            config.access_token = tokens['access_token']
            config.refresh_token = tokens['refresh_token']
            # config.expires_at = datetime.datetime.now().timestamp() + tokens['expires_in'] # Optional
            db.commit()
            return config.access_token
        else:
            print(f"Failed to refresh token: {response.text}")
            return None
    except Exception as e:
        print(f"Error refreshing token: {e}")
        return None
|
||||
|
||||
def get_valid_access_token(db: Session, config: FitbitConfig):
    """Return the currently stored access token (may already be expired).

    No proactive expiry check is performed; callers are expected to handle a
    401 response by calling refresh_tokens() and retrying.
    """
    # Simply try to refresh if we suspect it's old (or just always return current and handle 401 caller side)
    # For now, return current, caller handles 401 by calling refresh
    return config.access_token
|
||||
|
||||
|
||||
# --- Routes ---
|
||||
|
||||
@router.get("/admin/fitbit", response_class=HTMLResponse)
async def fitbit_page(request: Request, db: Session = Depends(get_db)):
    """Render the Fitbit admin page: config form, connection state, recent logs."""
    config = get_config(db)
    # Mask secret
    masked_secret = "*" * 8 if config.client_secret else ""
    # "Connected" only means a token is stored; the token may be expired.
    is_connected = bool(config.access_token)

    # Get recent logs
    logs = db.query(WeightLog).order_by(WeightLog.date.desc()).limit(30).all()

    return templates.TemplateResponse("admin/fitbit.html", {
        "request": request,
        "config": config,
        "masked_secret": masked_secret,
        "is_connected": is_connected,
        "logs": logs
    })
|
||||
|
||||
@router.post("/admin/fitbit/config")
async def update_config(
    request: Request,
    client_id: str = Form(...),
    client_secret: str = Form(...),
    redirect_uri: str = Form(...),
    db: Session = Depends(get_db)
):
    """Persist Fitbit OAuth app credentials, then bounce back to the admin page."""
    config = get_config(db)
    config.client_id, config.client_secret, config.redirect_uri = (
        client_id,
        client_secret,
        redirect_uri,
    )
    db.commit()
    return RedirectResponse(url="/admin/fitbit", status_code=303)
|
||||
|
||||
@router.get("/admin/fitbit/auth_url")
async def get_auth_url(db: Session = Depends(get_db)):
    """Build the Fitbit OAuth2 authorization URL from the stored configuration."""
    config = get_config(db)
    if not (config.client_id and config.redirect_uri):
        return {"status": "error", "message": "Client ID and Redirect URI must be configured first."}

    # The redirect URI must be fully percent-encoded inside the query string.
    redirect_param = quote(config.redirect_uri, safe='')
    parts = [
        "https://www.fitbit.com/oauth2/authorize",
        f"?response_type=code&client_id={config.client_id}",
        f"&redirect_uri={redirect_param}",
        "&scope=weight",
        "&expires_in=604800",
    ]
    return {"status": "success", "url": "".join(parts)}
|
||||
|
||||
@router.post("/admin/fitbit/auth/exchange")
async def exchange_code(
    request: Request,
    code_input: str = Form(...),
    db: Session = Depends(get_db)
):
    """Exchange a Fitbit authorization code (or a pasted callback URL) for tokens.

    Accepts either the raw ``code`` value or the full redirect URL the user
    landed on; on success the token pair is stored on the singleton config and
    the user is redirected back to the admin page.
    """
    config = get_config(db)

    # Parse code from URL if provided
    code = code_input.strip()
    if "?" in code and "code=" in code:
        from urllib.parse import urlparse, parse_qs
        try:
            query = parse_qs(urlparse(code).query)
            if 'code' in query:
                code = query['code'][0]
        # BUGFIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrow to Exception — on parse failure treat input as the bare code.
        except Exception:
            pass

    # Fitbit appends '#_=_' to some redirect URLs; strip it from the code.
    if code.endswith('#_=_'):
        code = code[:-4]

    # Exchange the code for an access/refresh token pair (HTTP Basic auth
    # with "client_id:client_secret" base64-encoded, per Fitbit's OAuth2 docs).
    token_url = "https://api.fitbit.com/oauth2/token"
    auth_str = f"{config.client_id}:{config.client_secret}"
    b64_auth = base64.b64encode(auth_str.encode()).decode()

    headers = {
        "Authorization": f"Basic {b64_auth}",
        "Content-Type": "application/x-www-form-urlencoded"
    }

    data = {
        # BUGFIX: the token endpoint expects the snake_case OAuth2 parameter
        # 'client_id'; the previous 'clientId' key was not a recognized field.
        "client_id": config.client_id,
        "grant_type": "authorization_code",
        "redirect_uri": config.redirect_uri,
        "code": code
    }

    try:
        response = requests.post(token_url, headers=headers, data=data)
        if response.status_code == 200:
            tokens = response.json()
            config.access_token = tokens['access_token']
            config.refresh_token = tokens['refresh_token']
            db.commit()
            return RedirectResponse(url="/admin/fitbit", status_code=303)
        else:
            return templates.TemplateResponse("error.html", {
                "request": request,
                "error_title": "Auth Failed",
                "error_message": f"Fitbit Error: {response.text}",
                "error_details": ""
            })
    except Exception as e:
        return templates.TemplateResponse("error.html", {
            "request": request,
            "error_title": "Auth Error",
            "error_message": str(e),
            "error_details": ""
        })
|
||||
|
||||
@router.post("/admin/fitbit/sync")
async def sync_data(
    request: Request,
    scope: str = Form("30d"),
    db: Session = Depends(get_db)
):
    """Pull body-weight entries from the Fitbit API into the weight_logs table.

    scope: "30d" (default) syncs the last 30 days; "all" walks 30-day windows
    from 2015-01-01 to today (presumably to stay under Fitbit's per-request
    date-range limit — confirm). Returns a JSON status payload; partial
    failures are reported with status "warning".
    """
    config = get_config(db)
    if not config.access_token:
        return JSONResponse({"status": "error", "message": "Not connected"}, status_code=400)

    # Helper to fetch one date window with the given bearer token.
    def fetch_weights_range(start_date: date, end_date: date, token: str):
        url = f"https://api.fitbit.com/1/user/-/body/log/weight/date/{start_date}/{end_date}.json"
        headers = {
            "Authorization": f"Bearer {token}",
            "Accept": "application/json"
        }
        return requests.get(url, headers=headers)

    # Determine ranges
    ranges = []
    today = datetime.date.today()

    if scope == "all":
        # Start from a reasonable past date, e.g., 2015-01-01
        current_start = datetime.date(2015, 1, 1)
        while current_start <= today:
            current_end = current_start + datetime.timedelta(days=30)
            if current_end > today:
                current_end = today
            ranges.append((current_start, current_end))
            current_start = current_end + datetime.timedelta(days=1)
    else:
        # Default 30 days
        start = today - datetime.timedelta(days=30)
        ranges.append((start, today))

    total_new = 0
    errors = []

    # Track the token outside the loop so a mid-sync refresh is reused for
    # the remaining ranges instead of refreshing on every request.
    current_token = config.access_token

    print(f"DEBUG: Starting sync for scope={scope} with {len(ranges)} ranges.")

    for start, end in ranges:
        print(f"DEBUG: Fetching range {start} to {end}...")
        resp = fetch_weights_range(start, end, current_token)

        print(f"DEBUG: Response status: {resp.status_code}")

        # 401: access token expired — refresh once and retry this range.
        if resp.status_code == 401:
            print(f"Token expired during sync of {start}-{end}, refreshing...")
            new_token = refresh_tokens(db, config)
            if new_token:
                current_token = new_token
                resp = fetch_weights_range(start, end, current_token)
                print(f"DEBUG: Retried request status: {resp.status_code}")
            else:
                errors.append("Token expired and refresh failed.")
                break

        # 429: rate limited — abort the remaining ranges.
        if resp.status_code == 429:
            errors.append("Rate limit exceeded.")
            print("DEBUG: Rate limit exceeded.")
            break

        if resp.status_code == 200:
            data = resp.json()
            weights = data.get('weight', [])
            print(f"DEBUG: Found {len(weights)} weights in this range.")
            for w in weights:
                log_id = str(w.get('logId'))
                weight_val = float(w.get('weight'))
                date_str = w.get('date')

                # Upsert keyed on Fitbit's logId to avoid duplicates.
                existing = db.query(WeightLog).filter(WeightLog.fitbit_log_id == log_id).first()
                if not existing:
                    log = WeightLog(
                        date=datetime.date.fromisoformat(date_str),
                        weight=weight_val,
                        fitbit_log_id=log_id,
                        source='fitbit'
                    )
                    db.add(log)
                    total_new += 1
                else:
                    existing.weight = weight_val
            db.commit()
        else:
            print(f"DEBUG: Error response: {resp.text}")
            errors.append(f"Error {resp.status_code} for range {start}-{end}: {resp.text}")

    print(f"DEBUG: Sync complete. Total new: {total_new}. Errors: {errors}")

    if errors:
        return JSONResponse({"status": "warning", "message": f"Synced {total_new} records, but encountered errors: {', '.join(errors[:3])}..."})
    else:
        return JSONResponse({"status": "success", "message": f"Synced {total_new} new records (" + ("All History" if scope == 'all' else "30d") + ")"})
|
||||
@@ -170,6 +170,26 @@ class TrackedMealFood(Base):
|
||||
tracked_meal = relationship("TrackedMeal", back_populates="tracked_foods")
|
||||
food = relationship("Food")
|
||||
|
||||
class FitbitConfig(Base):
    """Singleton table storing Fitbit OAuth app credentials and the token pair."""
    __tablename__ = "fitbit_config"

    id = Column(Integer, primary_key=True, index=True)
    client_id = Column(String)      # OAuth application client id
    client_secret = Column(String)  # OAuth application client secret
    redirect_uri = Column(String, default="http://localhost:8080/fitbit-callback")
    access_token = Column(String, nullable=True)
    refresh_token = Column(String, nullable=True)
    expires_at = Column(Float, nullable=True)  # Timestamp
|
||||
|
||||
class WeightLog(Base):
    """A single body-weight measurement, imported from Fitbit or entered locally."""
    __tablename__ = "weight_logs"

    id = Column(Integer, primary_key=True, index=True)
    date = Column(Date, index=True)  # measurement date
    # NOTE(review): appears to be stored in kg — chart code multiplies by
    # 2.20462 to get lbs; confirm unit before reusing elsewhere.
    weight = Column(Float)
    source = Column(String, default="fitbit")
    fitbit_log_id = Column(String, unique=True, index=True)  # To prevent duplicates
|
||||
|
||||
# Pydantic models
|
||||
class FoodCreate(BaseModel):
|
||||
name: str
|
||||
|
||||
@@ -4,11 +4,13 @@ services:
|
||||
ports:
|
||||
- "8999:8999"
|
||||
environment:
|
||||
- DATABASE_URL=sqlite:////app/data/meal_planner.db
|
||||
#- DATABASE_URL=sqlite:////app/data/meal_planner.db
|
||||
- DATABASE_URL=postgresql://postgres:postgres@master.postgres.service.dc1.consul/meal_planner
|
||||
- PYTHONUNBUFFERED=1
|
||||
volumes:
|
||||
- ./alembic:/app/alembic
|
||||
- ./data:/app/data
|
||||
- ./backups:/app/backups
|
||||
- ./app:/app/app
|
||||
- ./templates:/app/templates
|
||||
- ./main.py:/app/main.py
|
||||
|
||||
150
migrate_to_postgres.py
Normal file
150
migrate_to_postgres.py
Normal file
@@ -0,0 +1,150 @@
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
from sqlalchemy import create_engine, text, inspect
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
# Import models to ensure simple table discovery if needed,
|
||||
# though we will mostly work with raw tables or inspection.
|
||||
from app.database import Base, Food, Meal, MealFood, Plan, Template, TemplateMeal, WeeklyMenu, WeeklyMenuDay, TrackedDay, TrackedMeal, TrackedMealFood, FitbitConfig, WeightLog
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def migrate():
    """Copy all application data from a SQLite database into PostgreSQL.

    Reads every known table from the source SQLite file and bulk-inserts the
    rows into a freshly (re)created Postgres schema, then resets the Postgres
    id sequences so future inserts don't collide with migrated rows.

    WARNING: drops and recreates ALL tables in the destination database.
    """
    import argparse

    parser = argparse.ArgumentParser(description='Migrate data from SQLite to PostgreSQL')
    parser.add_argument('--sqlite-path', help='Path to source SQLite database file', default=os.getenv('SQLITE_PATH', '/app/data/meal_planner.db'))
    parser.add_argument('--pg-url', help='PostgreSQL connection URL', default=os.getenv('PG_DATABASE_URL'))

    args = parser.parse_args()

    # Source: SQLite
    sqlite_path = args.sqlite_path
    sqlite_url = f"sqlite:///{sqlite_path}"

    # Destination: Postgres — an explicit URL wins, otherwise build from env vars.
    if args.pg_url:
        pg_url = args.pg_url
    else:
        pg_user = os.getenv('POSTGRES_USER', 'user')
        pg_password = os.getenv('POSTGRES_PASSWORD', 'password')
        pg_host = os.getenv('POSTGRES_HOST', 'postgres')
        pg_db = os.getenv('POSTGRES_DB', 'meal_planner')
        pg_url = f"postgresql://{pg_user}:{pg_password}@{pg_host}/{pg_db}"

    logger.info(f"Source SQLite: {sqlite_url}")
    logger.info(f"Destination Postgres: {pg_url}")

    # Create engines and verify both databases are reachable before touching anything.
    try:
        sqlite_engine = create_engine(sqlite_url)
        pg_engine = create_engine(pg_url)

        with sqlite_engine.connect() as conn:
            pass
        logger.info("Connected to SQLite.")

        with pg_engine.connect() as conn:
            pass
        logger.info("Connected to Postgres.")

    except Exception as e:
        logger.error(f"Failed to connect to databases: {e}")
        return

    # Recreate the schema in Postgres from the app's metadata.
    # drop_all gives a clean start but is destructive — see docstring warning.
    logger.info("Creating tables in Postgres...")
    Base.metadata.drop_all(pg_engine)
    Base.metadata.create_all(pg_engine)
    logger.info("Tables created.")

    # Parent tables first so foreign-key constraints are satisfied on insert.
    tables_ordered = [
        'foods',
        'meals',
        'meal_foods',
        'templates',
        'template_meals',
        'weekly_menus',
        'weekly_menu_days',
        'plans',
        'tracked_days',
        'tracked_meals',
        'tracked_meal_foods',
        'fitbit_config',
        'weight_logs'
    ]

    # Migration Loop
    with sqlite_engine.connect() as sqlite_conn, pg_engine.connect() as pg_conn:
        for table_name in tables_ordered:
            logger.info(f"Migrating table: {table_name}")

            try:
                # Raw SELECT keeps this tolerant of ORM-level changes; missing
                # tables (unused features) are handled in the except below.
                result = sqlite_conn.execute(text(f"SELECT * FROM {table_name}"))
                rows = result.fetchall()
                keys = result.keys()

                if not rows:
                    logger.info(f"  No data in {table_name}, skipping.")
                    continue

                # Insert into Postgres via SQLAlchemy Core (db-agnostic).
                data = [dict(zip(keys, row)) for row in rows]
                table_obj = Base.metadata.tables[table_name]

                pg_conn.execute(table_obj.insert(), data)
                pg_conn.commit()

                logger.info(f"  Migrated {len(rows)} rows.")

                # Reset the serial sequence (Postgres names them <table>_id_seq)
                # so the next INSERT gets an id above the migrated maximum.
                if 'id' in keys:
                    # BUGFIX: the previous max(row[0] ...) assumed 'id' was the
                    # first column; compute the max from the keyed dicts instead.
                    max_id_val = max(
                        (d['id'] for d in data if d['id'] is not None),
                        default=0,
                    )

                    if max_id_val > 0:
                        seq_name = f"{table_name}_id_seq"
                        # Check if sequence exists (it should for Serial)
                        try:
                            pg_conn.execute(text(f"SELECT setval('{seq_name}', {max_id_val})"))
                            pg_conn.commit()
                            logger.info(f"  Sequence {seq_name} reset to {max_id_val}")
                        except Exception as seq_err:
                            # BUGFIX: Logger.warn is deprecated — use warning().
                            logger.warning(f"  Could not reset sequence {seq_name} (might not exist): {seq_err}")
                            pg_conn.rollback()

            except Exception as e:
                # "no such table" is expected when a feature was never used.
                if "no such table" in str(e):
                    logger.warning(f"  Table {table_name} not found in source SQLite. Skipping.")
                    continue

                logger.error(f"Error migrating {table_name}: {e}")
                pg_conn.rollback()
                # Stop on the first real error — safer than a partial migration.
                return

    logger.info("Migration completed successfully.")


if __name__ == "__main__":
    migrate()
|
||||
98
planner.nomad
Normal file
98
planner.nomad
Normal file
@@ -0,0 +1,98 @@
|
||||
# Nomad job: runs the FoodPlanner app backed by SQLite, with Litestream
# restoring the database before start and replicating it continuously after.
job "foodplanner" {
  datacenters = ["dc1"]

  type = "service"

  group "app" {
    count = 1

    network {
      port "http" {
        to = 8999
      }
    }

    service {
      name = "foodplanner"
      port = "http"

      check {
        type = "http"
        path = "/"
        interval = "10s"
        timeout = "2s"
      }
    }

    # Prestart restore task: pulls the latest SQLite replica into the
    # allocation's shared /alloc/tmp before the app starts.
    task "restore" {
      driver = "docker"
      lifecycle {
        hook = "prestart"
        sidecar = false
      }
      config {
        # image = "litestream/litestream:latest"
        image = "litestream/litestream:0.3"
        args = [
          "restore",
          # "-if-replica-exists",
          #"-if-db-not-exists",
          "-o", "/alloc/tmp/meal_planner.db",
          # SECURITY NOTE(review): SFTP credentials are embedded in plain text
          # in this job spec — move them to Nomad variables / Vault.
          "sftp://root:odroid@192.168.4.63/mnt/Shares/litestream/foodplanner.db"
        ]
        volumes = [
          "/opt/nomad/data:/data"
        ]
      }
    }

    task "app" {
      driver = "docker"

      config {
        image = "ghcr.io/sstent/foodplanner:main"
        ports = ["http"]

        # Mount the SQLite database file to persist data
        # Adjust the source path as needed for your environment
        volumes = [
          "/mnt/Public/configs/FoodPlanner_backups:/app/backups/",
        ]
      }
      env {
        DATABASE_PATH = "/alloc/tmp"
        DATABASE_URL = "sqlite:////alloc/tmp/meal_planner.db"
      }
      resources {
        cpu    = 500
        memory = 1024
      }

      # Restart policy
      restart {
        attempts = 3
        interval = "10m"
        delay = "15s"
        mode = "fail"
      }
    }

    # Litestream sidecar for continuous replication
    task "litestream" {
      driver = "docker"
      lifecycle {
        hook = "poststart" # runs after main task starts
        sidecar = true
      }
      config {
        # image = "litestream/litestream:0.5.0-test.10"
        image = "litestream/litestream:0.3"
        args = [
          "replicate",
          "/alloc/tmp/meal_planner.db",
          # SECURITY NOTE(review): same plain-text credentials as the restore task.
          "sftp://root:odroid@192.168.4.63/mnt/Shares/litestream/foodplanner.db"
        ]
      }
    }
  }
}
|
||||
@@ -3,7 +3,7 @@ starlette==0.37.2
|
||||
anyio==4.4.0
|
||||
uvicorn[standard]==0.24.0
|
||||
sqlalchemy>=2.0.24
|
||||
#psycopg2-binary==2.9.9
|
||||
psycopg2-binary==2.9.9
|
||||
python-multipart>=0.0.7
|
||||
jinja2==3.1.2
|
||||
openfoodfacts>=0.2.0
|
||||
|
||||
73
templates/admin/config.html
Normal file
73
templates/admin/config.html
Normal file
@@ -0,0 +1,73 @@
|
||||
{% extends "admin/index.html" %}
|
||||
|
||||
{% block admin_content %}
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<h5 class="mb-0">System Configuration Status</h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<p class="text-muted">Current system environment and database connection details.</p>
|
||||
|
||||
<table class="table table-striped table-bordered">
|
||||
<thead class="table-light">
|
||||
<tr>
|
||||
<th style="width: 30%">Setting</th>
|
||||
<th>Value</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><strong>Database Type</strong></td>
|
||||
<td>
|
||||
{% if 'sqlite' in config.database_url %}
|
||||
<span class="badge bg-secondary">SQLite</span>
|
||||
{% elif 'postgresql' in config.database_url %}
|
||||
<span class="badge bg-primary">PostgreSQL</span>
|
||||
{% else %}
|
||||
<span class="badge bg-warning text-dark">{{ config.database_type }}</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Connection URL</strong></td>
|
||||
<td><code>{{ config.database_url_masked }}</code></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Database Host/Path</strong></td>
|
||||
<td>{{ config.database_host }}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><strong>Environment</strong></td>
|
||||
<td>
|
||||
{% if config.debug %}
|
||||
<span class="badge bg-warning text-dark">Debug Mode</span>
|
||||
{% else %}
|
||||
<span class="badge bg-success">Production</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div class="alert alert-info mt-3">
|
||||
<i class="bi bi-info-circle"></i>
|
||||
{% if 'sqlite' in config.database_url %}
|
||||
Running in portable SQLite mode. To switch to PostgreSQL, please refer to the migration guide.
|
||||
{% else %}
|
||||
Running in PostgreSQL mode. Database is hosted at <strong>{{ config.database_host }}</strong>.
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function () {
|
||||
// Activate the correct tab
|
||||
const tabLink = document.getElementById('config-status-tab');
|
||||
if (tabLink) {
|
||||
tabLink.classList.add('active');
|
||||
tabLink.setAttribute('aria-selected', 'true');
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
179
templates/admin/fitbit.html
Normal file
179
templates/admin/fitbit.html
Normal file
@@ -0,0 +1,179 @@
|
||||
{% extends "admin/index.html" %}
|
||||
|
||||
{% block admin_content %}
|
||||
<div class="card mb-4">
|
||||
<div class="card-header d-flex justify-content-between align-items-center">
|
||||
<h5 class="mb-0">Fitbit Connection</h5>
|
||||
{% if is_connected %}
|
||||
<span class="badge bg-success">Connected</span>
|
||||
{% else %}
|
||||
<span class="badge bg-secondary">Disconnected</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="card-body">
|
||||
|
||||
<!-- Configuration Form -->
|
||||
<form action="/admin/fitbit/config" method="post" class="mb-4">
|
||||
<div class="row">
|
||||
<div class="col-md-4 mb-3">
|
||||
<label class="form-label">Client ID</label>
|
||||
<input type="text" class="form-control" name="client_id" value="{{ config.client_id or '' }}"
|
||||
required>
|
||||
</div>
|
||||
<div class="col-md-4 mb-3">
|
||||
<label class="form-label">Client Secret</label>
|
||||
<input type="text" class="form-control" name="client_secret"
|
||||
value="{{ config.client_secret or '' }}" required>
|
||||
</div>
|
||||
<div class="col-md-4 mb-3">
|
||||
<label class="form-label">Redirect URI</label>
|
||||
<input type="text" class="form-control" name="redirect_uri"
|
||||
value="{{ config.redirect_uri or 'http://localhost:8080/fitbit-callback' }}" required>
|
||||
</div>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-outline-primary btn-sm">Update Configuration</button>
|
||||
</form>
|
||||
|
||||
<hr>
|
||||
|
||||
{% if not is_connected %}
|
||||
<div class="alert alert-info">
|
||||
<strong>Connect to Fitbit:</strong>
|
||||
<ol>
|
||||
<li>Click "Get Authorization URL" below.</li>
|
||||
<li>Visit the URL in your browser and authorize the app.</li>
|
||||
<li>You will be redirected to a URL (likely failing to load). Copy the entire URL.</li>
|
||||
<li>Paste it in the box below and click "Complete Connection".</li>
|
||||
</ol>
|
||||
</div>
|
||||
|
||||
<div class="mb-3">
|
||||
<button id="get-auth-url-btn" class="btn btn-primary">Get Authorization URL</button>
|
||||
<div id="auth-url-container" class="mt-2" style="display:none;">
|
||||
<textarea class="form-control" rows="2" readonly id="auth-url-display"></textarea>
|
||||
<a href="#" target="_blank" id="auth-link" class="btn btn-sm btn-link">Open Link</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form action="/admin/fitbit/auth/exchange" method="post">
|
||||
<div class="input-group">
|
||||
<input type="text" class="form-control" name="code_input"
|
||||
placeholder="Paste full redirected URL or code here..." required>
|
||||
<button class="btn btn-success" type="submit">Complete Connection</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
{% else %}
|
||||
|
||||
<div class="d-flex align-items-center gap-3">
|
||||
<button class="btn btn-primary sync-btn" data-scope="30d">
|
||||
<i class="bi bi-arrow-repeat"></i> Sync Last 30 Days
|
||||
</button>
|
||||
<button class="btn btn-secondary sync-btn" data-scope="all">
|
||||
<i class="bi bi-clock-history"></i> Sync All History
|
||||
</button>
|
||||
<span id="sync-status" class="text-muted"></span>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
<h5 class="mb-0">Recent Weight Logs</h5>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<table class="table table-sm table-striped">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Date</th>
|
||||
<th>Weight (kg)</th>
|
||||
<th>Source</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for log in logs %}
|
||||
<tr>
|
||||
<td>{{ log.date }}</td>
|
||||
<td>{{ log.weight }}</td>
|
||||
<td>{{ log.source }}</td>
|
||||
</tr>
|
||||
{% else %}
|
||||
<tr>
|
||||
<td colspan="3" class="text-center text-muted">No logs found. Sync to import data.</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function () {
|
||||
// Auth URL handler
|
||||
const authBtn = document.getElementById('get-auth-url-btn');
|
||||
if (authBtn) {
|
||||
authBtn.addEventListener('click', async () => {
|
||||
try {
|
||||
const response = await fetch('/admin/fitbit/auth_url');
|
||||
const data = await response.json();
|
||||
if (data.status === 'success') {
|
||||
const container = document.getElementById('auth-url-container');
|
||||
const display = document.getElementById('auth-url-display');
|
||||
const link = document.getElementById('auth-link');
|
||||
|
||||
display.value = data.url;
|
||||
link.href = data.url;
|
||||
container.style.display = 'block';
|
||||
} else {
|
||||
alert('Error: ' + data.message);
|
||||
}
|
||||
} catch (e) {
|
||||
alert('Request failed: ' + e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Sync handler
|
||||
const syncBtns = document.querySelectorAll('.sync-btn');
|
||||
syncBtns.forEach(btn => {
|
||||
btn.addEventListener('click', async () => {
|
||||
const scope = btn.dataset.scope;
|
||||
const statusFn = document.getElementById('sync-status');
|
||||
|
||||
// Disable all sync buttons
|
||||
syncBtns.forEach(b => b.disabled = true);
|
||||
|
||||
statusFn.textContent = scope === 'all' ? 'Syncing history (this may take a while)...' : 'Syncing...';
|
||||
statusFn.className = 'text-muted';
|
||||
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append('scope', scope);
|
||||
|
||||
const response = await fetch('/admin/fitbit/sync', {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
});
|
||||
const data = await response.json();
|
||||
if (data.status === 'success' || data.status === 'warning') {
|
||||
statusFn.textContent = data.message;
|
||||
statusFn.className = data.status === 'warning' ? 'text-warning' : 'text-success';
|
||||
setTimeout(() => location.reload(), 2000); // Reload to show data
|
||||
} else {
|
||||
statusFn.textContent = 'Error: ' + data.message;
|
||||
statusFn.className = 'text-danger';
|
||||
}
|
||||
} catch (e) {
|
||||
statusFn.textContent = 'Failed: ' + e;
|
||||
statusFn.className = 'text-danger';
|
||||
} finally {
|
||||
syncBtns.forEach(b => b.disabled = false);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
@@ -13,6 +13,12 @@
|
||||
<li class="nav-item" role="presentation">
|
||||
<a class="nav-link" id="llm-config-tab" href="/admin/llm_config">LLM Config</a>
|
||||
</li>
|
||||
<li class="nav-item" role="presentation">
|
||||
<a class="nav-link" id="fitbit-tab" href="/admin/fitbit">Fitbit</a>
|
||||
</li>
|
||||
<li class="nav-item" role="presentation">
|
||||
<a class="nav-link" id="config-status-tab" href="/admin/config-status">Config Status</a>
|
||||
</li>
|
||||
</ul>
|
||||
|
||||
<div class="tab-content mt-3">
|
||||
|
||||
@@ -100,30 +100,82 @@
|
||||
resizeChart();
|
||||
|
||||
chart = new Chart(ctx, {
|
||||
type: 'bar', // Switch to bar chart
|
||||
type: 'bar',
|
||||
data: {
|
||||
labels: labels,
|
||||
datasets: [
|
||||
{
|
||||
type: 'line',
|
||||
label: 'Weight (lbs)',
|
||||
data: data.map(item => item.weight_lbs),
|
||||
borderColor: '#0d6efd', // Bootstrap primary (Blue)
|
||||
backgroundColor: '#0d6efd',
|
||||
borderWidth: 2,
|
||||
pointRadius: function (context) {
|
||||
const index = context.dataIndex;
|
||||
const item = data[index]; // Access data array from outer scope
|
||||
|
||||
// Show dot if it's a real weight measurement
|
||||
if (item.weight_is_real) return 4;
|
||||
|
||||
// "Or the first point if no datapoints in the view"
|
||||
// Check if ANY point in the view is real
|
||||
const anyReal = data.some(d => d.weight_is_real);
|
||||
if (!anyReal) {
|
||||
// Make sure we only show ONE dot (the first one / oldest date)
|
||||
// Data is sorted by date ascending in frontend (index 0 is oldest)
|
||||
if (index === 0 && item.weight_lbs !== null) return 4;
|
||||
}
|
||||
|
||||
return 0; // Hide dot for inferred points
|
||||
},
|
||||
yAxisID: 'y1',
|
||||
datalabels: {
|
||||
display: true,
|
||||
align: 'top',
|
||||
formatter: function (value, context) {
|
||||
// Only show label if radius > 0
|
||||
const index = context.dataIndex;
|
||||
const item = data[index];
|
||||
|
||||
// Same logic as pointRadius
|
||||
let show = false;
|
||||
if (item.weight_is_real) show = true;
|
||||
else {
|
||||
const anyReal = data.some(d => d.weight_is_real);
|
||||
if (!anyReal && index === 0 && item.weight_lbs !== null) show = true;
|
||||
}
|
||||
|
||||
return show ? (value ? value + ' lbs' : '') : '';
|
||||
},
|
||||
color: '#0d6efd',
|
||||
font: { weight: 'bold' }
|
||||
},
|
||||
spanGaps: true
|
||||
},
|
||||
{
|
||||
label: 'Net Carbs',
|
||||
data: netCarbsCals,
|
||||
backgroundColor: 'rgba(255, 193, 7, 0.8)', // Bootstrap warning (Yellow)
|
||||
borderColor: '#ffc107',
|
||||
borderWidth: 1
|
||||
borderWidth: 1,
|
||||
yAxisID: 'y'
|
||||
},
|
||||
{
|
||||
label: 'Fat',
|
||||
data: fatCals,
|
||||
backgroundColor: 'rgba(220, 53, 69, 0.8)', // Bootstrap danger (Red)
|
||||
borderColor: '#dc3545',
|
||||
borderWidth: 1
|
||||
borderWidth: 1,
|
||||
yAxisID: 'y'
|
||||
},
|
||||
{
|
||||
label: 'Protein',
|
||||
data: proteinCals,
|
||||
backgroundColor: 'rgba(25, 135, 84, 0.8)', // Bootstrap success (Green)
|
||||
borderColor: '#198754',
|
||||
borderWidth: 1
|
||||
borderWidth: 1,
|
||||
yAxisID: 'y'
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -133,14 +185,26 @@
|
||||
scales: {
|
||||
y: {
|
||||
beginAtZero: true,
|
||||
stacked: true, // Enable stacking for Y axis
|
||||
stacked: true,
|
||||
title: {
|
||||
display: true,
|
||||
text: 'Calories'
|
||||
}
|
||||
},
|
||||
y1: {
|
||||
type: 'linear',
|
||||
display: true,
|
||||
position: 'right',
|
||||
title: {
|
||||
display: true,
|
||||
text: 'Weight (lbs)'
|
||||
},
|
||||
grid: {
|
||||
drawOnChartArea: false // only want the grid lines for one axis to show up
|
||||
}
|
||||
},
|
||||
x: {
|
||||
stacked: true, // Enable stacking for X axis
|
||||
stacked: true,
|
||||
title: {
|
||||
display: true,
|
||||
text: 'Date'
|
||||
@@ -156,8 +220,11 @@
|
||||
label += ': ';
|
||||
}
|
||||
if (context.parsed.y !== null) {
|
||||
if (context.dataset.type === 'line') {
|
||||
return label + context.parsed.y + ' lbs';
|
||||
}
|
||||
const dayData = data[context.dataIndex];
|
||||
const macroKey = MACRO_KEYS[context.datasetIndex];
|
||||
const macroKey = MACRO_KEYS[context.datasetIndex - 1]; // Offset by 1 due to weight dataset
|
||||
const grams = dayData[macroKey];
|
||||
label += Math.round(context.parsed.y) + ' cals (' + Math.round(grams) + 'g)';
|
||||
}
|
||||
@@ -172,6 +239,8 @@
|
||||
size: 11
|
||||
},
|
||||
display: function (context) {
|
||||
if (context.dataset.type === 'line') return false; // Handled separately
|
||||
|
||||
const dayData = data[context.dataIndex];
|
||||
const pC = dayData.protein * 4;
|
||||
const fC = dayData.fat * 9;
|
||||
@@ -182,6 +251,8 @@
|
||||
return calcTotal > 0 && (value / calcTotal) > 0.05;
|
||||
},
|
||||
formatter: function (value, context) {
|
||||
if (context.dataset.type === 'line') return '';
|
||||
|
||||
const dayData = data[context.dataIndex];
|
||||
const pC = dayData.protein * 4;
|
||||
const fC = dayData.fat * 9;
|
||||
@@ -190,7 +261,7 @@
|
||||
|
||||
const totalCals = calcTotal || 1;
|
||||
const percent = Math.round((value / totalCals) * 100);
|
||||
const macroKey = MACRO_KEYS[context.datasetIndex];
|
||||
const macroKey = MACRO_KEYS[context.datasetIndex - 1]; // Offset by 1
|
||||
const grams = Math.round(dayData[macroKey]);
|
||||
|
||||
return grams + 'g\n' + percent + '%';
|
||||
|
||||
Reference in New Issue
Block a user