Mirror of https://github.com/sstent/foodplanner.git (synced 2026-02-13 19:05:44 +00:00)

Commit: added alembic database migrations, json import/export
alembic.ini (new file, 116 lines)
@@ -0,0 +1,116 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = sqlite:///data/meal_planner.db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
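With this file in the repository root, migrations can be driven either from the alembic CLI or from Python, as the startup hook in main.py below does. A minimal sketch of cutting a new autogenerated revision against this configuration (the working directory and the revision message are assumptions, not part of the commit):

from alembic import command
from alembic.config import Config

# Load the [alembic] section above: script_location, sqlalchemy.url, logging.
cfg = Config("alembic.ini")

# Diff the ORM metadata against the live SQLite schema and write a new
# revision file under alembic/versions/ ("add notes column" is hypothetical).
command.revision(cfg, message="add notes column", autogenerate=True)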
alembic/README (new file, 1 line)
@@ -0,0 +1 @@
Generic single-database configuration.
alembic/env.py (new file, 77 lines)
@@ -0,0 +1,77 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
from main import Base
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
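Because env.py imports Base from main, autogenerate compares the application's ORM models against the live schema. The offline path above also makes it possible to preview migration SQL without a database connection; a sketch, assuming the project root as the working directory:

from alembic import command
from alembic.config import Config

# Render the SQL for all pending migrations to stdout instead of executing it
# (this exercises run_migrations_offline() above).
command.upgrade(Config("alembic.ini"), "head", sql=True)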
alembic/script.py.mako (new file, 26 lines)
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
alembic/versions/be9f940574bb_initial_migration.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""Initial migration

Revision ID: be9f940574bb
Revises:
Create Date: 2025-09-28 06:56:22.385692

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'be9f940574bb'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
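Both upgrade() and downgrade() are empty, so this revision is a schema baseline rather than a real change. For databases that were already created by Base.metadata.create_all(), one plausible workflow (an assumption, not shown in this commit) is to stamp them so Alembic treats them as current:

from alembic import command
from alembic.config import Config

# Record be9f940574bb in the alembic_version table without running any DDL,
# marking a pre-existing database as already at this baseline.
command.stamp(Config("alembic.ini"), "head")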
Nomad job file (modified)

@@ -57,6 +57,7 @@ job "foodplanner" {
       # Adjust the source path as needed for your environment
       volumes = [
-        "/alloc/tmp/:/data/"
+        "/alloc/tmp/:/data/",
+        "/mnt/Public/configs/FoodPlanner_backups:/backups",
       ]
     }

main.py (modified, 350 lines changed)
@@ -17,6 +17,8 @@ import csv
 import requests
 from fastapi import File, UploadFile
 import logging
+from alembic.config import Config
+from alembic import command

 # Configure logging
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
@@ -206,6 +208,80 @@ class TrackedMealCreate(BaseModel):
     meal_time: str
     quantity: float = 1.0
+
+
+class FoodExport(FoodResponse):
+    pass
+
+class MealFoodExport(BaseModel):
+    food_id: int
+    quantity: float
+
+class MealExport(BaseModel):
+    id: int
+    name: str
+    meal_type: str
+    meal_time: str
+    meal_foods: List[MealFoodExport]
+
+    class Config:
+        from_attributes = True
+
+class PlanExport(BaseModel):
+    id: int
+    person: str
+    date: date
+    meal_id: int
+    meal_time: str
+
+    class Config:
+        from_attributes = True
+
+class TemplateMealExport(BaseModel):
+    meal_id: int
+    meal_time: str
+
+class TemplateExport(BaseModel):
+    id: int
+    name: str
+    template_meals: List[TemplateMealExport]
+
+    class Config:
+        from_attributes = True
+
+class WeeklyMenuDayExport(BaseModel):
+    day_of_week: int
+    template_id: int
+
+class WeeklyMenuExport(BaseModel):
+    id: int
+    name: str
+    weekly_menu_days: List[WeeklyMenuDayExport]
+
+    class Config:
+        from_attributes = True
+
+class TrackedMealExport(BaseModel):
+    meal_id: int
+    meal_time: str
+    quantity: float
+
+class TrackedDayExport(BaseModel):
+    id: int
+    person: str
+    date: date
+    is_modified: bool
+    tracked_meals: List[TrackedMealExport]
+
+    class Config:
+        from_attributes = True
+
+class AllData(BaseModel):
+    foods: List[FoodExport]
+    meals: List[MealExport]
+    plans: List[PlanExport]
+    templates: List[TemplateExport]
+    weekly_menus: List[WeeklyMenuExport]
+    tracked_days: List[TrackedDayExport]

 # Database dependency
 def get_db():
     db = SessionLocal()
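These export models mirror the ORM relationships. Note that from_attributes = True is the Pydantic v2 config key, while the endpoint code below still calls the v1-era from_orm(), parse_raw(), and .dict(), which run on v2 only as deprecated aliases. The non-deprecated v2 spellings would look like this (a sketch, assuming the project is pinned to Pydantic >= 2; food_row and contents are hypothetical names):

# Hypothetical v2 equivalents of the deprecated calls used in the endpoints below:
food_export = FoodExport.model_validate(food_row)  # instead of FoodExport.from_orm(food_row)
data = AllData.model_validate_json(contents)       # instead of AllData.parse_raw(contents)
payload = food_export.model_dump()                 # instead of food_export.dict()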
@@ -214,15 +290,16 @@ def get_db():
     finally:
         db.close()

-try:
-    # Create tables
-    Base.metadata.create_all(bind=engine)
-    logging.info("Database tables checked/created successfully.")
-except Exception as e:
-    logging.error(f"Failed to connect to or create database at {DATABASE_URL}: {e}")
-    # Depending on desired behavior, you might want to exit the application or handle it gracefully
-    # For now, we'll re-raise to ensure the application doesn't start with a broken DB connection
-    raise
+@app.on_event("startup")
+def run_migrations():
+    logging.info("Running database migrations...")
+    try:
+        alembic_cfg = Config("alembic.ini")
+        command.upgrade(alembic_cfg, "head")
+        logging.info("Database migrations completed successfully.")
+    except Exception as e:
+        logging.error(f"Failed to run database migrations: {e}")
+        raise

 # Utility functions
 def calculate_meal_nutrition(meal, db: Session):
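This hook replaces the import-time create_all() with Alembic migrations at application startup. Note that @app.on_event("startup") is deprecated in recent FastAPI releases in favor of lifespan handlers; an equivalent sketch, assuming FastAPI >= 0.93:

from contextlib import asynccontextmanager
from fastapi import FastAPI
from alembic import command
from alembic.config import Config

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Apply pending migrations before the app starts serving requests.
    command.upgrade(Config("alembic.ini"), "head")
    yield  # the application runs while the context is open

app = FastAPI(lifespan=lifespan)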
@@ -299,6 +376,261 @@ async def root(request: Request):
 async def imports_page(request: Request):
     return templates.TemplateResponse("imports.html", {"request": request})

+@app.get("/export/all", response_model=AllData)
+async def export_all_data(db: Session = Depends(get_db)):
+    """Export all data from the database as a single JSON file."""
+    foods = db.query(Food).all()
+    meals = db.query(Meal).all()
+    plans = db.query(Plan).all()
+    templates = db.query(Template).all()
+    weekly_menus = db.query(WeeklyMenu).all()
+    tracked_days = db.query(TrackedDay).all()
+
+    # Manual serialization to handle nested relationships
+
+    # Meals with MealFoods
+    meals_export = []
+    for meal in meals:
+        meal_foods_export = [
+            MealFoodExport(food_id=mf.food_id, quantity=mf.quantity)
+            for mf in meal.meal_foods
+        ]
+        meals_export.append(
+            MealExport(
+                id=meal.id,
+                name=meal.name,
+                meal_type=meal.meal_type,
+                meal_time=meal.meal_time,
+                meal_foods=meal_foods_export,
+            )
+        )
+
+    # Templates with TemplateMeals
+    templates_export = []
+    for template in templates:
+        template_meals_export = [
+            TemplateMealExport(meal_id=tm.meal_id, meal_time=tm.meal_time)
+            for tm in template.template_meals
+        ]
+        templates_export.append(
+            TemplateExport(
+                id=template.id,
+                name=template.name,
+                template_meals=template_meals_export,
+            )
+        )
+
+    # Weekly Menus with WeeklyMenuDays
+    weekly_menus_export = []
+    for weekly_menu in weekly_menus:
+        weekly_menu_days_export = [
+            WeeklyMenuDayExport(
+                day_of_week=wmd.day_of_week, template_id=wmd.template_id
+            )
+            for wmd in weekly_menu.weekly_menu_days
+        ]
+        weekly_menus_export.append(
+            WeeklyMenuExport(
+                id=weekly_menu.id,
+                name=weekly_menu.name,
+                weekly_menu_days=weekly_menu_days_export,
+            )
+        )
+
+    # Tracked Days with TrackedMeals
+    tracked_days_export = []
+    for tracked_day in tracked_days:
+        tracked_meals_export = [
+            TrackedMealExport(
+                meal_id=tm.meal_id,
+                meal_time=tm.meal_time,
+                quantity=tm.quantity,
+            )
+            for tm in tracked_day.tracked_meals
+        ]
+        tracked_days_export.append(
+            TrackedDayExport(
+                id=tracked_day.id,
+                person=tracked_day.person,
+                date=tracked_day.date,
+                is_modified=tracked_day.is_modified,
+                tracked_meals=tracked_meals_export,
+            )
+        )
+
+    return AllData(
+        foods=[FoodExport.from_orm(f) for f in foods],
+        meals=meals_export,
+        plans=[PlanExport.from_orm(p) for p in plans],
+        templates=templates_export,
+        weekly_menus=weekly_menus_export,
+        tracked_days=tracked_days_export,
+    )
+
+@app.post("/import/all")
+async def import_all_data(file: UploadFile = File(...), db: Session = Depends(get_db)):
+    """Import all data from a JSON file, overwriting existing data."""
+    try:
+        contents = await file.read()
+        data = AllData.parse_raw(contents)
+
+        # Validate data before import
+        validate_import_data(data)
+
+        # 1. Delete existing data in the correct order
+        db.query(TrackedMeal).delete()
+        db.query(TrackedDay).delete()
+        db.query(WeeklyMenuDay).delete()
+        db.query(WeeklyMenu).delete()
+        db.query(Plan).delete()
+        db.query(TemplateMeal).delete()
+        db.query(Template).delete()
+        db.query(MealFood).delete()
+        db.query(Meal).delete()
+        db.query(Food).delete()
+        db.commit()
+
+        # 2. Insert new data in the correct order
+        # Foods
+        for food_data in data.foods:
+            db.add(Food(**food_data.dict()))
+        db.commit()
+
+        # Meals
+        for meal_data in data.meals:
+            meal = Meal(
+                id=meal_data.id,
+                name=meal_data.name,
+                meal_type=meal_data.meal_type,
+                meal_time=meal_data.meal_time,
+            )
+            db.add(meal)
+            db.flush()
+            for mf_data in meal_data.meal_foods:
+                db.add(
+                    MealFood(
+                        meal_id=meal.id,
+                        food_id=mf_data.food_id,
+                        quantity=mf_data.quantity,
+                    )
+                )
+        db.commit()
+
+        # Templates
+        for template_data in data.templates:
+            template = Template(id=template_data.id, name=template_data.name)
+            db.add(template)
+            db.flush()
+            for tm_data in template_data.template_meals:
+                db.add(
+                    TemplateMeal(
+                        template_id=template.id,
+                        meal_id=tm_data.meal_id,
+                        meal_time=tm_data.meal_time,
+                    )
+                )
+        db.commit()
+
+        # Plans
+        for plan_data in data.plans:
+            db.add(Plan(**plan_data.dict()))
+        db.commit()
+
+        # Weekly Menus
+        for weekly_menu_data in data.weekly_menus:
+            weekly_menu = WeeklyMenu(
+                id=weekly_menu_data.id, name=weekly_menu_data.name
+            )
+            db.add(weekly_menu)
+            db.flush()
+            for wmd_data in weekly_menu_data.weekly_menu_days:
+                db.add(
+                    WeeklyMenuDay(
+                        weekly_menu_id=weekly_menu.id,
+                        day_of_week=wmd_data.day_of_week,
+                        template_id=wmd_data.template_id,
+                    )
+                )
+        db.commit()
+
+        # Tracked Days
+        for tracked_day_data in data.tracked_days:
+            tracked_day = TrackedDay(
+                id=tracked_day_data.id,
+                person=tracked_day_data.person,
+                date=tracked_day_data.date,
+                is_modified=tracked_day_data.is_modified,
+            )
+            db.add(tracked_day)
+            db.flush()
+            for tm_data in tracked_day_data.tracked_meals:
+                db.add(
+                    TrackedMeal(
+                        tracked_day_id=tracked_day.id,
+                        meal_id=tm_data.meal_id,
+                        meal_time=tm_data.meal_time,
+                        quantity=tm_data.quantity,
+                    )
+                )
+        db.commit()
+
+        return {"status": "success", "message": "All data imported successfully."}
+
+    except Exception as e:
+        db.rollback()
+        logging.error(f"Failed to import data: {e}")
+        raise HTTPException(status_code=400, detail=f"Failed to import data: {e}")
+
+def validate_import_data(data: AllData):
+    """Validate the integrity of the imported data."""
+    food_ids = {f.id for f in data.foods}
+    meal_ids = {m.id for m in data.meals}
+    template_ids = {t.id for t in data.templates}
+
+    # Validate Meals
+    for meal in data.meals:
+        for meal_food in meal.meal_foods:
+            if meal_food.food_id not in food_ids:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid meal food: food_id {meal_food.food_id} not found.",
+                )
+
+    # Validate Plans
+    for plan in data.plans:
+        if plan.meal_id not in meal_ids:
+            raise HTTPException(
+                status_code=400,
+                detail=f"Invalid plan: meal_id {plan.meal_id} not found.",
+            )
+
+    # Validate Templates
+    for template in data.templates:
+        for template_meal in template.template_meals:
+            if template_meal.meal_id not in meal_ids:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid template meal: meal_id {template_meal.meal_id} not found.",
+                )
+
+    # Validate Weekly Menus
+    for weekly_menu in data.weekly_menus:
+        for day in weekly_menu.weekly_menu_days:
+            if day.template_id not in template_ids:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid weekly menu day: template_id {day.template_id} not found.",
+                )
+
+    # Validate Tracked Days
+    for tracked_day in data.tracked_days:
+        for tracked_meal in tracked_day.tracked_meals:
+            if tracked_meal.meal_id not in meal_ids:
+                raise HTTPException(
+                    status_code=400,
+                    detail=f"Invalid tracked meal: meal_id {tracked_meal.meal_id} not found.",
+                )
+
 # Foods tab
 @app.get("/foods", response_class=HTMLResponse)
 async def foods_page(request: Request, db: Session = Depends(get_db)):
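Together, these endpoints give a whole-database backup and restore path. A quick way to exercise them end to end, sketched with requests (the base URL is an assumption; host and port are not specified in this commit):

import requests

BASE = "http://localhost:8000"  # assumed local dev address

# Download the full snapshot produced by /export/all.
snapshot = requests.get(f"{BASE}/export/all")
snapshot.raise_for_status()
with open("backup.json", "wb") as f:
    f.write(snapshot.content)

# Restore it via /import/all; this overwrites all existing data.
with open("backup.json", "rb") as f:
    resp = requests.post(
        f"{BASE}/import/all",
        files={"file": ("backup.json", f, "application/json")},
    )
resp.raise_for_status()
print(resp.json())  # {"status": "success", "message": "All data imported successfully."}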
requirements.txt (modified)

@@ -4,4 +4,6 @@ sqlalchemy>=2.0.24
 #psycopg2-binary==2.9.9
 python-multipart==0.0.6
 jinja2==3.1.2
 openfoodfacts>=0.2.0
+alembic>=1.13.1
+mako>=1.3.2
imports.html (modified)

@@ -51,6 +51,22 @@
         </div>
     </div>
+
+    <div class="row mt-4">
+        <div class="col-md-6">
+            <h3>Global Data Management</h3>
+            <div class="mb-3">
+                <button type="button" class="btn btn-info" onclick="exportAllData()">Export All Data</button>
+            </div>
+            <form action="/import/all" method="post" enctype="multipart/form-data" id="importAllForm">
+                <div class="mb-3">
+                    <label class="form-label">Import All Data (JSON)</label>
+                    <input type="file" class="form-control" name="file" accept=".json" required>
+                </div>
+                <button type="submit" class="btn btn-warning">Import All Data</button>
+            </form>
+        </div>
+    </div>

     <div class="mt-4" id="upload-results" style="display: none;">
         <div class="alert alert-success">
             <strong>Upload Results:</strong>

@@ -208,5 +224,47 @@ document.getElementById('offSearch').addEventListener('keypress', function(e) {
         searchOpenFoodFacts();
     }
 });
+
+function exportAllData() {
+    window.location.href = '/export/all';
+}
+
+document.getElementById('importAllForm').addEventListener('submit', async function(e) {
+    e.preventDefault();
+    const form = e.target;
+    const formData = new FormData(form);
+    const fileInput = form.querySelector('input[type="file"]');
+    const file = fileInput.files[0];
+
+    if (file) {
+        if (confirm('Are you sure you want to import all data? This will overwrite existing data.')) {
+            const submitBtn = form.querySelector('button[type="submit"]');
+            submitBtn.disabled = true;
+            submitBtn.innerHTML = '<span class="spinner-border spinner-border-sm" role="status"></span> Importing...';
+
+            try {
+                const response = await fetch('/import/all', {
+                    method: 'POST',
+                    body: formData
+                });
+
+                if (!response.ok) {
+                    const errorData = await response.json();
+                    throw new Error(errorData.detail || `HTTP error! status: ${response.status}`);
+                }
+
+                alert('Import successful! The page will now reload.');
+                window.location.reload();
+            } catch (error) {
+                alert('Import failed: ' + error.message);
+            } finally {
+                submitBtn.disabled = false;
+                submitBtn.innerHTML = 'Import All Data';
+            }
+        }
+    } else {
+        alert('Please select a JSON file to import.');
+    }
+});
 </script>
 {% endblock %}