Mirror of https://github.com/sstent/foodplanner.git (synced 2026-01-25 11:11:36 +00:00)
Reverted to manual DB table creation, need to fix Alembic
@@ -1,5 +1,6 @@
from logging.config import fileConfig

import logging
import os
from sqlalchemy import engine_from_config
from sqlalchemy import pool

@@ -16,8 +17,9 @@ if config.config_file_name is not None:

# add your model's MetaData object here
# for 'autogenerate' support
from main import Base
target_metadata = Base.metadata
# We create an empty metadata object since we're not using autogenerate
# and we have explicit migration files
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:

@@ -37,10 +39,11 @@ def run_migrations_offline() -> None:
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    # Use environment variable if available, otherwise fall back to config
    url = os.getenv('DATABASE_URL', config.get_main_option("sqlalchemy.url"))
    context.configure(
        url=url,
        target_metadata=target_metadata,
        target_metadata=None,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

@@ -56,22 +59,33 @@ def run_migrations_online() -> None:
    and associate a connection with the context.

    """
    logging.info("DEBUG: Creating database engine for alembic")
    # Use environment variable for database URL if available
    db_url = os.getenv('DATABASE_URL', config.get_main_option("sqlalchemy.url"))
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        {'sqlalchemy.url': db_url},
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    logging.info("DEBUG: Database engine created successfully")

    with connectable.connect() as connection:
        logging.info("DEBUG: Database connection established for alembic")
        context.configure(
            connection=connection, target_metadata=target_metadata
            connection=connection, target_metadata=None
        )
        logging.info("DEBUG: Alembic context configured")

        with context.begin_transaction():
            logging.info("DEBUG: Starting alembic transaction")
            context.run_migrations()
            logging.info("DEBUG: Alembic migrations completed within transaction")


if context.is_offline_mode():
    logging.info("DEBUG: Running migrations in offline mode")
    run_migrations_offline()
else:
    logging.info("DEBUG: Running migrations in online mode")
    run_migrations_online()
    logging.info("DEBUG: Online migrations completed")
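
Both run_migrations_offline() and run_migrations_online() above resolve the database URL the same way. A minimal sketch of that lookup, assuming it lives inside Alembic's env.py where the Config object is available as context.config (the resolve_db_url helper name is illustrative, not part of the project):

import os

from alembic import context

config = context.config

def resolve_db_url() -> str:
    """Prefer DATABASE_URL from the environment, fall back to the alembic.ini setting."""
    return os.getenv("DATABASE_URL", config.get_main_option("sqlalchemy.url"))
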
@@ -20,11 +20,151 @@ depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    op.create_table('foods',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.Column('serving_size', sa.String(), nullable=True),
    sa.Column('serving_unit', sa.String(), nullable=True),
    sa.Column('calories', sa.Float(), nullable=True),
    sa.Column('protein', sa.Float(), nullable=True),
    sa.Column('carbs', sa.Float(), nullable=True),
    sa.Column('fat', sa.Float(), nullable=True),
    sa.Column('fiber', sa.Float(), nullable=True),
    sa.Column('sugar', sa.Float(), nullable=True),
    sa.Column('sodium', sa.Float(), nullable=True),
    sa.Column('calcium', sa.Float(), nullable=True),
    sa.Column('source', sa.String(), nullable=True),
    sa.Column('brand', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_foods_id'), 'foods', ['id'], unique=False)
    op.create_index(op.f('ix_foods_name'), 'foods', ['name'], unique=True)

    op.create_table('meals',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.Column('meal_type', sa.String(), nullable=True),
    sa.Column('meal_time', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_meals_id'), 'meals', ['id'], unique=False)
    op.create_index(op.f('ix_meals_name'), 'meals', ['name'], unique=False)

    op.create_table('meal_foods',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('meal_id', sa.Integer(), nullable=True),
    sa.Column('food_id', sa.Integer(), nullable=True),
    sa.Column('quantity', sa.Float(), nullable=True),
    sa.ForeignKeyConstraint(['food_id'], ['foods.id'], ),
    sa.ForeignKeyConstraint(['meal_id'], ['meals.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_meal_foods_id'), 'meal_foods', ['id'], unique=False)

    op.create_table('plans',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('person', sa.String(), nullable=True),
    sa.Column('date', sa.Date(), nullable=True),
    sa.Column('meal_id', sa.Integer(), nullable=True),
    sa.Column('meal_time', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['meal_id'], ['meals.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_plans_id'), 'plans', ['id'], unique=False)
    op.create_index(op.f('ix_plans_person'), 'plans', ['person'], unique=False)
    op.create_index(op.f('ix_plans_date'), 'plans', ['date'], unique=False)

    op.create_table('templates',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_templates_id'), 'templates', ['id'], unique=False)
    op.create_index(op.f('ix_templates_name'), 'templates', ['name'], unique=True)

    op.create_table('template_meals',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('template_id', sa.Integer(), nullable=True),
    sa.Column('meal_id', sa.Integer(), nullable=True),
    sa.Column('meal_time', sa.String(), nullable=True),
    sa.ForeignKeyConstraint(['meal_id'], ['meals.id'], ),
    sa.ForeignKeyConstraint(['template_id'], ['templates.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_template_meals_id'), 'template_meals', ['id'], unique=False)

    op.create_table('weekly_menus',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weekly_menus_id'), 'weekly_menus', ['id'], unique=False)
    op.create_index(op.f('ix_weekly_menus_name'), 'weekly_menus', ['name'], unique=True)

    op.create_table('weekly_menu_days',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('weekly_menu_id', sa.Integer(), nullable=True),
    sa.Column('day_of_week', sa.Integer(), nullable=True),
    sa.Column('template_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['template_id'], ['templates.id'], ),
    sa.ForeignKeyConstraint(['weekly_menu_id'], ['weekly_menus.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weekly_menu_days_id'), 'weekly_menu_days', ['id'], unique=False)

    op.create_table('tracked_days',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('person', sa.String(), nullable=True),
    sa.Column('date', sa.Date(), nullable=True),
    sa.Column('is_modified', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tracked_days_id'), 'tracked_days', ['id'], unique=False)
    op.create_index(op.f('ix_tracked_days_person'), 'tracked_days', ['person'], unique=False)
    op.create_index(op.f('ix_tracked_days_date'), 'tracked_days', ['date'], unique=False)

    op.create_table('tracked_meals',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('tracked_day_id', sa.Integer(), nullable=True),
    sa.Column('meal_id', sa.Integer(), nullable=True),
    sa.Column('meal_time', sa.String(), nullable=True),
    sa.Column('quantity', sa.Float(), nullable=True),
    sa.ForeignKeyConstraint(['meal_id'], ['meals.id'], ),
    sa.ForeignKeyConstraint(['tracked_day_id'], ['tracked_days.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tracked_meals_id'), 'tracked_meals', ['id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    op.drop_index(op.f('ix_tracked_meals_id'), table_name='tracked_meals')
    op.drop_table('tracked_meals')
    op.drop_index(op.f('ix_tracked_days_date'), table_name='tracked_days')
    op.drop_index(op.f('ix_tracked_days_person'), table_name='tracked_days')
    op.drop_index(op.f('ix_tracked_days_id'), table_name='tracked_days')
    op.drop_table('tracked_days')
    op.drop_index(op.f('ix_weekly_menu_days_id'), table_name='weekly_menu_days')
    op.drop_table('weekly_menu_days')
    op.drop_index(op.f('ix_weekly_menus_name'), table_name='weekly_menus')
    op.drop_index(op.f('ix_weekly_menus_id'), table_name='weekly_menus')
    op.drop_table('weekly_menus')
    op.drop_index(op.f('ix_template_meals_id'), table_name='template_meals')
    op.drop_table('template_meals')
    op.drop_index(op.f('ix_templates_name'), table_name='templates')
    op.drop_index(op.f('ix_templates_id'), table_name='templates')
    op.drop_table('templates')
    op.drop_index(op.f('ix_plans_date'), table_name='plans')
    op.drop_index(op.f('ix_plans_person'), table_name='plans')
    op.drop_index(op.f('ix_plans_id'), table_name='plans')
    op.drop_table('plans')
    op.drop_index(op.f('ix_meal_foods_id'), table_name='meal_foods')
    op.drop_table('meal_foods')
    op.drop_index(op.f('ix_meals_name'), table_name='meals')
    op.drop_index(op.f('ix_meals_id'), table_name='meals')
    op.drop_table('meals')
    op.drop_index(op.f('ix_foods_name'), table_name='foods')
    op.drop_index(op.f('ix_foods_id'), table_name='foods')
    op.drop_table('foods')
    # ### end Alembic commands ###
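
A migration like the one above is normally applied through Alembic's command API; a minimal sketch using the same calls that appear in main.py's run_migrations() further down, assuming an alembic.ini at the project root:

from alembic import command
from alembic.config import Config

# Load the project's Alembic configuration and upgrade the database to the latest revision.
alembic_cfg = Config("alembic.ini")
command.upgrade(alembic_cfg, "head")
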
@@ -1,30 +0,0 @@
"""Create all tables

Revision ID: fd4f39482990
Revises: cf94fca21104
Create Date: 2025-09-28 12:15:02.412270

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'fd4f39482990'
down_revision: Union[str, None] = 'cf94fca21104'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
main.py (72 changed lines)
@@ -343,17 +343,20 @@ def scheduled_backup():

@app.on_event("startup")
def startup_event():
    logging.info("DEBUG: Startup event triggered")
    run_migrations()
    logging.info("DEBUG: Startup event completed")

    # Schedule the backup job - temporarily disabled for debugging
    # scheduler = BackgroundScheduler()
    # scheduler.add_job(scheduled_backup, 'cron', hour=0)
    # scheduler.start()
    # logging.info("Scheduled backup job started.")
    logging.info("Startup completed - scheduler temporarily disabled")
    scheduler = BackgroundScheduler()
    scheduler.add_job(scheduled_backup, 'cron', hour=0)
    scheduler.start()
    logging.info("Scheduled backup job started.")
    # logging.info("Startup completed - scheduler temporarily disabled")

def test_sqlite_connection(db_path):
    """Test if we can create and write to SQLite database file"""
    logging.info(f"DEBUG: Starting SQLite connection test for path: {db_path}")
    try:
        import sqlite3
        import stat
@@ -361,33 +364,54 @@ def test_sqlite_connection(db_path):

        # Log directory permissions
        db_dir = os.path.dirname(db_path)
        logging.info(f"DEBUG: Checking database directory: {db_dir}")
        if os.path.exists(db_dir):
            dir_stat = os.stat(db_dir)
            dir_perm = stat.filemode(dir_stat.st_mode)
            dir_uid = dir_stat.st_uid
            dir_gid = dir_stat.st_gid
            logging.info(f"Database directory permissions: {dir_perm}, UID:{dir_uid}, GID:{dir_gid}")
            logging.info(f"DEBUG: Database directory permissions: {dir_perm}, UID:{dir_uid}, GID:{dir_gid}")

            # Test write access
            test_file = os.path.join(db_dir, "write_test.txt")
            logging.info(f"DEBUG: Testing write access with file: {test_file}")
            try:
                with open(test_file, "w") as f:
                    f.write("test")
                os.remove(test_file)
                logging.info("Write test to directory succeeded")
                logging.info("DEBUG: Write test to directory succeeded")
            except Exception as e:
                logging.error(f"Write test to directory failed: {e}")
                logging.error(f"DEBUG: Write test to directory failed: {e}")
                return False
        else:
            logging.warning(f"Database directory does not exist: {db_dir}")
            logging.warning(f"DEBUG: Database directory does not exist: {db_dir}")
            return False

        # Test SQLite operations
        logging.info("DEBUG: Attempting SQLite connection...")
        conn = sqlite3.connect(db_path)
        logging.info("DEBUG: SQLite connection established")

        cursor = conn.cursor()
        logging.info("DEBUG: Creating test table...")
        cursor.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
        logging.info("DEBUG: Test table created")

        logging.info("DEBUG: Inserting test data...")
        cursor.execute("INSERT INTO test VALUES (1)")
        logging.info("DEBUG: Test data inserted")

        logging.info("DEBUG: Committing transaction...")
        conn.commit()
        logging.info("DEBUG: Transaction committed")

        logging.info("DEBUG: Dropping test table...")
        cursor.execute("DROP TABLE test")
        logging.info("DEBUG: Test table dropped")

        logging.info("DEBUG: Closing connection...")
        conn.close()
        logging.info("DEBUG: Connection closed")

        # Log file permissions
        if os.path.exists(db_path):
@@ -395,34 +419,46 @@ def test_sqlite_connection(db_path):
            file_perm = stat.filemode(file_stat.st_mode)
            file_uid = file_stat.st_uid
            file_gid = file_stat.st_gid
            logging.info(f"Database file permissions: {file_perm}, UID:{file_uid}, GID:{file_gid}")
            logging.info(f"DEBUG: Database file permissions: {file_perm}, UID:{file_uid}, GID:{file_gid}")

        logging.info("DEBUG: SQLite connection test completed successfully")
        return True
    except Exception as e:
        logging.error(f"SQLite connection test failed: {e}", exc_info=True)
        logging.error(f"DEBUG: SQLite connection test failed: {e}", exc_info=True)
        return False

def run_migrations():
    logging.info("Running database migrations...")
    logging.info("DEBUG: Starting database setup...")
    try:
        # Extract database path from URL
        db_path = DATABASE_URL.split("///")[1]
        logging.info(f"Database path: {db_path}")
        logging.info(f"DEBUG: Database path extracted: {db_path}")

        # Create directory if needed
        db_dir = os.path.dirname(db_path)
        logging.info(f"DEBUG: Database directory: {db_dir}")
        if not os.path.exists(db_dir):
            logging.info(f"Creating database directory: {db_dir}")
            logging.info(f"DEBUG: Creating database directory: {db_dir}")
            os.makedirs(db_dir, exist_ok=True)
            logging.info(f"DEBUG: Database directory created successfully")
        else:
            logging.info(f"DEBUG: Database directory already exists")

        # Test SQLite connection
        logging.info("DEBUG: Testing SQLite connection...")
        if not test_sqlite_connection(db_path):
            logging.error("DEBUG: SQLite connection test failed")
            raise Exception("SQLite connection test failed")
        logging.info("DEBUG: SQLite connection test passed")

        alembic_cfg = Config("alembic.ini")
        command.upgrade(alembic_cfg, "head")
        logging.info("Database migrations completed successfully.")
        # Create all tables using SQLAlchemy directly instead of alembic
        logging.info("DEBUG: Creating database tables using SQLAlchemy...")
        Base.metadata.create_all(bind=engine)
        logging.info("DEBUG: Database tables created successfully.")

        logging.info("DEBUG: Database setup completed, returning to caller")
    except Exception as e:
        logging.error(f"Failed to run database migrations: {e}", exc_info=True)
        logging.error(f"DEBUG: Failed to setup database: {e}", exc_info=True)
        raise

# Utility functions
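
The manual table creation that run_migrations() falls back to is plain SQLAlchemy: every model registered on Base is emitted as CREATE TABLE via Base.metadata.create_all(). A minimal self-contained sketch of that setup, assuming a declarative Base and engine as in main.py; the fallback URL and the trimmed Food model below are illustrative only, with the columns taken from the foods table above:

import os

from sqlalchemy import Column, Float, Integer, String, create_engine
from sqlalchemy.orm import declarative_base

# Illustrative fallback URL; the real value comes from the DATABASE_URL environment variable.
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///./data/app.db")

engine = create_engine(DATABASE_URL)
Base = declarative_base()

class Food(Base):
    # A trimmed-down version of the 'foods' table defined in the migration above.
    __tablename__ = "foods"
    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, unique=True, index=True)
    calories = Column(Float)
    protein = Column(Float)

# Create every table known to Base's metadata, which is what the startup hook now relies on.
Base.metadata.create_all(bind=engine)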