before claude fix #1
1  FitnessSync/backend/alembic/README  Normal file
@@ -0,0 +1 @@
Generic single-database configuration.

BIN  FitnessSync/backend/alembic/__pycache__/env.cpython-313.pyc  Normal file
Binary file not shown.

80  FitnessSync/backend/alembic/env.py  Normal file
@@ -0,0 +1,80 @@
from logging.config import fileConfig
import os
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Set the database URL from environment variable if available
database_url = os.getenv("DATABASE_URL")
if database_url:
    config.set_main_option("sqlalchemy.url", database_url)

# Import our models to ensure they are registered with Base
from src.models import Base
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
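
Note: env.py imports Base from src.models, a module that is not included in this commit. Purely for orientation, here is a hypothetical sketch of what such a module might look like, using the classic declarative Column style and the weight_records columns from the initial migration further down; the names and layout are assumptions, not the project's actual code.

# Hypothetical sketch of src/models.py (not part of this commit). It exists only to
# show how Base.metadata gets populated so that target_metadata works for autogenerate.
# Column names mirror the weight_records table created by revision 24df1381ac00.
from sqlalchemy import Column, Date, DateTime, Float, Integer, String, func
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class WeightRecord(Base):
    __tablename__ = "weight_records"

    id = Column(Integer, primary_key=True, index=True)
    date = Column(Date, nullable=False)
    weight = Column(Float, nullable=False)
    bmi = Column(Float, nullable=True)
    body_fat = Column(Float, nullable=True)
    source = Column(String, nullable=False)
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=True)
    updated_at = Column(DateTime(timezone=True), nullable=True)

The real module presumably defines one model per table in the migration below; only one is sketched here.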

26  FitnessSync/backend/alembic/script.py.mako  Normal file
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
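
This template is what alembic revision renders into the two version files that follow. As a usage note, the resulting migrations can also be applied from Python rather than the alembic CLI; a minimal sketch, assuming an alembic.ini next to the alembic/ directory (the ini file is not shown in this diff) and the DATABASE_URL variable that env.py reads:

# Sketch: apply all revisions programmatically, equivalent to `alembic upgrade head`.
import os

from alembic import command
from alembic.config import Config


def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    cfg = Config(ini_path)
    database_url = os.getenv("DATABASE_URL")
    if database_url:
        # env.py already prefers DATABASE_URL; setting it here as well keeps the
        # programmatic path consistent with the CLI path.
        cfg.set_main_option("sqlalchemy.url", database_url)
    command.upgrade(cfg, "head")


if __name__ == "__main__":
    upgrade_to_head()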
@@ -0,0 +1,151 @@
"""Initial migration

Revision ID: 24df1381ac00
Revises:
Create Date: 2025-12-22 15:04:54.280508

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '24df1381ac00'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # Create configurations table
    op.create_table('configurations',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(), nullable=False),
        sa.Column('value', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('key')
    )
    op.create_index(op.f('ix_configurations_id'), 'configurations', ['id'], unique=False)

    # Create api_tokens table
    op.create_table('api_tokens',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(), nullable=False),
        sa.Column('access_token', sa.String(), nullable=False),
        sa.Column('refresh_token', sa.String(), nullable=True),
        sa.Column('expires_at', sa.DateTime(), nullable=False),
        sa.Column('scopes', sa.String(), nullable=True),
        sa.Column('garth_oauth1_token', sa.String(), nullable=True),
        sa.Column('garth_oauth2_token', sa.String(), nullable=True),
        sa.Column('last_used', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_api_tokens_id'), 'api_tokens', ['id'], unique=False)

    # Create auth_status table
    op.create_table('auth_status',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('service', sa.String(), nullable=False),
        sa.Column('is_authenticated', sa.Boolean(), nullable=False),
        sa.Column('last_sync', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('service')
    )
    op.create_index(op.f('ix_auth_status_id'), 'auth_status', ['id'], unique=False)

    # Create weight_records table
    op.create_table('weight_records',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('date', sa.Date(), nullable=False),
        sa.Column('weight', sa.Float(), nullable=False),
        sa.Column('bmi', sa.Float(), nullable=True),
        sa.Column('body_fat', sa.Float(), nullable=True),
        sa.Column('source', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_weight_records_id'), 'weight_records', ['id'], unique=False)

    # Create activities table
    op.create_table('activities',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('activity_name', sa.String(), nullable=False),
        sa.Column('start_time', sa.DateTime(), nullable=False),
        sa.Column('end_time', sa.DateTime(), nullable=True),
        sa.Column('duration', sa.Integer(), nullable=True),
        sa.Column('calories', sa.Integer(), nullable=True),
        sa.Column('distance', sa.Float(), nullable=True),
        sa.Column('source', sa.String(), nullable=False),
        sa.Column('activity_data', sa.JSON(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_activities_id'), 'activities', ['id'], unique=False)

    # Create health_metrics table
    op.create_table('health_metrics',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('date', sa.Date(), nullable=False),
        sa.Column('metric_type', sa.String(), nullable=False),
        sa.Column('value', sa.Float(), nullable=False),
        sa.Column('unit', sa.String(), nullable=True),
        sa.Column('source', sa.String(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_health_metrics_id'), 'health_metrics', ['id'], unique=False)

    # Create sync_logs table
    op.create_table('sync_logs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('sync_type', sa.String(), nullable=False),
        sa.Column('source', sa.String(), nullable=False),
        sa.Column('destination', sa.String(), nullable=False),
        sa.Column('start_time', sa.DateTime(), nullable=False),
        sa.Column('end_time', sa.DateTime(), nullable=True),
        sa.Column('status', sa.String(), nullable=False),
        sa.Column('records_synced', sa.Integer(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_sync_logs_id'), 'sync_logs', ['id'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_sync_logs_id'), table_name='sync_logs')
    op.drop_table('sync_logs')

    op.drop_index(op.f('ix_health_metrics_id'), table_name='health_metrics')
    op.drop_table('health_metrics')

    op.drop_index(op.f('ix_activities_id'), table_name='activities')
    op.drop_table('activities')

    op.drop_index(op.f('ix_weight_records_id'), table_name='weight_records')
    op.drop_table('weight_records')

    op.drop_index(op.f('ix_auth_status_id'), table_name='auth_status')
    op.drop_table('auth_status')

    op.drop_index(op.f('ix_api_tokens_id'), table_name='api_tokens')
    op.drop_table('api_tokens')

    op.drop_index(op.f('ix_configurations_id'), table_name='configurations')
    op.drop_table('configurations')
    # ### end Alembic commands ###
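
Once this revision has been applied, the seven tables it creates can be sanity-checked directly with SQLAlchemy's inspector; a minimal sketch, assuming DATABASE_URL points at the migrated database:

# Sketch: verify that the initial migration created the expected tables and indexes.
import os

from sqlalchemy import create_engine, inspect

engine = create_engine(os.environ["DATABASE_URL"])
inspector = inspect(engine)

expected = {
    "configurations", "api_tokens", "auth_status",
    "weight_records", "activities", "health_metrics", "sync_logs",
}
missing = expected - set(inspector.get_table_names())
print("missing tables:", missing or "none")
print("weight_records indexes:",
      [ix["name"] for ix in inspector.get_indexes("weight_records")])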
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,32 @@
"""add_mfa_session_fields_to_api_tokens

Revision ID: ce0f0282a142
Revises: 24df1381ac00
Create Date: 2025-12-22 18:06:20.525940

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'ce0f0282a142'
down_revision: Union[str, None] = '24df1381ac00'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add MFA session columns to api_tokens table
    op.add_column('api_tokens', sa.Column('mfa_session_id', sa.String(), nullable=True))
    op.add_column('api_tokens', sa.Column('mfa_resume_data', sa.String(), nullable=True))
    op.add_column('api_tokens', sa.Column('mfa_expires_at', sa.DateTime(), nullable=True))


def downgrade() -> None:
    # Remove MFA session columns from api_tokens table
    op.drop_column('api_tokens', 'mfa_session_id')
    op.drop_column('api_tokens', 'mfa_resume_data')
    op.drop_column('api_tokens', 'mfa_expires_at')
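
All three new columns are nullable, so this revision is additive and safe to run against existing api_tokens rows. The ORM model in src.models (not shown in this diff) would need matching attributes; a hypothetical sketch in the same classic Column style used earlier, showing only the primary key and the new fields:

# Hypothetical sketch of the ORM side of revision ce0f0282a142. Only the primary key
# and the three new MFA columns are shown; the real ApiToken model would also carry
# the columns from the initial migration.
from sqlalchemy import Column, DateTime, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class ApiToken(Base):
    __tablename__ = "api_tokens"

    id = Column(Integer, primary_key=True, index=True)
    mfa_session_id = Column(String, nullable=True)    # identifier of a pending MFA challenge
    mfa_resume_data = Column(String, nullable=True)   # serialized state needed to resume the login
    mfa_expires_at = Column(DateTime, nullable=True)  # when the pending MFA session expires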