FitTrack2/FitnessSync/backend/alembic/versions/8c5791dd193e_add_missing_activity_columns.py
"""add_missing_activity_columns
Revision ID: 8c5791dd193e
Revises: 62a16d820130
Create Date: 2026-01-13 19:30:07.100001
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
import geoalchemy2
# revision identifiers, used by Alembic.
revision: str = '8c5791dd193e'
down_revision: Union[str, None] = '62a16d820130'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('scheduled_jobs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('job_type', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('interval_minutes', sa.Integer(), nullable=False),
sa.Column('params', sa.Text(), nullable=True),
sa.Column('enabled', sa.Boolean(), nullable=True),
sa.Column('last_run', sa.DateTime(timezone=True), nullable=True),
sa.Column('next_run', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_scheduled_jobs_id'), 'scheduled_jobs', ['id'], unique=False)
# op.drop_table('spatial_ref_sys')
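    # Note: add_column with nullable=False (here and for auth_status, health_metrics,
    # sync_logs and weight_records below) will fail on a table that already contains
    # rows unless a server_default is supplied or the data is backfilled first. A
    # hedged sketch of the usual three-step pattern (the backfill expression is
    # illustrative only, not taken from the models):
    #
    #   op.add_column('activities', sa.Column('garmin_activity_id', sa.String(), nullable=True))
    #   op.execute("UPDATE activities SET garmin_activity_id = id::text WHERE garmin_activity_id IS NULL")
    #   op.alter_column('activities', 'garmin_activity_id', nullable=False)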
op.add_column('activities', sa.Column('garmin_activity_id', sa.String(), nullable=False))
op.add_column('activities', sa.Column('activity_type', sa.String(), nullable=True))
op.add_column('activities', sa.Column('start_lat', sa.Float(), nullable=True))
op.add_column('activities', sa.Column('start_lng', sa.Float(), nullable=True))
op.add_column('activities', sa.Column('file_content', sa.LargeBinary(), nullable=True))
op.add_column('activities', sa.Column('file_type', sa.String(), nullable=True))
op.add_column('activities', sa.Column('download_status', sa.String(), nullable=True))
op.add_column('activities', sa.Column('downloaded_at', sa.DateTime(), nullable=True))
op.add_column('activities', sa.Column('bike_match_confidence', sa.Float(), nullable=True))
op.alter_column('activities', 'activity_name',
existing_type=sa.VARCHAR(),
nullable=True)
op.alter_column('activities', 'start_time',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
op.alter_column('activities', 'calories',
existing_type=sa.INTEGER(),
type_=sa.Float(),
existing_nullable=True)
    # Name the constraint explicitly so downgrade() can drop it by name;
    # drop_constraint(None, ...) raises unless a naming convention is configured.
    op.create_unique_constraint('uq_activities_garmin_activity_id', 'activities', ['garmin_activity_id'])
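    # The column drops below (and the matching drops on the other tables) discard data
    # irreversibly; downgrade() recreates the columns but cannot restore their contents.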
op.drop_column('activities', 'source')
op.drop_column('activities', 'activity_data')
op.drop_column('activities', 'end_time')
    # Use GiST for the geometry column; a plain B-tree index is not useful for spatial
    # queries and can exceed the B-tree row-size limit on long linestrings.
    op.create_index(op.f('ix_activity_streams_geom'), 'activity_streams', ['geom'], unique=False, postgresql_using='gist')
    op.create_index(op.f('ix_activity_streams_id'), 'activity_streams', ['id'], unique=False)
op.add_column('auth_status', sa.Column('service_type', sa.String(), nullable=False))
op.add_column('auth_status', sa.Column('username', sa.String(), nullable=True))
op.add_column('auth_status', sa.Column('authenticated', sa.Boolean(), nullable=True))
op.add_column('auth_status', sa.Column('token_expires_at', sa.DateTime(), nullable=True))
op.add_column('auth_status', sa.Column('last_login', sa.DateTime(), nullable=True))
op.add_column('auth_status', sa.Column('is_china', sa.Boolean(), nullable=True))
op.add_column('auth_status', sa.Column('last_check', sa.DateTime(), nullable=True))
op.drop_constraint('auth_status_service_key', 'auth_status', type_='unique')
op.drop_column('auth_status', 'is_authenticated')
op.drop_column('auth_status', 'last_sync')
op.drop_column('auth_status', 'service')
op.add_column('configurations', sa.Column('fitbit_client_id', sa.String(), nullable=True))
op.add_column('configurations', sa.Column('fitbit_client_secret', sa.String(), nullable=True))
op.add_column('configurations', sa.Column('garmin_username', sa.String(), nullable=True))
op.add_column('configurations', sa.Column('garmin_password', sa.String(), nullable=True))
op.add_column('configurations', sa.Column('sync_settings', sa.JSON(), nullable=True))
op.drop_constraint('configurations_key_key', 'configurations', type_='unique')
op.drop_column('configurations', 'key')
op.drop_column('configurations', 'value')
op.add_column('health_metrics', sa.Column('metric_value', sa.Float(), nullable=False))
op.add_column('health_metrics', sa.Column('timestamp', sa.DateTime(), nullable=False))
op.add_column('health_metrics', sa.Column('detailed_data', sa.Text(), nullable=True))
op.alter_column('health_metrics', 'date',
existing_type=sa.DATE(),
type_=sa.DateTime(),
existing_nullable=False)
op.drop_column('health_metrics', 'value')
op.add_column('segments', sa.Column('geom', geoalchemy2.types.Geometry(geometry_type='LINESTRING', srid=4326, dimension=2, from_text='ST_GeomFromEWKT', name='geometry'), nullable=True))
# Check if index exists before creating
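    # (GeoAlchemy2 may create the spatial index itself when the Geometry column is
    # added, so this guard keeps the migration re-runnable either way.)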
conn = op.get_bind()
idx_exists = conn.execute(sa.text("SELECT 1 FROM pg_indexes WHERE indexname = 'idx_segments_geom'")).scalar()
if not idx_exists:
op.create_index('idx_segments_geom', 'segments', ['geom'], unique=False, postgresql_using='gist')
ix_exists = conn.execute(sa.text("SELECT 1 FROM pg_indexes WHERE indexname = 'ix_segments_geom'")).scalar()
if not ix_exists:
op.create_index(op.f('ix_segments_geom'), 'segments', ['geom'], unique=False)
op.add_column('sync_logs', sa.Column('operation', sa.String(), nullable=False))
op.add_column('sync_logs', sa.Column('message', sa.Text(), nullable=True))
op.add_column('sync_logs', sa.Column('records_processed', sa.Integer(), nullable=True))
op.add_column('sync_logs', sa.Column('records_failed', sa.Integer(), nullable=True))
op.add_column('sync_logs', sa.Column('user_id', sa.Integer(), nullable=True))
op.drop_column('sync_logs', 'source')
op.drop_column('sync_logs', 'records_synced')
op.drop_column('sync_logs', 'error_message')
op.drop_column('sync_logs', 'destination')
op.drop_column('sync_logs', 'sync_type')
op.add_column('weight_records', sa.Column('fitbit_id', sa.String(), nullable=False))
op.add_column('weight_records', sa.Column('unit', sa.String(), nullable=False))
op.add_column('weight_records', sa.Column('timestamp', sa.DateTime(), nullable=False))
op.add_column('weight_records', sa.Column('sync_status', sa.String(), nullable=True))
op.add_column('weight_records', sa.Column('garmin_id', sa.String(), nullable=True))
op.alter_column('weight_records', 'date',
existing_type=sa.DATE(),
type_=sa.DateTime(),
existing_nullable=False)
    op.create_unique_constraint('uq_weight_records_fitbit_id', 'weight_records', ['fitbit_id'])
op.drop_column('weight_records', 'source')
op.drop_column('weight_records', 'body_fat')
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
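    # As in upgrade(), re-adding NOT NULL columns (e.g. weight_records.source,
    # sync_logs.sync_type, configurations.key) requires empty tables, a
    # server_default, or a backfill before NOT NULL is enforced.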
op.add_column('weight_records', sa.Column('body_fat', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True))
op.add_column('weight_records', sa.Column('source', sa.VARCHAR(), autoincrement=False, nullable=False))
    op.drop_constraint('uq_weight_records_fitbit_id', 'weight_records', type_='unique')
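    # Narrowing DateTime back to DATE (here and for health_metrics below) truncates
    # any time-of-day component.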
op.alter_column('weight_records', 'date',
existing_type=sa.DateTime(),
type_=sa.DATE(),
existing_nullable=False)
op.drop_column('weight_records', 'garmin_id')
op.drop_column('weight_records', 'sync_status')
op.drop_column('weight_records', 'timestamp')
op.drop_column('weight_records', 'unit')
op.drop_column('weight_records', 'fitbit_id')
op.add_column('sync_logs', sa.Column('sync_type', sa.VARCHAR(), autoincrement=False, nullable=False))
op.add_column('sync_logs', sa.Column('destination', sa.VARCHAR(), autoincrement=False, nullable=False))
op.add_column('sync_logs', sa.Column('error_message', sa.TEXT(), autoincrement=False, nullable=True))
op.add_column('sync_logs', sa.Column('records_synced', sa.INTEGER(), autoincrement=False, nullable=True))
op.add_column('sync_logs', sa.Column('source', sa.VARCHAR(), autoincrement=False, nullable=False))
op.drop_column('sync_logs', 'user_id')
op.drop_column('sync_logs', 'records_failed')
op.drop_column('sync_logs', 'records_processed')
op.drop_column('sync_logs', 'message')
op.drop_column('sync_logs', 'operation')
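    # These drops assume both segment indexes exist; if upgrade() skipped creating them
    # because they already existed, they are still removed here.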
op.drop_index(op.f('ix_segments_geom'), table_name='segments')
op.drop_index('idx_segments_geom', table_name='segments', postgresql_using='gist')
op.drop_column('segments', 'geom')
op.add_column('health_metrics', sa.Column('value', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False))
op.alter_column('health_metrics', 'date',
existing_type=sa.DateTime(),
type_=sa.DATE(),
existing_nullable=False)
op.drop_column('health_metrics', 'detailed_data')
op.drop_column('health_metrics', 'timestamp')
op.drop_column('health_metrics', 'metric_value')
op.add_column('configurations', sa.Column('value', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('configurations', sa.Column('key', sa.VARCHAR(), autoincrement=False, nullable=False))
op.create_unique_constraint('configurations_key_key', 'configurations', ['key'])
op.drop_column('configurations', 'sync_settings')
op.drop_column('configurations', 'garmin_password')
op.drop_column('configurations', 'garmin_username')
op.drop_column('configurations', 'fitbit_client_secret')
op.drop_column('configurations', 'fitbit_client_id')
op.add_column('auth_status', sa.Column('service', sa.VARCHAR(), autoincrement=False, nullable=False))
op.add_column('auth_status', sa.Column('last_sync', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.add_column('auth_status', sa.Column('is_authenticated', sa.BOOLEAN(), autoincrement=False, nullable=False))
op.create_unique_constraint('auth_status_service_key', 'auth_status', ['service'])
op.drop_column('auth_status', 'last_check')
op.drop_column('auth_status', 'is_china')
op.drop_column('auth_status', 'last_login')
op.drop_column('auth_status', 'token_expires_at')
op.drop_column('auth_status', 'authenticated')
op.drop_column('auth_status', 'username')
op.drop_column('auth_status', 'service_type')
op.drop_index(op.f('ix_activity_streams_id'), table_name='activity_streams')
op.drop_index(op.f('ix_activity_streams_geom'), table_name='activity_streams')
op.add_column('activities', sa.Column('end_time', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.add_column('activities', sa.Column('activity_data', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=True))
op.add_column('activities', sa.Column('source', sa.VARCHAR(), autoincrement=False, nullable=False))
    op.drop_constraint('uq_activities_garmin_activity_id', 'activities', type_='unique')
op.alter_column('activities', 'calories',
existing_type=sa.Float(),
type_=sa.INTEGER(),
existing_nullable=True)
op.alter_column('activities', 'start_time',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
op.alter_column('activities', 'activity_name',
existing_type=sa.VARCHAR(),
nullable=False)
op.drop_column('activities', 'bike_match_confidence')
op.drop_column('activities', 'downloaded_at')
op.drop_column('activities', 'download_status')
op.drop_column('activities', 'file_type')
op.drop_column('activities', 'file_content')
op.drop_column('activities', 'start_lng')
op.drop_column('activities', 'start_lat')
op.drop_column('activities', 'activity_type')
op.drop_column('activities', 'garmin_activity_id')
# op.create_table('spatial_ref_sys',
# sa.Column('srid', sa.INTEGER(), autoincrement=False, nullable=False),
# sa.Column('auth_name', sa.VARCHAR(length=256), autoincrement=False, nullable=True),
# sa.Column('auth_srid', sa.INTEGER(), autoincrement=False, nullable=True),
# sa.Column('srtext', sa.VARCHAR(length=2048), autoincrement=False, nullable=True),
# sa.Column('proj4text', sa.VARCHAR(length=2048), autoincrement=False, nullable=True),
# sa.CheckConstraint('srid > 0 AND srid <= 998999', name='spatial_ref_sys_srid_check'),
# sa.PrimaryKeyConstraint('srid', name='spatial_ref_sys_pkey')
# )
op.drop_index(op.f('ix_scheduled_jobs_id'), table_name='scheduled_jobs')
op.drop_table('scheduled_jobs')
# ### end Alembic commands ###