2025-09-11 07:45:25 -07:00
parent f443e7a64e
commit 651ce46183
46 changed files with 5063 additions and 164 deletions

View File

@@ -0,0 +1,40 @@
from fastapi import APIRouter, Query, HTTPException
from fastapi.responses import FileResponse
from app.services.export_service import ExportService
from pathlib import Path
import logging
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get("/export")
async def export_data(
types: str = Query(..., description="Comma-separated list of data types to export"),
format: str = Query('json', description="Export format (json, zip, gpx)")
):
valid_types = {'routes', 'rules', 'plans'}
    requested_types = {t.strip() for t in types.split(',') if t.strip()}  # tolerate spaces after commas
# Validate requested types
    if not requested_types.issubset(valid_types):
        raise HTTPException(
            status_code=400,
            detail=f"Invalid export types. Valid types are: {', '.join(sorted(valid_types))}"
        )
    if format not in ('json', 'zip', 'gpx'):
        raise HTTPException(status_code=400, detail=f"Invalid export format: {format}")
try:
exporter = ExportService()
export_path = await exporter.create_export(
export_types=list(requested_types),
export_format=format
)
        # GPX exports are also packaged as ZIP archives by ExportService,
        # so they need the ZIP media type and the service-generated filename
        is_zip = format in ('zip', 'gpx')
        return FileResponse(
            export_path,
            media_type="application/zip" if is_zip else "application/json",
            filename=export_path.name
        )
except Exception as e:
logger.error(f"Export failed: {str(e)}")
raise HTTPException(status_code=500, detail="Export failed") from e
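
A quick client-side sketch of the endpoint above (a hypothetical script, not part of this commit; the base URL and port are placeholder assumptions):

import requests

# Request routes and rules bundled as a ZIP archive
resp = requests.get(
    "http://localhost:8000/export",
    params={"types": "routes,rules", "format": "zip"},
)
resp.raise_for_status()
with open("export.zip", "wb") as f:
    f.write(resp.content)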

View File

@@ -0,0 +1,38 @@
from fastapi import APIRouter, UploadFile, File, Form, HTTPException
from fastapi.responses import JSONResponse
from app.services.import_service import ImportService
import json
import logging
from typing import Optional
router = APIRouter()
logger = logging.getLogger(__name__)
@router.post("/import/validate")
async def validate_import(
file: UploadFile = File(...),
):
try:
importer = ImportService()
validation_result = await importer.validate_import(file)
return JSONResponse(content=validation_result)
except Exception as e:
logger.error(f"Import validation failed: {str(e)}")
raise HTTPException(status_code=400, detail=str(e)) from e
@router.post("/import")
async def execute_import(
file: UploadFile = File(...),
conflict_resolution: str = Form("skip"),
resolutions: Optional[str] = Form(None),
):
    try:
        # `resolutions` arrives as a JSON-encoded string in the form data;
        # decode it into the list of dicts the service expects
        parsed_resolutions = json.loads(resolutions) if resolutions else []
        importer = ImportService()
        import_result = await importer.execute_import(
            file,
            conflict_resolution,
            parsed_resolutions
        )
return JSONResponse(content=import_result)
except Exception as e:
logger.error(f"Import failed: {str(e)}")
raise HTTPException(status_code=500, detail=str(e)) from e
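
A minimal sketch of the intended two-step flow (validate first, then import with per-conflict resolutions); the base URL and archive name are placeholder assumptions:

import json
import requests

BASE = "http://localhost:8000"

# Step 1: upload the archive for validation and collect reported conflicts
with open("export.zip", "rb") as f:
    report = requests.post(
        f"{BASE}/import/validate",
        files={"file": ("export.zip", f, "application/zip")},
    ).json()

# Step 2: resolve every reported conflict by overwriting, skip everything else
resolutions = [{"id": c["id"], "action": "overwrite"} for c in report.get("conflicts", [])]
with open("export.zip", "rb") as f:
    result = requests.post(
        f"{BASE}/import",
        files={"file": ("export.zip", f, "application/zip")},
        data={"conflict_resolution": "skip", "resolutions": json.dumps(resolutions)},
    ).json()
print(result)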

View File

@@ -0,0 +1,138 @@
import json
from pathlib import Path
from datetime import datetime
import zipfile
from app.database import SessionLocal
from app.models import Route, Rule, Plan
import tempfile
import logging
logger = logging.getLogger(__name__)
class ExportService:
def __init__(self):
self.temp_dir = Path(tempfile.gettempdir()) / "cycling_exports"
self.temp_dir.mkdir(exist_ok=True)
async def create_export(self, export_types, export_format):
"""Main export creation entry point"""
export_data = await self._fetch_export_data(export_types)
export_path = self._generate_export_file(export_data, export_format, export_types)
return export_path
async def _fetch_export_data(self, export_types):
"""Fetch data from database based on requested types"""
db = SessionLocal()
try:
data = {}
if 'routes' in export_types:
routes = db.query(Route).all()
data['routes'] = [self._serialize_route(r) for r in routes]
if 'rules' in export_types:
rules = db.query(Rule).all()
data['rules'] = [self._serialize_rule(r) for r in rules]
if 'plans' in export_types:
plans = db.query(Plan).all()
data['plans'] = [self._serialize_plan(p) for p in plans]
return data
finally:
db.close()
def _generate_export_file(self, data, format, types):
"""Generate the export file in specified format"""
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
base_name = f"export_{'_'.join(types)}_{timestamp}"
if format == 'json':
return self._create_json_export(data, base_name)
elif format == 'zip':
return self._create_zip_export(data, base_name)
elif format == 'gpx':
return self._create_gpx_export(data, base_name)
else:
raise ValueError(f"Unsupported format: {format}")
def _create_json_export(self, data, base_name):
"""Create single JSON file export"""
export_path = self.temp_dir / f"{base_name}.json"
with open(export_path, 'w') as f:
json.dump(data, f, indent=2)
return export_path
def _create_zip_export(self, data, base_name):
"""Create ZIP archive with JSON and GPX files"""
zip_path = self.temp_dir / f"{base_name}.zip"
with zipfile.ZipFile(zip_path, 'w') as zipf:
# Add JSON data
json_path = self._create_json_export(data, base_name)
zipf.write(json_path, arcname=json_path.name)
# Add GPX files if exporting routes
if 'routes' in data:
gpx_dir = Path("/app/data/gpx")
                for route in data['routes']:
                    if not route.get('gpx_file_path'):
                        continue  # route has no GPX file on record
                    gpx_path = gpx_dir / route['gpx_file_path']
                    if gpx_path.exists():
                        zipf.write(gpx_path, arcname=f"gpx/{gpx_path.name}")
return zip_path
def _create_gpx_export(self, data, base_name):
"""Export only GPX files from routes"""
if 'routes' not in data:
raise ValueError("GPX export requires routes to be selected")
zip_path = self.temp_dir / f"{base_name}.zip"
with zipfile.ZipFile(zip_path, 'w') as zipf:
gpx_dir = Path("/app/data/gpx")
            for route in data['routes']:
                if not route.get('gpx_file_path'):
                    continue  # route has no GPX file on record
                gpx_path = gpx_dir / route['gpx_file_path']
                if gpx_path.exists():
                    zipf.write(gpx_path, arcname=gpx_path.name)
return zip_path
def _serialize_route(self, route):
return {
"id": route.id,
"name": route.name,
"description": route.description,
"category": route.category,
"gpx_file_path": route.gpx_file_path,
"created_at": route.created_at.isoformat(),
"updated_at": route.updated_at.isoformat()
}
def _serialize_rule(self, rule):
return {
"id": rule.id,
"name": rule.name,
"natural_language": rule.natural_language,
"jsonb_rules": rule.jsonb_rules,
"version": rule.version,
"created_at": rule.created_at.isoformat()
}
def _serialize_plan(self, plan):
return {
"id": plan.id,
"name": plan.name,
"jsonb_plan": plan.jsonb_plan,
"version": plan.version,
"created_at": plan.created_at.isoformat()
}
def cleanup_temp_files(self):
"""Clean up temporary export files older than 1 hour"""
cutoff = datetime.now().timestamp() - 3600
for file in self.temp_dir.glob("*"):
if file.stat().st_mtime < cutoff:
try:
file.unlink()
except Exception as e:
logger.warning(f"Failed to clean up temp file {file}: {str(e)}")
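
Nothing in this commit appears to call cleanup_temp_files; one way to wire it up is a background sweeper started from the FastAPI lifespan (a sketch under that assumption, not part of this commit):

import asyncio
from contextlib import asynccontextmanager
from fastapi import FastAPI
from app.services.export_service import ExportService

@asynccontextmanager
async def lifespan(app: FastAPI):
    async def sweep():
        while True:
            ExportService().cleanup_temp_files()
            await asyncio.sleep(3600)  # hourly, matching the 1-hour cutoff
    task = asyncio.create_task(sweep())
    yield
    task.cancel()

app = FastAPI(lifespan=lifespan)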

View File

@@ -0,0 +1,259 @@
import json
import zipfile
from pathlib import Path
import tempfile
from datetime import datetime
import shutil
import logging
from typing import List
from fastapi import UploadFile
from app.database import SessionLocal
from app.models import Route, Rule, Plan
logger = logging.getLogger(__name__)
class ImportService:
def __init__(self):
self.temp_dir = Path(tempfile.gettempdir()) / "cycling_imports"
self.temp_dir.mkdir(exist_ok=True)
async def validate_import(self, file: UploadFile) -> dict:
"""Validate import file and detect conflicts"""
try:
            # Save uploaded file to temp location (basename only, to block path traversal)
            file_path = self.temp_dir / Path(file.filename).name
            with open(file_path, "wb") as f:
                shutil.copyfileobj(file.file, f)
# Extract data based on file type
if file.filename.endswith('.zip'):
data = self._process_zip_import(file_path)
elif file.filename.endswith('.json'):
data = self._process_json_import(file_path)
else:
raise ValueError("Unsupported file format")
# Detect conflicts
conflicts = []
if 'routes' in data:
conflicts += self._detect_route_conflicts(data['routes'])
if 'rules' in data:
conflicts += self._detect_rule_conflicts(data['rules'])
if 'plans' in data:
conflicts += self._detect_plan_conflicts(data['plans'])
return {
"valid": True,
"conflicts": conflicts,
"summary": {
"routes": len(data.get('routes', [])),
"rules": len(data.get('rules', [])),
"plans": len(data.get('plans', []))
}
}
except Exception as e:
logger.error(f"Validation error: {str(e)}")
return {"valid": False, "error": str(e)}
async def execute_import(self, file: UploadFile,
conflict_resolution: str,
resolutions: List[dict]) -> dict:
"""Execute the import with specified conflict resolution"""
db = SessionLocal()
        try:
            # No explicit begin(): the session opens a transaction implicitly
            # Process file (basename only, to block path traversal)
            file_path = self.temp_dir / Path(file.filename).name
            with open(file_path, "wb") as f:
                shutil.copyfileobj(file.file, f)
if file.filename.endswith('.zip'):
data = self._process_zip_import(file_path)
gpx_files = self._extract_gpx_files(file_path)
elif file.filename.endswith('.json'):
data = self._process_json_import(file_path)
gpx_files = []
else:
raise ValueError("Unsupported file format")
            # Apply per-item resolutions; fall back to the global conflict_resolution
            resolution_map = {r['id']: r['action'] for r in (resolutions or [])}
# Import data
results = {
"imported": {"routes": 0, "rules": 0, "plans": 0},
"skipped": {"routes": 0, "rules": 0, "plans": 0},
"errors": []
}
# Import routes
if 'routes' in data:
for route_data in data['routes']:
action = resolution_map.get(route_data['id'], conflict_resolution)
try:
if self._should_import_route(route_data, action, db):
self._import_route(route_data, db)
results["imported"]["routes"] += 1
else:
results["skipped"]["routes"] += 1
except Exception as e:
results["errors"].append(f"Route {route_data['id']}: {str(e)}")
# Import rules
if 'rules' in data:
for rule_data in data['rules']:
action = resolution_map.get(rule_data['id'], conflict_resolution)
try:
if self._should_import_rule(rule_data, action, db):
self._import_rule(rule_data, db)
results["imported"]["rules"] += 1
else:
results["skipped"]["rules"] += 1
except Exception as e:
results["errors"].append(f"Rule {rule_data['id']}: {str(e)}")
# Import plans
if 'plans' in data:
for plan_data in data['plans']:
action = resolution_map.get(plan_data['id'], conflict_resolution)
try:
if self._should_import_plan(plan_data, action, db):
self._import_plan(plan_data, db)
results["imported"]["plans"] += 1
else:
results["skipped"]["plans"] += 1
except Exception as e:
results["errors"].append(f"Plan {plan_data['id']}: {str(e)}")
            # Save GPX files
            if gpx_files:
                gpx_dir = Path("/app/data/gpx")
                gpx_dir.mkdir(parents=True, exist_ok=True)
                for gpx in gpx_files:
                    shutil.move(str(gpx), str(gpx_dir / gpx.name))
db.commit()
return results
        except Exception as e:
            db.rollback()
            logger.error(f"Import failed: {str(e)}")
            # Re-raise so the router returns a proper 500 instead of a 200 error body
            raise
finally:
db.close()
self._cleanup_temp_files()
def _process_zip_import(self, file_path: Path) -> dict:
"""Extract and process ZIP file import"""
data = {}
with zipfile.ZipFile(file_path, 'r') as zipf:
# Find data.json
json_files = [f for f in zipf.namelist() if f.endswith('.json')]
if not json_files:
raise ValueError("No JSON data found in ZIP file")
with zipf.open(json_files[0]) as f:
data = json.load(f)
return data
def _process_json_import(self, file_path: Path) -> dict:
"""Process JSON file import"""
with open(file_path) as f:
return json.load(f)
def _extract_gpx_files(self, file_path: Path) -> List[Path]:
"""Extract GPX files from ZIP archive"""
gpx_files = []
extract_dir = self.temp_dir / "gpx"
extract_dir.mkdir(exist_ok=True)
with zipfile.ZipFile(file_path, 'r') as zipf:
for file in zipf.namelist():
if file.startswith('gpx/') and file.endswith('.gpx'):
zipf.extract(file, extract_dir)
gpx_files.append(extract_dir / file)
return gpx_files
def _detect_route_conflicts(self, routes: List[dict]) -> List[dict]:
conflicts = []
db = SessionLocal()
try:
for route in routes:
existing = db.query(Route).filter(
(Route.id == route['id']) |
(Route.name == route['name'])
).first()
if existing:
conflict = {
"type": "route",
"id": route['id'],
"name": route['name'],
"existing_version": existing.updated_at,
"import_version": datetime.fromisoformat(route['updated_at']),
"resolution_options": ["overwrite", "rename", "skip"]
}
conflicts.append(conflict)
finally:
db.close()
return conflicts
def _should_import_route(self, route_data: dict, action: str, db) -> bool:
existing = db.query(Route).filter(
(Route.id == route_data['id']) |
(Route.name == route_data['name'])
).first()
if not existing:
return True
if action == 'overwrite':
return True
elif action == 'rename':
route_data['name'] = f"{route_data['name']} (Imported)"
return True
elif action == 'skip':
return False
return False
def _import_route(self, route_data: dict, db):
"""Import a single route"""
existing = db.query(Route).get(route_data['id'])
if existing:
# Update existing route
existing.name = route_data['name']
existing.description = route_data['description']
existing.category = route_data['category']
existing.gpx_file_path = route_data['gpx_file_path']
existing.updated_at = datetime.fromisoformat(route_data['updated_at'])
else:
# Create new route
route = Route(
id=route_data['id'],
name=route_data['name'],
description=route_data['description'],
category=route_data['category'],
gpx_file_path=route_data['gpx_file_path'],
created_at=datetime.fromisoformat(route_data['created_at']),
updated_at=datetime.fromisoformat(route_data['updated_at'])
)
db.add(route)
# Similar methods for rules and plans would follow...
def _cleanup_temp_files(self):
"""Clean up temporary files older than 1 hour"""
cutoff = datetime.now().timestamp() - 3600
for file in self.temp_dir.glob("*"):
if file.stat().st_mtime < cutoff:
try:
if file.is_dir():
shutil.rmtree(file)
else:
file.unlink()
except Exception as e:
logger.warning(f"Failed to clean temp file {file}: {str(e)}")
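
The rule and plan counterparts flagged as elided above would presumably mirror the route methods; a sketch of the conflict detector for rules, assuming only the Rule fields that ExportService._serialize_rule exports:

    def _detect_rule_conflicts(self, rules: List[dict]) -> List[dict]:
        # Sketch only: same shape as _detect_route_conflicts, keyed on id or name
        conflicts = []
        db = SessionLocal()
        try:
            for rule in rules:
                existing = db.query(Rule).filter(
                    (Rule.id == rule['id']) | (Rule.name == rule['name'])
                ).first()
                if existing:
                    conflicts.append({
                        "type": "rule",
                        "id": rule['id'],
                        "name": rule['name'],
                        "resolution_options": ["overwrite", "rename", "skip"],
                    })
        finally:
            db.close()
        return conflicts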