ready to test the import
@@ -10,9 +10,15 @@ from sqlalchemy.orm import Session
 from app.database.base import get_db
 from app.auth.security import get_current_user
 from app.models.user import User
-from app.models import *
+from app.models.rolodex import Rolodex, Phone
+from app.models.files import File
+from app.models.ledger import Ledger
+from app.models.qdro import QDRO
+from app.models.pensions import Pension, PensionSchedule, MarriageHistory, DeathBenefit, SeparationAgreement, LifeTable, NumberTable
+from app.models.lookups import Employee, FileType, FileStatus, TransactionType, TransactionCode, State, GroupLookup, Footer, PlanInfo, FormIndex, FormList, PrinterSetup, SystemSetup
+from app.models.additional import Payment, Deposit, FileNote, FormVariable, ReportVariable
 
-router = APIRouter(prefix="/api/import", tags=["import"])
+router = APIRouter(tags=["import"])
 
 
 # CSV to Model mapping
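
Note on the router change above: with prefix="/api/import" dropped from the APIRouter constructor, the "/api/import" prefix has to be supplied wherever the router is mounted, or every route moves to the bare path. A minimal sketch of the assumed mount point (the module path and alias are illustrative guesses, not shown in this diff):

    # Assumed wiring in the app entry point; app.routers.import_csv and
    # import_router are hypothetical names for illustration only.
    from fastapi import FastAPI
    from app.routers.import_csv import router as import_router

    app = FastAPI()
    app.include_router(import_router, prefix="/api/import")  # prefix now lives here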
@@ -658,4 +664,144 @@ async def validate_csv_file(
         }
 
     except Exception as e:
-        raise HTTPException(status_code=500, detail=f"Validation failed: {str(e)}")
+        raise HTTPException(status_code=500, detail=f"Validation failed: {str(e)}")
+
+
+@router.get("/progress/{import_id}")
+async def get_import_progress(
+    import_id: str,
+    current_user: User = Depends(get_current_user)
+):
+    """Get import progress status (placeholder for future implementation)"""
+    # This would be used for long-running imports with background tasks
+    return {
+        "import_id": import_id,
+        "status": "not_implemented",
+        "message": "Real-time progress tracking not yet implemented"
+    }
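
The endpoint above is an acknowledged placeholder. A minimal sketch of one way the hinted-at background-task implementation could look, using an in-process progress registry (IMPORT_PROGRESS and run_import are hypothetical, not part of this commit; a multi-worker deployment would need Redis or a DB table instead):

    # Hypothetical sketch: progress registry updated by a background import task.
    import asyncio

    IMPORT_PROGRESS: dict = {}  # import_id -> {"status": str, "done": int, "total": int}

    async def run_import(import_id: str, rows: list) -> None:
        # Stand-in for the real per-row import loop.
        IMPORT_PROGRESS[import_id] = {"status": "running", "done": 0, "total": len(rows)}
        for i, _row in enumerate(rows, start=1):
            await asyncio.sleep(0)  # yield so progress polls stay responsive
            IMPORT_PROGRESS[import_id]["done"] = i
        IMPORT_PROGRESS[import_id]["status"] = "complete"

    # get_import_progress would then return
    #   IMPORT_PROGRESS.get(import_id, {"status": "unknown"})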
+
+
+@router.post("/batch-upload")
+async def batch_import_csv_files(
+    files: List[UploadFile] = UploadFileForm(...),
+    replace_existing: bool = Form(False),
+    db: Session = Depends(get_db),
+    current_user: User = Depends(get_current_user)
+):
+    """Import multiple CSV files in optimal order"""
+
+    if len(files) > 20:
+        raise HTTPException(status_code=400, detail="Maximum 20 files allowed per batch")
+
+    # Define optimal import order based on dependencies
+    import_order = [
+        "STATES.csv", "GRUPLKUP.csv", "EMPLOYEE.csv", "FILETYPE.csv", "FILESTAT.csv",
+        "TRNSTYPE.csv", "TRNSLKUP.csv", "FOOTERS.csv", "SETUP.csv", "PRINTERS.csv",
+        "ROLODEX.csv", "PHONE.csv", "FILES.csv", "LEDGER.csv", "TRNSACTN.csv",
+        "QDROS.csv", "PENSIONS.csv", "PLANINFO.csv", "PAYMENTS.csv", "DEPOSITS.csv",
+        "FILENOTS.csv", "FORM_INX.csv", "FORM_LST.csv", "FVARLKUP.csv", "RVARLKUP.csv"
+    ]
+
+    # Sort uploaded files by optimal import order
+    file_map = {f.filename: f for f in files}
+    ordered_files = []
+
+    for file_type in import_order:
+        if file_type in file_map:
+            ordered_files.append((file_type, file_map[file_type]))
+            del file_map[file_type]
+
+    # Add any remaining files not in the predefined order
+    for filename, file in file_map.items():
+        ordered_files.append((filename, file))
+
+    results = []
+    total_imported = 0
+    total_errors = 0
+
+    for file_type, file in ordered_files:
+        if file_type not in CSV_MODEL_MAPPING:
+            results.append({
+                "file_type": file_type,
+                "status": "skipped",
+                "message": f"Unsupported file type: {file_type}"
+            })
+            continue
+
+        try:
+            # Reset file pointer
+            await file.seek(0)
+
+            # Import this file using simplified logic
+            model_class = CSV_MODEL_MAPPING[file_type]
+            field_mapping = FIELD_MAPPINGS.get(file_type, {})
+
+            content = await file.read()
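+            # Decode as utf-8-sig so a leading BOM (common in Excel/Windows
+            # CSV exports) is stripped instead of polluting the first header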
+            csv_content = content.decode('utf-8-sig')
+            csv_reader = csv.DictReader(io.StringIO(csv_content))
+
+            imported_count = 0
+            errors = []
+
+            # If replace_existing is True, clear this model's existing rows first
+            if replace_existing:
+                db.query(model_class).delete()
+                db.commit()
+
+            for row_num, row in enumerate(csv_reader, start=2):
+                try:
+                    model_data = {}
+                    for csv_field, db_field in field_mapping.items():
+                        if csv_field in row:
+                            converted_value = convert_value(row[csv_field], csv_field)
+                            if converted_value is not None:
+                                model_data[db_field] = converted_value
+
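+                    # Skip rows that yielded no truthy values (treated as blank)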
+                    if not any(model_data.values()):
+                        continue
+
+                    instance = model_class(**model_data)
+                    db.add(instance)
+                    imported_count += 1
+
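+                    # Commit in batches of 100 rows to keep transactions bounded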
+                    if imported_count % 100 == 0:
+                        db.commit()
+
+                except Exception as e:
+                    errors.append({
+                        "row": row_num,
+                        "error": str(e)
+                    })
+                    continue
+
+            db.commit()
+
+            total_imported += imported_count
+            total_errors += len(errors)
+
+            results.append({
+                "file_type": file_type,
+                "status": "success" if len(errors) == 0 else "completed_with_errors",
+                "imported_count": imported_count,
+                "errors": len(errors),
+                "message": f"Imported {imported_count} records" + (f" with {len(errors)} errors" if errors else "")
+            })
+
+        except Exception as e:
+            db.rollback()
+            results.append({
+                "file_type": file_type,
+                "status": "failed",
+                "message": f"Import failed: {str(e)}"
+            })
+
+    return {
+        "batch_results": results,
+        "summary": {
+            "total_files": len(files),
+            "successful_files": len([r for r in results if r["status"] in ["success", "completed_with_errors"]]),
+            "failed_files": len([r for r in results if r["status"] == "failed"]),
+            "total_imported": total_imported,
+            "total_errors": total_errors
+        }
+    }
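
For quick manual testing of the new batch endpoint, a client call along these lines should work, assuming the router ends up mounted under /api/import and bearer-token auth (both assumptions; adjust to the actual app wiring):

    # Hypothetical smoke test; URL, port, and TOKEN are placeholders.
    import httpx

    files = [
        ("files", ("STATES.csv", open("STATES.csv", "rb"), "text/csv")),
        ("files", ("ROLODEX.csv", open("ROLODEX.csv", "rb"), "text/csv")),
    ]
    resp = httpx.post(
        "http://localhost:8000/api/import/batch-upload",
        files=files,
        data={"replace_existing": "false"},
        headers={"Authorization": "Bearer TOKEN"},
        timeout=120.0,
    )
    print(resp.json()["summary"])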