feat(import): add real-time progress tracking for CSV imports

This commit is contained in:
HotSwapp
2025-09-04 14:34:14 -05:00
parent 4cc5296268
commit 48ca876123
2 changed files with 203 additions and 8 deletions

View File

@@ -1639,6 +1639,108 @@ async def get_import_progress(
    }


@router.get("/current-batch")
async def get_current_batch(
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Return the most recent running batch import for the current user, if any."""
    try:
        row = (
            db.query(ImportAudit)
            .filter(ImportAudit.status == "running")
            .filter(ImportAudit.initiated_by_user_id == getattr(current_user, "id", None))
            .order_by(ImportAudit.started_at.desc())
            .first()
        )
        if not row:
            return {"running": False}
        return {
            "running": True,
            "audit_id": row.id,
            "started_at": row.started_at.isoformat() if row.started_at else None,
            "total_files": row.total_files,
            "message": row.message,
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to get current batch: {str(e)}")


@router.get("/batch-progress/{audit_id}")
async def get_batch_progress(
    audit_id: int,
    db: Session = Depends(get_db),
    current_user: User = Depends(get_current_user)
):
    """Return real-time progress for a batch import using audit tables as the source of truth."""
    audit = db.query(ImportAudit).filter(ImportAudit.id == audit_id).first()
    if not audit:
        raise HTTPException(status_code=404, detail="Batch not found")

    # Authorization: allow only the initiating user or admins to view progress
    try:
        from app.utils.enhanced_auth import is_admin_user
        is_admin = is_admin_user(current_user)
    except Exception:
        is_admin = False
    if not is_admin and getattr(current_user, "id", None) != getattr(audit, "initiated_by_user_id", None):
        raise HTTPException(status_code=403, detail="Not authorized to view this batch progress")

    # Aggregate per-file results to compute progress
    processed_files = db.query(ImportAuditFile).filter(ImportAuditFile.audit_id == audit.id).count()
    successful_files = db.query(ImportAuditFile).filter(
        ImportAuditFile.audit_id == audit.id,
        ImportAuditFile.status.in_(["success", "completed_with_errors", "skipped"])
    ).count()
    failed_files = db.query(ImportAuditFile).filter(
        ImportAuditFile.audit_id == audit.id,
        ImportAuditFile.status == "failed"
    ).count()

    total_files = audit.total_files or 0
    percent_complete: float = 0.0
    if total_files > 0:
        try:
            percent_complete = round((processed_files / total_files) * 100, 1)
        except Exception:
            percent_complete = 0.0

    data = {
        "audit_id": audit.id,
        "status": audit.status,
        "total_files": total_files,
        "processed_files": processed_files,
        "successful_files": successful_files,
        "failed_files": failed_files,
        "started_at": audit.started_at.isoformat() if audit.started_at else None,
        "finished_at": audit.finished_at.isoformat() if audit.finished_at else None,
        "percent": percent_complete,
        "message": audit.message,
    }

    # Include a brief summary of last processed file if desired (best-effort)
    try:
        last_file = (
            db.query(ImportAuditFile)
            .filter(ImportAuditFile.audit_id == audit.id)
            .order_by(ImportAuditFile.id.desc())
            .first()
        )
        if last_file:
            data["last_file"] = {
                "file_type": last_file.file_type,
                "status": last_file.status,
                "imported_count": last_file.imported_count,
                "errors": last_file.errors,
                "message": last_file.message,
                "created_at": last_file.created_at.isoformat() if last_file.created_at else None,
            }
    except Exception:
        pass

    return data


@router.post("/batch-validate")
async def batch_validate_csv_files(
    files: List[UploadFile] = UploadFileForm(...),
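
The two GET endpoints above can be exercised directly with FastAPI's TestClient. A minimal test sketch follows, assuming the app object, the dependency functions, and an empty db_session fixture live at the import paths shown (none of these paths appear in this diff, so adjust them to the real project layout):

from fastapi.testclient import TestClient

from app.main import app                                  # assumed app location
from app.api.imports import get_db, get_current_user      # assumed dependency module


class FakeUser:
    id = 42  # stand-in for the authenticated user


def test_progress_endpoints(db_session):  # db_session: assumed empty test-database fixture
    app.dependency_overrides[get_current_user] = lambda: FakeUser()
    app.dependency_overrides[get_db] = lambda: db_session
    client = TestClient(app)

    # With no running ImportAudit row for this user, /current-batch reports "not running".
    resp = client.get("/api/import/current-batch")
    assert resp.status_code == 200
    assert resp.json() == {"running": False}

    # An unknown audit id yields 404 from /batch-progress/{audit_id}.
    resp = client.get("/api/import/batch-progress/999999")
    assert resp.status_code == 404

    app.dependency_overrides.clear()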

View File

@@ -432,6 +432,8 @@ document.addEventListener('DOMContentLoaded', function() {
    setTimeout(() => {
        document.getElementById('uploadMode').value = 'batch';
        switchUploadMode();
        // Resume progress monitoring if a batch is already running for this user
        resumeBatchProgressIfRunning();
    }, 100);
});
@@ -857,20 +859,109 @@ function displayImportResults(result) {
    panel.classList.remove('hidden');
}

-function showProgress(show, message = '') {
+function showProgress(show, message = '', percent = null) {
    const panel = document.getElementById('progressPanel');
    const status = document.getElementById('progressStatus');
    const bar = document.getElementById('progressBar');
    if (show) {
        status.textContent = message;
-        bar.style.width = '100%';
+        if (percent === null || isNaN(percent)) {
+            bar.style.width = '100%';
+        } else {
+            const clamped = Math.max(0, Math.min(100, Number(percent)));
+            bar.style.width = clamped + '%';
+        }
        panel.classList.remove('hidden');
    } else {
        panel.classList.add('hidden');
    }
}

// -----------------------------
// Batch progress monitoring
// -----------------------------
const TERMINAL_BATCH_STATUSES = new Set(['success', 'completed_with_errors', 'failed']);
let batchProgress = { timer: null, auditId: null };

async function fetchCurrentBatch() {
    try {
        const resp = await window.http.wrappedFetch('/api/import/current-batch');
        if (!resp.ok) return null;
        const json = await resp.json();
        return json && json.running ? json : null;
    } catch (_) { return null; }
}

function stopBatchProgressPolling() {
    if (batchProgress.timer) {
        clearInterval(batchProgress.timer);
        batchProgress.timer = null;
    }
    batchProgress.auditId = null;
}

async function pollBatchProgressOnce(auditId) {
    try {
        const resp = await window.http.wrappedFetch(`/api/import/batch-progress/${encodeURIComponent(auditId)}`);
        if (!resp.ok) return;
        const p = await resp.json();
        const percent = Number(p.percent || 0);
        const total = Number(p.total_files || 0);
        const processed = Number(p.processed_files || 0);
        const status = String(p.status || 'running');
        const statusNice = status.replaceAll('_', ' ');
        const msg = total > 0
            ? `Processing ${processed}/${total} (${percent.toFixed(1)}%) · ${statusNice}`
            : `Processing… ${statusNice}`;
        showProgress(true, msg, percent);
        if (TERMINAL_BATCH_STATUSES.has(status)) {
            stopBatchProgressPolling();
        }
    } catch (_) { /* ignore */ }
}

function startBatchProgressPolling(auditId) {
    stopBatchProgressPolling();
    batchProgress.auditId = auditId;
    // immediate + interval polling
    pollBatchProgressOnce(auditId);
    batchProgress.timer = setInterval(() => pollBatchProgressOnce(auditId), 1500);
}

async function ensureAuditIdWithRetry(maxAttempts = 10, delayMs = 500) {
    for (let attempt = 0; attempt < maxAttempts; attempt++) {
        const cur = await fetchCurrentBatch();
        if (cur && cur.audit_id) return cur.audit_id;
        await new Promise(r => setTimeout(r, delayMs));
    }
    return null;
}

async function monitorBatchProgressDuring(promise) {
    try {
        const id = await ensureAuditIdWithRetry();
        if (id) {
            startBatchProgressPolling(id);
        }
    } catch (_) {}
    try {
        await promise; // wait for upload completion
    } finally {
        stopBatchProgressPolling();
    }
}

async function resumeBatchProgressIfRunning() {
    try {
        const cur = await fetchCurrentBatch();
        if (cur && cur.audit_id) {
            showProgress(true, 'Resuming import progress…', 0);
            startBatchProgressPolling(cur.audit_id);
        }
    } catch (_) {}
}

async function clearTable() {
    const fileType = document.getElementById('clearTableType').value;
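
The same polling contract the page uses can be followed from a terminal, which helps when debugging long imports without the UI. A minimal sketch, assuming the API is served at http://localhost:8000 and that a bearer token satisfies the get_current_user dependency (the actual auth scheme is not shown in this diff):

import time
import requests  # any HTTP client works; requests is assumed to be available

BASE = "http://localhost:8000"                 # assumed host/port
HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth scheme; replace with the real one
TERMINAL = {"success", "completed_with_errors", "failed"}  # mirrors TERMINAL_BATCH_STATUSES


def watch_current_batch(poll_seconds: float = 1.5) -> None:
    cur = requests.get(f"{BASE}/api/import/current-batch", headers=HEADERS).json()
    if not cur.get("running"):
        print("no batch import running")
        return
    audit_id = cur["audit_id"]
    while True:
        p = requests.get(f"{BASE}/api/import/batch-progress/{audit_id}", headers=HEADERS).json()
        print(f'{p["processed_files"]}/{p["total_files"]} files, {p["percent"]}%, status={p["status"]}')
        if p["status"] in TERMINAL:
            break
        time.sleep(poll_seconds)


if __name__ == "__main__":
    watch_current_batch()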
@@ -1389,12 +1480,14 @@ async function handleBatchImport(event) {
        formData.append('replace_existing', replaceExisting);
        try {
-            showProgress(true, 'Processing batch import...');
-            const response = await window.http.wrappedFetch('/api/import/batch-upload', {
-                method: 'POST',
-                body: formData
-            });
+            showProgress(true, 'Processing batch import...', 0);
+            // Kick off upload and monitor progress concurrently
+            const uploadPromise = window.http.wrappedFetch('/api/import/batch-upload', {
+                method: 'POST',
+                body: formData
+            });
+            monitorBatchProgressDuring(uploadPromise);
+            const response = await uploadPromise;
            if (!response.ok) {
                const error = await response.json();
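
Both progress endpoints only read the audit tables; they rely on the /api/import/batch-upload handler (unchanged in this diff) to create an ImportAudit row with status "running" up front and to append one ImportAuditFile row per processed file. A rough sketch of that producer contract, using only the field names visible above; the helpers and the overall flow are assumptions, not the project's actual implementation:

# Sketch only: field and status names are taken from the endpoints above;
# import_one_file and the surrounding flow are hypothetical.
from datetime import datetime, timezone


def run_batch_import(db, user, files):
    audit = ImportAudit(
        status="running",
        total_files=len(files),
        started_at=datetime.now(timezone.utc),
        initiated_by_user_id=user.id,
        message="Batch import started",
    )
    db.add(audit)
    db.commit()  # commit early so /current-batch and /batch-progress can see the row

    failures = 0
    for f in files:
        result = import_one_file(db, f)  # hypothetical per-file importer
        if result.status == "failed":
            failures += 1
        db.add(ImportAuditFile(
            audit_id=audit.id,
            file_type=result.file_type,
            status=result.status,            # e.g. "success", "skipped", "failed"
            imported_count=result.imported_count,
            errors=result.errors,
            message=result.message,
        ))
        db.commit()  # each commit advances processed_files for the polling endpoint

    audit.status = "completed_with_errors" if failures else "success"
    audit.finished_at = datetime.now(timezone.utc)
    db.commit()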