Compare commits
10 Commits: 29e33356c5...main

| SHA1 |
|---|
| c68e36e6c6 |
| 1116658d40 |
| 830ddcc4d1 |
| f0eecdf83e |
| c30c1b1653 |
| 7e9bfcec5e |
| f7644a4f67 |
| 16d7455f85 |
| a7a03f8369 |
| e69f2fe700 |
DOCKER_USAGE.md (new file, 79 lines)
@@ -0,0 +1,79 @@
# Docker Usage Guide

## Quick Start

### Development Environment
```bash
# Build and start development container
docker-compose -f docker-compose.dev.yml up --build

# Or use the build script
./docker-build.sh
docker-compose -f docker-compose.dev.yml up
```

### Production Environment
```bash
# Build and start production container
docker-compose up --build

# Or use the build script
./docker-build.sh
docker-compose up
```

## Available Images

- `delphi-database:dev` - Development image with source code mounting and auto-reload
- `delphi-database:latest` - Production image optimized for performance
- `delphi-database:<version>` - Tagged production image with version

## Environment Variables

### Required for Production
- `SECRET_KEY` - JWT secret key (generate with `openssl rand -base64 32`)
- `ADMIN_PASSWORD` - Initial admin user password

### Optional Configuration
- `DATABASE_URL` - Database connection string (default: sqlite:///app/data/delphi_database.db)
- `DEBUG` - Enable debug mode (default: False for production, True for development)
- `EXTERNAL_PORT` - External port mapping (default: 6920)
- `WORKERS` - Number of Gunicorn workers (default: 4)
- `LOG_LEVEL` - Logging level (default: INFO for production, DEBUG for development)
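The variables above feed the application's runtime configuration. As a rough illustration only (the real settings object lives in `app.config`, which is not part of this diff, and the names below are assumptions drawn from the list above), the documented defaults could be read like this:

```python
# Illustrative sketch only: the actual Settings class in app.config is not shown
# in this diff; field names and parsing are assumptions based on the list above.
import os

SECRET_KEY = os.environ.get("SECRET_KEY")          # required in production
ADMIN_PASSWORD = os.environ.get("ADMIN_PASSWORD")  # required in production
DATABASE_URL = os.environ.get("DATABASE_URL", "sqlite:///app/data/delphi_database.db")
DEBUG = os.environ.get("DEBUG", "False").lower() == "true"
EXTERNAL_PORT = int(os.environ.get("EXTERNAL_PORT", "6920"))
WORKERS = int(os.environ.get("WORKERS", "4"))
LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
```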
## Container Access

The application runs on port 8000 inside the container and is mapped to port 6920 on the host by default.

- Application: http://localhost:6920
- Health check: http://localhost:6920/health

## Data Persistence

### Development
- Source code: Mounted from host for live reload
- Database: Docker volume `delphi_dev_data`
- Uploads: Docker volume `delphi_dev_uploads`
- Backups: Docker volume `delphi_dev_backups`

### Production
- Database: Host directory `./data` (for easy backup)
- Uploads: Docker volume `delphi_uploads`
- Backups: Docker volume `delphi_backups`
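Because the production database is a plain SQLite file under `./data`, a consistent snapshot can be taken with Python's built-in backup API. This is a sketch, not part of the repository; the path assumes the default `DATABASE_URL` shown above.

```python
# Sketch: copy the live SQLite database to a timestamped backup file.
# Assumes the default production layout (./data/delphi_database.db); adjust as needed.
import sqlite3
from datetime import datetime

src = sqlite3.connect("data/delphi_database.db")
dst = sqlite3.connect(f"data/delphi_database_{datetime.now():%Y%m%d_%H%M%S}.bak.db")
with dst:
    src.backup(dst)  # online, consistent copy even while the app is writing
src.close()
dst.close()
```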
## Troubleshooting

### Container won't start
1. Check if port 6920 is already in use
2. Ensure required environment variables are set
3. Check Docker logs: `docker-compose logs`

### Database issues
1. Check database file permissions in `./data` directory
2. Ensure the container can write to mounted volumes
3. Check initialization logs for admin user creation

### Build failures
1. Ensure Docker daemon is running
2. Clear Docker cache: `docker system prune -f`
3. Rebuild without cache: `docker-compose build --no-cache`
@@ -49,7 +49,7 @@ USER delphi
 EXPOSE 8000

 # Health check
-HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
+HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
     CMD curl -f http://localhost:8000/health || exit 1

 # Start command
@@ -1,5 +1,7 @@
+# syntax=docker/dockerfile:1.5
 # Production Dockerfile for Delphi Consulting Group Database System
-FROM python:3.12-slim as builder
+ARG BASE_IMAGE=python:3.12-slim
+FROM ${BASE_IMAGE} as builder

 # Set build arguments
 ARG BUILD_DATE
@@ -19,10 +21,12 @@ WORKDIR /app

 # Copy requirements and install dependencies
 COPY requirements.txt .
-RUN pip install --no-cache-dir --user -r requirements.txt
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip install --upgrade pip \
+    && pip install --user -r requirements.txt

 # Production stage
-FROM python:3.12-slim
+FROM ${BASE_IMAGE}

 # Set labels
 LABEL maintainer="Delphi Consulting Group Inc." \
@@ -41,6 +45,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
 # Install runtime dependencies only
 RUN apt-get update \
     && apt-get install -y --no-install-recommends \
+    bash \
     curl \
     sqlite3 \
     tini \
@@ -54,15 +59,16 @@ RUN addgroup --system --gid 1001 delphi \
 # Set work directory
 WORKDIR /app

-# Copy Python packages from builder
-COPY --from=builder /root/.local /root/.local
+# Copy Python packages from builder into system prefix so non-root user can access them
+COPY --from=builder /root/.local /usr/local

 # Copy application code
 COPY --chown=delphi:delphi . .

 # Copy and set up initialization script
 COPY scripts/init-container.sh /usr/local/bin/init-container.sh
-RUN chmod +x /usr/local/bin/init-container.sh
+# Normalize line endings to avoid shebang issues and ensure executable
+RUN sed -i 's/\r$//' /usr/local/bin/init-container.sh && chmod +x /usr/local/bin/init-container.sh

 # Create necessary directories
 RUN mkdir -p /app/data /app/uploads /app/backups /app/exports /app/logs \
@@ -85,4 +91,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
 ENTRYPOINT ["/usr/bin/tini", "--", "/usr/local/bin/init-container.sh"]

 # Start with gunicorn for production
-CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "--bind", "0.0.0.0:8000", "--timeout", "120", "--keepalive", "5"]
+CMD ["gunicorn", "app.main:app", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "--bind", "0.0.0.0:8000", "--timeout", "120", "--keep-alive", "5"]
@@ -1,281 +0,0 @@
"""
Flexible Imports admin API: list, filter, and export unmapped rows captured during CSV imports.
"""
from typing import Optional, Dict, Any, List
from datetime import datetime
import csv
import io

from fastapi import APIRouter, Depends, Query, HTTPException
from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session
from sqlalchemy import func, or_, cast, String

from app.database.base import get_db
from app.auth.security import get_admin_user
from app.models.flexible import FlexibleImport


router = APIRouter(prefix="/flexible", tags=["flexible"])


@router.get("/imports")
async def list_flexible_imports(
    file_type: Optional[str] = Query(None, description="Filter by CSV file type (e.g., FILES.csv)"),
    target_table: Optional[str] = Query(None, description="Filter by target model table name"),
    q: Optional[str] = Query(None, description="Quick text search across file type, target table, and unmapped data"),
    has_keys: Optional[List[str]] = Query(
        None,
        description="Filter rows where extra_data (or its 'unmapped' payload) contains these keys. Repeat param for multiple keys.",
    ),
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=500),
    db: Session = Depends(get_db),
    current_user=Depends(get_admin_user),
):
    """List flexible import rows with optional filtering, quick search, and pagination."""
    query = db.query(FlexibleImport)
    if file_type:
        query = query.filter(FlexibleImport.file_type == file_type)
    if target_table:
        query = query.filter(FlexibleImport.target_table == target_table)
    if q:
        pattern = f"%{q.strip()}%"
        # Search across file_type, target_table, and serialized JSON extra_data
        query = query.filter(
            or_(
                FlexibleImport.file_type.ilike(pattern),
                FlexibleImport.target_table.ilike(pattern),
                cast(FlexibleImport.extra_data, String).ilike(pattern),
            )
        )

    # Filter by key presence inside JSON payload by string matching of the serialized JSON.
    # This is DB-agnostic and works across SQLite/Postgres, though not as precise as JSON operators.
    if has_keys:
        for k in [k for k in has_keys if k is not None and str(k).strip() != ""]:
            key = str(k).strip()
            # Look for the JSON key token followed by a colon, e.g. "key":
            query = query.filter(cast(FlexibleImport.extra_data, String).ilike(f'%"{key}":%'))

    total = query.count()
    items = (
        query.order_by(FlexibleImport.id.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    def serialize(item: FlexibleImport) -> Dict[str, Any]:
        return {
            "id": item.id,
            "file_type": item.file_type,
            "target_table": item.target_table,
            "primary_key_field": item.primary_key_field,
            "primary_key_value": item.primary_key_value,
            "extra_data": item.extra_data,
        }

    return {
        "total": total,
        "skip": skip,
        "limit": limit,
        "items": [serialize(i) for i in items],
    }


@router.get("/options")
async def flexible_options(
    db: Session = Depends(get_db),
    current_user=Depends(get_admin_user),
):
    """Return distinct file types and target tables for filter dropdowns."""
    file_types: List[str] = [
        ft for (ft,) in db.query(func.distinct(FlexibleImport.file_type)).order_by(FlexibleImport.file_type.asc()).all()
        if ft is not None
    ]
    target_tables: List[str] = [
        tt for (tt,) in db.query(func.distinct(FlexibleImport.target_table)).order_by(FlexibleImport.target_table.asc()).all()
        if tt is not None and tt != ""
    ]
    return {"file_types": file_types, "target_tables": target_tables}


@router.get("/export")
async def export_unmapped_csv(
    file_type: Optional[str] = Query(None, description="Filter by CSV file type (e.g., FILES.csv)"),
    target_table: Optional[str] = Query(None, description="Filter by target model table name"),
    has_keys: Optional[List[str]] = Query(
        None,
        description="Filter rows where extra_data (or its 'unmapped' payload) contains these keys. Repeat param for multiple keys.",
    ),
    db: Session = Depends(get_db),
    current_user=Depends(get_admin_user),
):
    """Export unmapped rows as CSV for review. Includes basic metadata columns and unmapped fields.

    If FlexibleImport.extra_data contains a nested 'unmapped' dict, those keys are exported.
    Otherwise, all keys of extra_data are exported.
    """
    query = db.query(FlexibleImport)
    if file_type:
        query = query.filter(FlexibleImport.file_type == file_type)
    if target_table:
        query = query.filter(FlexibleImport.target_table == target_table)
    if has_keys:
        for k in [k for k in has_keys if k is not None and str(k).strip() != ""]:
            key = str(k).strip()
            query = query.filter(cast(FlexibleImport.extra_data, String).ilike(f'%"{key}":%'))

    rows: List[FlexibleImport] = query.order_by(FlexibleImport.id.asc()).all()
    if not rows:
        raise HTTPException(status_code=404, detail="No matching flexible imports to export")

    # Determine union of unmapped keys across all rows
    unmapped_keys: List[str] = []
    key_set = set()
    for r in rows:
        data = r.extra_data or {}
        payload = data.get("unmapped") if isinstance(data, dict) and isinstance(data.get("unmapped"), dict) else data
        if isinstance(payload, dict):
            for k in payload.keys():
                if k not in key_set:
                    key_set.add(k)
                    unmapped_keys.append(k)

    # Prepare CSV
    meta_headers = [
        "id",
        "file_type",
        "target_table",
        "primary_key_field",
        "primary_key_value",
    ]
    fieldnames = meta_headers + unmapped_keys

    output = io.StringIO()
    writer = csv.DictWriter(output, fieldnames=fieldnames)
    writer.writeheader()

    for r in rows:
        row_out: Dict[str, Any] = {
            "id": r.id,
            "file_type": r.file_type,
            "target_table": r.target_table or "",
            "primary_key_field": r.primary_key_field or "",
            "primary_key_value": r.primary_key_value or "",
        }
        data = r.extra_data or {}
        payload = data.get("unmapped") if isinstance(data, dict) and isinstance(data.get("unmapped"), dict) else data
        if isinstance(payload, dict):
            for k in unmapped_keys:
                v = payload.get(k)
                # Normalize lists/dicts to JSON strings for CSV safety
                if isinstance(v, (dict, list)):
                    try:
                        import json as _json
                        row_out[k] = _json.dumps(v, ensure_ascii=False)
                    except Exception:
                        row_out[k] = str(v)
                else:
                    row_out[k] = v if v is not None else ""
        writer.writerow(row_out)

    output.seek(0)
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename_parts = ["flexible_unmapped"]
    if file_type:
        filename_parts.append(file_type.replace("/", "-").replace(" ", "_"))
    if target_table:
        filename_parts.append(target_table.replace("/", "-").replace(" ", "_"))
    filename = "_".join(filename_parts) + f"_{timestamp}.csv"

    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={
            "Content-Disposition": f"attachment; filename=\"{filename}\"",
        },
    )


@router.get("/export/{row_id}")
async def export_single_row_csv(
    row_id: int,
    db: Session = Depends(get_db),
    current_user=Depends(get_admin_user),
):
    """Export a single flexible import row as CSV.

    Includes metadata columns plus keys from the row's unmapped payload.
    If FlexibleImport.extra_data contains a nested 'unmapped' dict, those keys are exported;
    otherwise, all keys of extra_data are exported.
    """
    row: Optional[FlexibleImport] = (
        db.query(FlexibleImport).filter(FlexibleImport.id == row_id).first()
    )
    if not row:
        raise HTTPException(status_code=404, detail="Flexible import row not found")

    data = row.extra_data or {}
    payload = (
        data.get("unmapped")
        if isinstance(data, dict) and isinstance(data.get("unmapped"), dict)
        else data
    )

    unmapped_keys: List[str] = []
    if isinstance(payload, dict):
        for k in payload.keys():
            unmapped_keys.append(k)

    meta_headers = [
        "id",
        "file_type",
        "target_table",
        "primary_key_field",
        "primary_key_value",
    ]
    fieldnames = meta_headers + unmapped_keys

    output = io.StringIO()
    writer = csv.DictWriter(output, fieldnames=fieldnames)
    writer.writeheader()

    row_out: Dict[str, Any] = {
        "id": row.id,
        "file_type": row.file_type,
        "target_table": row.target_table or "",
        "primary_key_field": row.primary_key_field or "",
        "primary_key_value": row.primary_key_value or "",
    }
    if isinstance(payload, dict):
        for k in unmapped_keys:
            v = payload.get(k)
            if isinstance(v, (dict, list)):
                try:
                    import json as _json
                    row_out[k] = _json.dumps(v, ensure_ascii=False)
                except Exception:
                    row_out[k] = str(v)
            else:
                row_out[k] = v if v is not None else ""

    writer.writerow(row_out)
    output.seek(0)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = (
        f"flexible_row_{row.id}_{row.file_type.replace('/', '-').replace(' ', '_')}_{timestamp}.csv"
        if row.file_type
        else f"flexible_row_{row.id}_{timestamp}.csv"
    )

    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={
            "Content-Disposition": f"attachment; filename=\"{filename}\"",
        },
    )
app/api/import_csv.py (new file, 771 lines)
@@ -0,0 +1,771 @@
"""
|
||||||
|
CSV Import API Endpoints
|
||||||
|
"""
|
||||||
|
from fastapi import APIRouter, Depends, File, UploadFile, Form, HTTPException, BackgroundTasks, Body
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from typing import List, Optional, Dict, Any
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from app.database.base import get_db
|
||||||
|
from app.auth.security import get_admin_user
|
||||||
|
from app.models.user import User
|
||||||
|
from app.import_export.import_service import ImportService, TableType
|
||||||
|
from app.core.logging import get_logger
|
||||||
|
|
||||||
|
logger = get_logger("import_api")
|
||||||
|
router = APIRouter()
|
||||||
|
|
||||||
|
# In-memory storage for import progress (could be moved to Redis in production)
|
||||||
|
import_progress = {}
|
||||||
|
|
||||||
|
|
||||||
|
class ImportStatus:
|
||||||
|
"""Track import operation status"""
|
||||||
|
def __init__(self, import_id: str, table_name: str):
|
||||||
|
self.import_id = import_id
|
||||||
|
self.table_name = table_name
|
||||||
|
self.status = "PROCESSING"
|
||||||
|
self.started_at = datetime.utcnow()
|
||||||
|
self.completed_at = None
|
||||||
|
self.result = None
|
||||||
|
self.error = None
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/tables")
|
||||||
|
async def get_supported_tables(
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get list of supported tables for import"""
|
||||||
|
try:
|
||||||
|
service = ImportService(db)
|
||||||
|
tables = service.get_supported_tables()
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"tables": tables,
|
||||||
|
"total": len(tables)
|
||||||
|
}
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting supported tables: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to get supported tables")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/discover-files")
|
||||||
|
async def discover_csv_files(
|
||||||
|
current_user: User = Depends(get_admin_user)
|
||||||
|
):
|
||||||
|
"""Discover available CSV files in the old database directory"""
|
||||||
|
try:
|
||||||
|
import os
|
||||||
|
import glob
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Look for CSV files in the old database directory
|
||||||
|
base_dir = Path("old database/Office")
|
||||||
|
csv_files = []
|
||||||
|
|
||||||
|
if base_dir.exists():
|
||||||
|
# Find all CSV files
|
||||||
|
for csv_file in glob.glob(str(base_dir / "**/*.csv"), recursive=True):
|
||||||
|
file_path = Path(csv_file)
|
||||||
|
relative_path = file_path.relative_to(base_dir)
|
||||||
|
|
||||||
|
# Try to map to known table types
|
||||||
|
filename = file_path.stem.upper()
|
||||||
|
table_mapping = {
|
||||||
|
"ROLODEX": "rolodex",
|
||||||
|
"ROLEX_V": "rolodex", # ROLEX_V variant
|
||||||
|
"PHONE": "phone",
|
||||||
|
"FILES": "files",
|
||||||
|
"FILES_R": "files",
|
||||||
|
"FILES_V": "files",
|
||||||
|
"LEDGER": "ledger",
|
||||||
|
"QDROS": "qdros",
|
||||||
|
"PAYMENTS": "ledger",
|
||||||
|
"DEPOSITS": "ledger",
|
||||||
|
"EMPLOYEE": "employee",
|
||||||
|
"SETUP": "setup",
|
||||||
|
"FILETYPE": "filetype",
|
||||||
|
"TRNSTYPE": "trnstype",
|
||||||
|
"TRNSACTN": "trnsactn",
|
||||||
|
"TRNSLKUP": "trnslkup",
|
||||||
|
"PENSIONS": "pensions"
|
||||||
|
}
|
||||||
|
|
||||||
|
suggested_table = table_mapping.get(filename, "unknown")
|
||||||
|
|
||||||
|
csv_files.append({
|
||||||
|
"filename": file_path.name,
|
||||||
|
"path": str(relative_path),
|
||||||
|
"full_path": str(file_path),
|
||||||
|
"suggested_table": suggested_table,
|
||||||
|
"size": file_path.stat().st_size if file_path.exists() else 0
|
||||||
|
})
|
||||||
|
|
||||||
|
# Sort by filename
|
||||||
|
csv_files.sort(key=lambda x: x["filename"])
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"files": csv_files,
|
||||||
|
"total": len(csv_files),
|
||||||
|
"base_directory": str(base_dir)
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error discovering CSV files: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to discover CSV files")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/tables/{table_name}/schema")
|
||||||
|
async def get_table_schema(
|
||||||
|
table_name: str,
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get schema information for a specific table"""
|
||||||
|
try:
|
||||||
|
service = ImportService(db)
|
||||||
|
schema = service.get_table_schema(table_name)
|
||||||
|
|
||||||
|
if not schema:
|
||||||
|
raise HTTPException(status_code=404, detail=f"Table '{table_name}' not found")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"schema": schema
|
||||||
|
}
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting table schema for {table_name}: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to get table schema")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/validate")
|
||||||
|
async def validate_csv_headers(
|
||||||
|
table_name: str = Form(...),
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Validate CSV headers without importing data"""
|
||||||
|
try:
|
||||||
|
# Read file content with encoding detection
|
||||||
|
content = await file.read()
|
||||||
|
|
||||||
|
# Try multiple encodings
|
||||||
|
encodings = ['utf-8', 'windows-1252', 'iso-8859-1', 'cp1252', 'latin-1']
|
||||||
|
csv_content = None
|
||||||
|
used_encoding = None
|
||||||
|
|
||||||
|
for encoding in encodings:
|
||||||
|
try:
|
||||||
|
csv_content = content.decode(encoding)
|
||||||
|
used_encoding = encoding
|
||||||
|
break
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if csv_content is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Could not decode file. Please ensure it's a valid text file."
|
||||||
|
)
|
||||||
|
|
||||||
|
service = ImportService(db)
|
||||||
|
result = service.validate_csv_headers(table_name, csv_content)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": result.success,
|
||||||
|
"table_name": table_name,
|
||||||
|
"filename": file.filename,
|
||||||
|
"validation_result": result.to_dict()
|
||||||
|
}
|
||||||
|
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
raise HTTPException(status_code=400, detail="Could not decode file. Please ensure it's a valid text file.")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error validating CSV headers: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to validate CSV headers")
|
||||||
|
|
||||||
|
|
||||||
|
async def process_import_background(
|
||||||
|
import_id: str,
|
||||||
|
table_name: str,
|
||||||
|
csv_content: str,
|
||||||
|
db: Session
|
||||||
|
):
|
||||||
|
"""Background task to process CSV import"""
|
||||||
|
try:
|
||||||
|
logger.info(f"Starting background import {import_id} for table {table_name}")
|
||||||
|
print(f"[IMPORT] Starting background import {import_id} for table {table_name}")
|
||||||
|
|
||||||
|
service = ImportService(db)
|
||||||
|
result = service.import_csv(table_name, csv_content, import_id=import_id)
|
||||||
|
|
||||||
|
# Update progress
|
||||||
|
if import_id in import_progress:
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
progress.status = "COMPLETED" if result.success else "FAILED"
|
||||||
|
progress.completed_at = datetime.utcnow()
|
||||||
|
progress.result = result
|
||||||
|
|
||||||
|
logger.info(f"Import {import_id} completed with {result.imported_rows} rows imported")
|
||||||
|
print(f"[IMPORT] Import {import_id} completed: success={result.success}, rows={result.imported_rows}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Background import {import_id} failed: {str(e)}")
|
||||||
|
print(f"[IMPORT] Background import {import_id} failed: {str(e)}")
|
||||||
|
if import_id in import_progress:
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
progress.status = "FAILED"
|
||||||
|
progress.completed_at = datetime.utcnow()
|
||||||
|
progress.error = str(e)
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/csv")
|
||||||
|
async def import_csv_file(
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
table_name: str = Form(...),
|
||||||
|
file: UploadFile = File(...),
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Import CSV file to specified table"""
|
||||||
|
try:
|
||||||
|
logger.info(f"Received CSV import request: table={table_name}, file={file.filename}, user={current_user.username}")
|
||||||
|
print(f"[IMPORT API] CSV import request: table={table_name}, file={file.filename}")
|
||||||
|
|
||||||
|
# Validate table name
|
||||||
|
if table_name.lower() not in [t.value for t in TableType]:
|
||||||
|
print(f"[IMPORT API] Invalid table name: {table_name}")
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Unsupported table: {table_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Validate file type
|
||||||
|
if not file.filename.lower().endswith('.csv'):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="File must be a CSV file"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Read file content with encoding detection
|
||||||
|
content = await file.read()
|
||||||
|
|
||||||
|
# Try multiple encodings
|
||||||
|
encodings = ['utf-8', 'windows-1252', 'iso-8859-1', 'cp1252', 'latin-1']
|
||||||
|
csv_content = None
|
||||||
|
used_encoding = None
|
||||||
|
|
||||||
|
for encoding in encodings:
|
||||||
|
try:
|
||||||
|
csv_content = content.decode(encoding)
|
||||||
|
used_encoding = encoding
|
||||||
|
break
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if csv_content is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Could not decode file. Please ensure it's a valid text file."
|
||||||
|
)
|
||||||
|
|
||||||
|
if not csv_content.strip():
|
||||||
|
raise HTTPException(status_code=400, detail="File is empty")
|
||||||
|
|
||||||
|
# Generate import ID
|
||||||
|
import_id = str(uuid.uuid4())
|
||||||
|
print(f"[IMPORT API] Generated import ID: {import_id}")
|
||||||
|
|
||||||
|
# Create progress tracker
|
||||||
|
progress = ImportStatus(import_id, table_name)
|
||||||
|
import_progress[import_id] = progress
|
||||||
|
|
||||||
|
# Start background import
|
||||||
|
background_tasks.add_task(
|
||||||
|
process_import_background,
|
||||||
|
import_id,
|
||||||
|
table_name,
|
||||||
|
csv_content,
|
||||||
|
db
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Started CSV import {import_id} for table {table_name}")
|
||||||
|
print(f"[IMPORT API] Background task queued for import {import_id}")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"import_id": import_id,
|
||||||
|
"table_name": table_name,
|
||||||
|
"filename": file.filename,
|
||||||
|
"status": "PROCESSING",
|
||||||
|
"message": "Import started successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
raise HTTPException(status_code=400, detail="Could not decode file. Please ensure it's a valid text file.")
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error starting CSV import: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to start import")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/status/{import_id}")
|
||||||
|
async def get_import_status(
|
||||||
|
import_id: str,
|
||||||
|
current_user: User = Depends(get_admin_user)
|
||||||
|
):
|
||||||
|
"""Get status of an import operation"""
|
||||||
|
try:
|
||||||
|
if import_id not in import_progress:
|
||||||
|
raise HTTPException(status_code=404, detail="Import not found")
|
||||||
|
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
|
||||||
|
response = {
|
||||||
|
"import_id": import_id,
|
||||||
|
"table_name": progress.table_name,
|
||||||
|
"status": progress.status,
|
||||||
|
"started_at": progress.started_at.isoformat(),
|
||||||
|
"completed_at": progress.completed_at.isoformat() if progress.completed_at else None
|
||||||
|
}
|
||||||
|
|
||||||
|
if progress.result:
|
||||||
|
response["result"] = progress.result.to_dict()
|
||||||
|
# Also include error details if the import failed
|
||||||
|
if not progress.result.success and progress.result.errors:
|
||||||
|
response["error"] = "; ".join(progress.result.errors[:3])
|
||||||
|
elif progress.error:
|
||||||
|
response["error"] = progress.error
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting import status: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to get import status")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/batch")
|
||||||
|
async def batch_import_csv(
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
files: List[UploadFile] = File(...),
|
||||||
|
table_names: List[str] = Form(...),
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Import multiple CSV files in batch"""
|
||||||
|
try:
|
||||||
|
if len(files) != len(table_names):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Number of files must match number of table names"
|
||||||
|
)
|
||||||
|
|
||||||
|
imports = []
|
||||||
|
import_ids = []
|
||||||
|
|
||||||
|
for i, (file, table_name) in enumerate(zip(files, table_names)):
|
||||||
|
# Validate table name
|
||||||
|
if table_name.lower() not in [t.value for t in TableType]:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Unsupported table: {table_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Validate file type
|
||||||
|
if not file.filename.lower().endswith('.csv'):
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"File {file.filename} must be a CSV file"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Read file content with encoding detection
|
||||||
|
content = await file.read()
|
||||||
|
|
||||||
|
# Try multiple encodings
|
||||||
|
encodings = ['utf-8', 'windows-1252', 'iso-8859-1', 'cp1252', 'latin-1']
|
||||||
|
csv_content = None
|
||||||
|
used_encoding = None
|
||||||
|
|
||||||
|
for encoding in encodings:
|
||||||
|
try:
|
||||||
|
csv_content = content.decode(encoding)
|
||||||
|
used_encoding = encoding
|
||||||
|
break
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if csv_content is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Could not decode file. Please ensure it's a valid text file."
|
||||||
|
)
|
||||||
|
|
||||||
|
if not csv_content.strip():
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"File {file.filename} is empty"
|
||||||
|
)
|
||||||
|
|
||||||
|
imports.append({
|
||||||
|
"table_name": table_name,
|
||||||
|
"csv_content": csv_content,
|
||||||
|
"filename": file.filename
|
||||||
|
})
|
||||||
|
|
||||||
|
# Generate import ID for tracking
|
||||||
|
import_id = str(uuid.uuid4())
|
||||||
|
import_ids.append(import_id)
|
||||||
|
|
||||||
|
# Create progress tracker
|
||||||
|
progress = ImportStatus(import_id, table_name)
|
||||||
|
import_progress[import_id] = progress
|
||||||
|
|
||||||
|
# Process batch import in background
|
||||||
|
async def process_batch_background():
|
||||||
|
try:
|
||||||
|
service = ImportService(db)
|
||||||
|
results = service.batch_import(imports)
|
||||||
|
|
||||||
|
# Update progress for each import
|
||||||
|
for i, import_id in enumerate(import_ids):
|
||||||
|
if import_id in import_progress:
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
table_name = progress.table_name
|
||||||
|
|
||||||
|
# Find result for this table
|
||||||
|
result = None
|
||||||
|
for key, res in results.items():
|
||||||
|
if key.startswith(table_name):
|
||||||
|
result = res
|
||||||
|
break
|
||||||
|
|
||||||
|
if result:
|
||||||
|
progress.status = "COMPLETED" if result.success else "FAILED"
|
||||||
|
progress.result = result
|
||||||
|
# If import failed, capture the error details
|
||||||
|
if not result.success and result.errors:
|
||||||
|
progress.error = "; ".join(result.errors[:3]) # Show first 3 errors
|
||||||
|
else:
|
||||||
|
progress.status = "FAILED"
|
||||||
|
progress.error = "No result found"
|
||||||
|
|
||||||
|
progress.completed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Batch import failed: {str(e)}")
|
||||||
|
for import_id in import_ids:
|
||||||
|
if import_id in import_progress:
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
progress.status = "FAILED"
|
||||||
|
progress.error = str(e)
|
||||||
|
progress.completed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
background_tasks.add_task(process_batch_background)
|
||||||
|
|
||||||
|
logger.info(f"Started batch import with {len(files)} files")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"import_ids": import_ids,
|
||||||
|
"total_files": len(files),
|
||||||
|
"status": "PROCESSING",
|
||||||
|
"message": "Batch import started successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
raise HTTPException(status_code=400, detail="Invalid file encoding. Please use UTF-8.")
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error starting batch import: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to start batch import")
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/batch-from-files")
|
||||||
|
async def batch_import_from_files(
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
file_mappings: List[Dict[str, str]] = Body(...),
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Import multiple CSV files from discovered file paths"""
|
||||||
|
try:
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
if not file_mappings:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="No file mappings provided"
|
||||||
|
)
|
||||||
|
|
||||||
|
imports = []
|
||||||
|
import_ids = []
|
||||||
|
|
||||||
|
for mapping in file_mappings:
|
||||||
|
file_path = mapping.get("file_path")
|
||||||
|
table_name = mapping.get("table_name")
|
||||||
|
|
||||||
|
if not file_path or not table_name:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail="Each mapping must have file_path and table_name"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Validate table name
|
||||||
|
if table_name.lower() not in [t.value for t in TableType]:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Unsupported table: {table_name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Read file content
|
||||||
|
full_path = Path(file_path)
|
||||||
|
if not full_path.exists():
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"File not found: {file_path}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Read file content with encoding detection
|
||||||
|
encodings = ['utf-8', 'windows-1252', 'iso-8859-1', 'cp1252', 'latin-1']
|
||||||
|
csv_content = None
|
||||||
|
used_encoding = None
|
||||||
|
|
||||||
|
for encoding in encodings:
|
||||||
|
try:
|
||||||
|
with open(full_path, 'r', encoding=encoding) as f:
|
||||||
|
csv_content = f.read()
|
||||||
|
used_encoding = encoding
|
||||||
|
break
|
||||||
|
except UnicodeDecodeError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if csv_content is None:
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"Could not decode file {file_path}. Please ensure it's a valid text file."
|
||||||
|
)
|
||||||
|
|
||||||
|
if not csv_content.strip():
|
||||||
|
raise HTTPException(
|
||||||
|
status_code=400,
|
||||||
|
detail=f"File {file_path} is empty"
|
||||||
|
)
|
||||||
|
|
||||||
|
imports.append({
|
||||||
|
"table_name": table_name,
|
||||||
|
"csv_content": csv_content,
|
||||||
|
"filename": full_path.name
|
||||||
|
})
|
||||||
|
|
||||||
|
# Generate import ID for tracking
|
||||||
|
import_id = str(uuid.uuid4())
|
||||||
|
import_ids.append(import_id)
|
||||||
|
|
||||||
|
# Create progress tracker
|
||||||
|
progress = ImportStatus(import_id, table_name)
|
||||||
|
import_progress[import_id] = progress
|
||||||
|
|
||||||
|
# Process batch import in background
|
||||||
|
async def process_batch_background():
|
||||||
|
try:
|
||||||
|
service = ImportService(db)
|
||||||
|
results = service.batch_import(imports)
|
||||||
|
|
||||||
|
# Update progress for each import
|
||||||
|
for i, import_id in enumerate(import_ids):
|
||||||
|
if import_id in import_progress:
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
table_name = progress.table_name
|
||||||
|
|
||||||
|
# Find result for this table
|
||||||
|
result = None
|
||||||
|
for key, res in results.items():
|
||||||
|
if key.startswith(table_name):
|
||||||
|
result = res
|
||||||
|
break
|
||||||
|
|
||||||
|
if result:
|
||||||
|
progress.status = "COMPLETED" if result.success else "FAILED"
|
||||||
|
progress.result = result
|
||||||
|
# If import failed, capture the error details
|
||||||
|
if not result.success and result.errors:
|
||||||
|
progress.error = "; ".join(result.errors[:3]) # Show first 3 errors
|
||||||
|
else:
|
||||||
|
progress.status = "FAILED"
|
||||||
|
progress.error = "No result found"
|
||||||
|
|
||||||
|
progress.completed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Batch import failed: {str(e)}")
|
||||||
|
for import_id in import_ids:
|
||||||
|
if import_id in import_progress:
|
||||||
|
progress = import_progress[import_id]
|
||||||
|
progress.status = "FAILED"
|
||||||
|
progress.error = str(e)
|
||||||
|
progress.completed_at = datetime.utcnow()
|
||||||
|
|
||||||
|
background_tasks.add_task(process_batch_background)
|
||||||
|
|
||||||
|
logger.info(f"Started batch import from files with {len(imports)} files")
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"import_ids": import_ids,
|
||||||
|
"total_files": len(imports),
|
||||||
|
"status": "PROCESSING",
|
||||||
|
"message": "Batch import from files started successfully"
|
||||||
|
}
|
||||||
|
|
||||||
|
except HTTPException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error starting batch import from files: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to start batch import")
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/status")
|
||||||
|
async def get_import_status_overview(
|
||||||
|
current_user: User = Depends(get_admin_user),
|
||||||
|
db: Session = Depends(get_db)
|
||||||
|
):
|
||||||
|
"""Get overview of which tables have been successfully imported"""
|
||||||
|
try:
|
||||||
|
from sqlalchemy import inspect, text
|
||||||
|
|
||||||
|
# Get list of all tables in the database
|
||||||
|
inspector = inspect(db.bind)
|
||||||
|
existing_tables = inspector.get_table_names()
|
||||||
|
|
||||||
|
# Define all possible table types and their display names
|
||||||
|
table_info = {
|
||||||
|
"rolodex": {"name": "ROLODEX (Contacts)", "category": "Core", "expected_files": ["ROLODEX.csv", "ROLEX_V.csv"]},
|
||||||
|
"phone": {"name": "PHONE (Phone Numbers)", "category": "Core", "expected_files": ["PHONE.csv"]},
|
||||||
|
"files": {"name": "FILES (Case Files)", "category": "Core", "expected_files": ["FILES.csv", "FILES_R.csv", "FILES_V.csv"]},
|
||||||
|
"ledger": {"name": "LEDGER (Financial)", "category": "Core", "expected_files": ["LEDGER.csv"]},
|
||||||
|
"qdros": {"name": "QDROS (Documents)", "category": "Core", "expected_files": ["QDROS.csv"]},
|
||||||
|
"gruplkup": {"name": "GRUPLKUP", "category": "Lookup", "expected_files": ["GRUPLKUP.csv"]},
|
||||||
|
"employee": {"name": "EMPLOYEE", "category": "Lookup", "expected_files": ["EMPLOYEE.csv"]},
|
||||||
|
"filetype": {"name": "FILETYPE", "category": "Lookup", "expected_files": ["FILETYPE.csv"]},
|
||||||
|
"trnstype": {"name": "TRNSTYPE", "category": "Lookup", "expected_files": ["TRNSTYPE.csv"]},
|
||||||
|
"trnslkup": {"name": "TRNSLKUP", "category": "Lookup", "expected_files": ["TRNSLKUP.csv"]},
|
||||||
|
"rvarlkup": {"name": "RVARLKUP", "category": "Lookup", "expected_files": ["RVARLKUP.csv"]},
|
||||||
|
"fvarlkup": {"name": "FVARLKUP", "category": "Lookup", "expected_files": ["FVARLKUP.csv"]},
|
||||||
|
"filenots": {"name": "FILENOTS", "category": "Lookup", "expected_files": ["FILENOTS.csv"]},
|
||||||
|
"planinfo": {"name": "PLANINFO", "category": "Lookup", "expected_files": ["PLANINFO.csv"]},
|
||||||
|
"setup": {"name": "SETUP", "category": "Configuration", "expected_files": ["SETUP.csv"]},
|
||||||
|
"deposits": {"name": "DEPOSITS", "category": "Financial", "expected_files": ["DEPOSITS.csv"]},
|
||||||
|
"payments": {"name": "PAYMENTS", "category": "Financial", "expected_files": ["PAYMENTS.csv"]},
|
||||||
|
"trnsactn": {"name": "TRNSACTN", "category": "Financial", "expected_files": ["TRNSACTN.csv"]},
|
||||||
|
"pensions": {"name": "PENSIONS", "category": "Pension", "expected_files": ["PENSIONS.csv"]},
|
||||||
|
"marriage": {"name": "MARRIAGE", "category": "Pension", "expected_files": ["MARRIAGE.csv"]},
|
||||||
|
"death": {"name": "DEATH", "category": "Pension", "expected_files": ["DEATH.csv"]},
|
||||||
|
"separate": {"name": "SEPARATE", "category": "Pension", "expected_files": ["SEPARATE.csv"]},
|
||||||
|
"schedule": {"name": "SCHEDULE", "category": "Pension", "expected_files": ["SCHEDULE.csv"]},
|
||||||
|
"numberal": {"name": "NUMBERAL", "category": "Forms", "expected_files": ["NUMBERAL.csv"]},
|
||||||
|
"inx_lkup": {"name": "INX_LKUP", "category": "Forms", "expected_files": ["INX_LKUP.csv"]},
|
||||||
|
"form_lst": {"name": "FORM_LST", "category": "Forms", "expected_files": ["FORM_LST.csv"]},
|
||||||
|
"form_inx": {"name": "FORM_INX", "category": "Forms", "expected_files": ["FORM_INX.csv"]},
|
||||||
|
"lifetabl": {"name": "LIFETABL", "category": "Forms", "expected_files": ["LIFETABL.csv"]}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check status of each table
|
||||||
|
table_status = []
|
||||||
|
for table_name, info in table_info.items():
|
||||||
|
status = {
|
||||||
|
"table_name": table_name,
|
||||||
|
"display_name": info["name"],
|
||||||
|
"category": info["category"],
|
||||||
|
"expected_files": info["expected_files"],
|
||||||
|
"exists": table_name in existing_tables,
|
||||||
|
"row_count": 0,
|
||||||
|
"imported": False
|
||||||
|
}
|
||||||
|
|
||||||
|
if status["exists"]:
|
||||||
|
try:
|
||||||
|
# Get row count
|
||||||
|
result = db.execute(text(f"SELECT COUNT(*) FROM {table_name}"))
|
||||||
|
status["row_count"] = result.scalar()
|
||||||
|
status["imported"] = status["row_count"] > 0
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Could not get row count for {table_name}: {e}")
|
||||||
|
status["row_count"] = -1 # Error getting count
|
||||||
|
|
||||||
|
table_status.append(status)
|
||||||
|
|
||||||
|
# Group by category
|
||||||
|
categories = {}
|
||||||
|
for status in table_status:
|
||||||
|
category = status["category"]
|
||||||
|
if category not in categories:
|
||||||
|
categories[category] = []
|
||||||
|
categories[category].append(status)
|
||||||
|
|
||||||
|
# Calculate summary stats
|
||||||
|
total_tables = len(table_status)
|
||||||
|
imported_tables = len([s for s in table_status if s["imported"]])
|
||||||
|
total_rows = sum(s["row_count"] for s in table_status if s["row_count"] > 0)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"summary": {
|
||||||
|
"total_tables": total_tables,
|
||||||
|
"imported_tables": imported_tables,
|
||||||
|
"empty_tables": len([s for s in table_status if s["exists"] and s["row_count"] == 0]),
|
||||||
|
"missing_tables": len([s for s in table_status if not s["exists"]]),
|
||||||
|
"total_rows": total_rows,
|
||||||
|
"completion_percentage": round((imported_tables / total_tables) * 100, 1) if total_tables > 0 else 0
|
||||||
|
},
|
||||||
|
"categories": categories,
|
||||||
|
"tables": table_status
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error getting import status overview: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to get import status")
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete("/progress")
|
||||||
|
async def cleanup_import_progress(
|
||||||
|
current_user: User = Depends(get_admin_user)
|
||||||
|
):
|
||||||
|
"""Clean up completed import progress records"""
|
||||||
|
try:
|
||||||
|
completed_count = 0
|
||||||
|
to_remove = []
|
||||||
|
|
||||||
|
for import_id, progress in import_progress.items():
|
||||||
|
if progress.status in ["COMPLETED", "FAILED"]:
|
||||||
|
# Remove progress older than 1 hour
|
||||||
|
if progress.completed_at:
|
||||||
|
age = datetime.utcnow() - progress.completed_at
|
||||||
|
if age.total_seconds() > 3600: # 1 hour
|
||||||
|
to_remove.append(import_id)
|
||||||
|
completed_count += 1
|
||||||
|
|
||||||
|
for import_id in to_remove:
|
||||||
|
del import_progress[import_id]
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"cleaned_up": completed_count,
|
||||||
|
"remaining": len(import_progress)
|
||||||
|
}
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error cleaning up import progress: {str(e)}")
|
||||||
|
raise HTTPException(status_code=500, detail="Failed to cleanup progress")
|
||||||
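The single-file flow above is asynchronous: POST the CSV, then poll the status endpoint with the returned import_id. A rough client sketch follows; only the `/csv` and `/status/{import_id}` routes come from the router above, while the `/api/import` mount prefix, host, port, and token are assumptions.

```python
# Illustrative client sketch; the mount prefix "/api/import", host/port, and token
# are assumptions. Only /csv and /status/{import_id} appear in this diff.
import time
import requests

BASE = "http://localhost:6920/api/import"   # assumed mount point for this router
headers = {"Authorization": "Bearer <admin-jwt>"}

with open("ROLODEX.csv", "rb") as f:
    resp = requests.post(f"{BASE}/csv", headers=headers,
                         data={"table_name": "rolodex"},
                         files={"file": ("ROLODEX.csv", f, "text/csv")})
import_id = resp.json()["import_id"]

while True:
    status = requests.get(f"{BASE}/status/{import_id}", headers=headers).json()
    if status["status"] in ("COMPLETED", "FAILED"):
        print(status)
        break
    time.sleep(2)
```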
File diff suppressed because it is too large
@@ -20,6 +20,7 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

 # JWT Security
 security = HTTPBearer()
+optional_security = HTTPBearer(auto_error=False)


 def verify_password(plain_password: str, hashed_password: str) -> bool:
@@ -190,6 +191,31 @@ def get_current_user(
     return user


+def get_optional_current_user(
+    credentials: Optional[HTTPAuthorizationCredentials] = Depends(optional_security),
+    db: Session = Depends(get_db)
+) -> Optional[User]:
+    """Get current authenticated user, but allow None if not authenticated"""
+    if not credentials:
+        return None
+
+    try:
+        token = credentials.credentials
+        username = verify_token(token)
+
+        if username is None:
+            return None
+
+        user = db.query(User).filter(User.username == username).first()
+        if user is None or not user.is_active:
+            return None
+
+        return user
+
+    except Exception:
+        return None
+
+
 def get_admin_user(current_user: User = Depends(get_current_user)) -> User:
     """Require admin privileges"""
     if not current_user.is_admin:
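The new get_optional_current_user dependency complements the existing get_current_user/get_admin_user helpers for routes that should work anonymously but personalize their response when a valid token is supplied. A hedged usage sketch, not part of this changeset, with a hypothetical endpoint:

```python
# Illustrative only: a route that serves anonymous callers but recognizes a valid token.
# The /whoami endpoint and its router are hypothetical, not part of this diff.
from typing import Optional
from fastapi import APIRouter, Depends

from app.auth.security import get_optional_current_user
from app.models.user import User

router = APIRouter()

@router.get("/whoami")
async def whoami(current_user: Optional[User] = Depends(get_optional_current_user)):
    if current_user is None:
        return {"authenticated": False}
    return {"authenticated": True, "username": current_user.username}
```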
@@ -9,7 +9,15 @@ from app.config import settings

 engine = create_engine(
     settings.database_url,
-    connect_args={"check_same_thread": False} if "sqlite" in settings.database_url else {}
+    connect_args={
+        "check_same_thread": False,
+        # SQLite performance optimizations for bulk imports
+        "timeout": 30,
+    } if "sqlite" in settings.database_url else {},
+    # Performance settings for bulk operations
+    pool_pre_ping=True,
+    pool_recycle=3600,  # Recycle connections after 1 hour
+    echo=False  # Set to True for SQL debugging
 )

 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
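The get_db dependency that the API modules import from app.database.base is not touched by this diff. For context, a typical implementation over the SessionLocal defined above looks roughly like the sketch below; the repository's actual code may differ.

```python
# Sketch of the usual SessionLocal-based dependency; the real get_db in
# app.database.base is not shown in this diff and may differ.
def get_db():
    db = SessionLocal()
    try:
        yield db   # hand a session to the request, then always close it
    finally:
        db.close()
```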
app/import_export/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""
Import/Export module for Delphi Database System

This module provides clean, modular CSV import functionality
for all database tables.
"""
app/import_export/base.py (new file, 306 lines)
@@ -0,0 +1,306 @@
"""
|
||||||
|
Base classes for CSV import functionality
|
||||||
|
"""
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Dict, List, Any, Optional, Tuple
|
||||||
|
import csv
|
||||||
|
import io
|
||||||
|
from datetime import datetime, date
|
||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from sqlalchemy.exc import IntegrityError, SQLAlchemyError
|
||||||
|
|
||||||
|
from .logging_config import create_import_logger, ImportMetrics
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ImportResult:
|
||||||
|
"""Container for import operation results"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.success = False
|
||||||
|
self.total_rows = 0
|
||||||
|
self.imported_rows = 0
|
||||||
|
self.skipped_rows = 0
|
||||||
|
self.error_rows = 0
|
||||||
|
self.errors: List[str] = []
|
||||||
|
self.warnings: List[str] = []
|
||||||
|
self.import_id = None
|
||||||
|
|
||||||
|
def add_error(self, error: str):
|
||||||
|
"""Add an error message"""
|
||||||
|
self.errors.append(error)
|
||||||
|
self.error_rows += 1
|
||||||
|
|
||||||
|
def add_warning(self, warning: str):
|
||||||
|
"""Add a warning message"""
|
||||||
|
self.warnings.append(warning)
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert result to dictionary for JSON response"""
|
||||||
|
return {
|
||||||
|
"success": self.success,
|
||||||
|
"total_rows": self.total_rows,
|
||||||
|
"imported_rows": self.imported_rows,
|
||||||
|
"skipped_rows": self.skipped_rows,
|
||||||
|
"error_rows": self.error_rows,
|
||||||
|
"errors": self.errors,
|
||||||
|
"warnings": self.warnings,
|
||||||
|
"import_id": self.import_id
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class BaseCSVImporter(ABC):
|
||||||
|
"""Abstract base class for all CSV importers"""
|
||||||
|
|
||||||
|
def __init__(self, db_session: Session, import_id: Optional[str] = None):
|
||||||
|
self.db_session = db_session
|
||||||
|
self.result = ImportResult()
|
||||||
|
self.import_id = import_id or str(uuid.uuid4())
|
||||||
|
self.result.import_id = self.import_id
|
||||||
|
self.import_logger = create_import_logger(self.import_id, self.table_name)
|
||||||
|
self.metrics = ImportMetrics()
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def table_name(self) -> str:
|
||||||
|
"""Name of the database table being imported to"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def required_fields(self) -> List[str]:
|
||||||
|
"""List of required field names"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def field_mapping(self) -> Dict[str, str]:
|
||||||
|
"""Mapping from CSV headers to database field names"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def create_model_instance(self, row_data: Dict[str, Any]) -> Any:
|
||||||
|
"""Create a model instance from processed row data"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
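Those four abstract members are the whole per-table contract; the parsing helpers defined just below (parse_date, parse_int, normalize_string, and friends) do the shared work. As a rough, hypothetical illustration of how a concrete importer might fill the contract in (the PHONE columns and model location here are guesses, not the repository's real importer):

```python
# Hypothetical subclass sketch only; the real importers and the Phone model's
# columns are not part of this diff.
class PhoneCSVImporter(BaseCSVImporter):
    table_name = "phone"
    required_fields = ["rolodex_id", "phone_number"]
    field_mapping = {
        "rolodex_id": "ROLODEX_ID",    # db field -> CSV header
        "phone_number": "PHONE_NUM",
        "phone_type": "TYPE",
    }

    def create_model_instance(self, row_data):
        from app.models.phone import Phone  # assumed model location
        return Phone(
            rolodex_id=self.parse_int(row_data["rolodex_id"]),
            phone_number=self.normalize_string(row_data["phone_number"], max_length=20),
            phone_type=self.normalize_string(row_data.get("phone_type", "")),
        )
```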
    def parse_date(self, date_str: str) -> Optional[date]:
        """Parse date string to date object"""
        if not date_str or date_str.strip() == "":
            return None

        date_str = date_str.strip()

        # Try common date formats
        formats = [
            "%Y-%m-%d",  # ISO format
            "%m/%d/%Y",  # US format
            "%m/%d/%y",  # US format 2-digit year
            "%d/%m/%Y",  # European format
            "%Y%m%d",    # Compact format
        ]

        for fmt in formats:
            try:
                return datetime.strptime(date_str, fmt).date()
            except ValueError:
                continue

        raise ValueError(f"Unable to parse date: {date_str}")

    def parse_float(self, value_str: str) -> float:
        """Parse string to float, handling empty values"""
        if not value_str or value_str.strip() == "":
            return 0.0
        value_str = value_str.strip().replace(",", "")  # Remove commas
        try:
            return float(value_str)
        except ValueError:
            raise ValueError(f"Unable to parse float: {value_str}")

    def parse_int(self, value_str: str) -> int:
        """Parse string to int, handling empty values"""
        if not value_str or value_str.strip() == "":
            return 0
        value_str = value_str.strip().replace(",", "")  # Remove commas
        try:
            return int(float(value_str))  # Handle "1.0" format
        except ValueError:
            raise ValueError(f"Unable to parse integer: {value_str}")

    def normalize_string(self, value: str, max_length: Optional[int] = None) -> str:
        """Normalize string value"""
        if not value:
            return ""
        value = str(value).strip()
        if max_length and len(value) > max_length:
            self.result.add_warning(f"String truncated from {len(value)} to {max_length} characters: {value[:50]}...")
            value = value[:max_length]
        return value

    def detect_delimiter(self, csv_content: str) -> str:
        """Auto-detect CSV delimiter"""
        sample = csv_content[:1024]  # Check first 1KB
        sniffer = csv.Sniffer()
        try:
            dialect = sniffer.sniff(sample, delimiters=",;\t|")
            return dialect.delimiter
        except:
            return ","  # Default to comma

    def validate_headers(self, headers: List[str]) -> bool:
        """Validate that required headers are present"""
        missing_required = []

        # Create case-insensitive mapping of headers
        header_map = {h.lower().strip(): h for h in headers}

        for required_field in self.required_fields:
            # Check direct match first
            if required_field in headers:
                continue

            # Check if there's a mapping for this field
            mapped_name = self.field_mapping.get(required_field, required_field)
            if mapped_name.lower() in header_map:
                continue

            missing_required.append(required_field)

        if missing_required:
            self.result.add_error(f"Missing required columns: {', '.join(missing_required)}")
            return False

        return True

    def map_row_data(self, row: Dict[str, str], headers: List[str]) -> Dict[str, Any]:
        """Map CSV row data to database field names"""
        mapped_data = {}

        # Create case-insensitive lookup
        row_lookup = {k.lower().strip(): v for k, v in row.items() if k}

        for db_field, csv_field in self.field_mapping.items():
            csv_field_lower = csv_field.lower().strip()

            # Try exact match first
            if csv_field in row:
                mapped_data[db_field] = row[csv_field]
            # Try case-insensitive match
            elif csv_field_lower in row_lookup:
                mapped_data[db_field] = row_lookup[csv_field_lower]
            else:
                mapped_data[db_field] = ""

        return mapped_data

    def process_csv_content(self, csv_content: str, encoding: str = "utf-8") -> ImportResult:
        """Process CSV content and import data"""
        self.import_logger.info(f"Starting CSV import for {self.table_name}")

        try:
            # Detect delimiter
            delimiter = self.detect_delimiter(csv_content)
            self.import_logger.debug(f"Detected CSV delimiter: '{delimiter}'")

            # Parse CSV
            csv_reader = csv.DictReader(
                io.StringIO(csv_content),
                delimiter=delimiter
            )

            headers = csv_reader.fieldnames or []
            if not headers:
|
||||||
|
error_msg = "No headers found in CSV file"
|
||||||
|
self.result.add_error(error_msg)
|
||||||
|
self.import_logger.error(error_msg)
|
||||||
|
return self.result
|
||||||
|
|
||||||
|
self.import_logger.info(f"Found headers: {headers}")
|
||||||
|
|
||||||
|
# Validate headers
|
||||||
|
if not self.validate_headers(headers):
|
||||||
|
self.import_logger.error("Header validation failed")
|
||||||
|
return self.result
|
||||||
|
|
||||||
|
self.import_logger.info("Header validation passed")
|
||||||
|
|
||||||
|
# Process rows
|
||||||
|
imported_count = 0
|
||||||
|
total_count = 0
|
||||||
|
|
||||||
|
for row_num, row in enumerate(csv_reader, 1):
|
||||||
|
total_count += 1
|
||||||
|
self.metrics.total_rows = total_count
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Map CSV data to database fields
|
||||||
|
mapped_data = self.map_row_data(row, headers)
|
||||||
|
|
||||||
|
# Create model instance
|
||||||
|
model_instance = self.create_model_instance(mapped_data)
|
||||||
|
|
||||||
|
# Add to session
|
||||||
|
self.db_session.add(model_instance)
|
||||||
|
imported_count += 1
|
||||||
|
|
||||||
|
self.import_logger.log_row_processed(row_num, success=True)
|
||||||
|
self.metrics.record_row_processed(success=True)
|
||||||
|
|
||||||
|
except ImportValidationError as e:
|
||||||
|
error_msg = f"Row {row_num}: {str(e)}"
|
||||||
|
self.result.add_error(error_msg)
|
||||||
|
self.import_logger.log_row_processed(row_num, success=False)
|
||||||
|
self.import_logger.log_validation_error(row_num, "validation", row, str(e))
|
||||||
|
self.metrics.record_validation_error(row_num, str(e))
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Row {row_num}: Unexpected error - {str(e)}"
|
||||||
|
self.result.add_error(error_msg)
|
||||||
|
self.import_logger.log_row_processed(row_num, success=False)
|
||||||
|
self.import_logger.error(error_msg, row_number=row_num, exception_type=type(e).__name__)
|
||||||
|
self.metrics.record_validation_error(row_num, str(e))
|
||||||
|
|
||||||
|
# Commit transaction
|
||||||
|
try:
|
||||||
|
self.db_session.commit()
|
||||||
|
self.result.success = True
|
||||||
|
self.result.imported_rows = imported_count
|
||||||
|
|
||||||
|
self.import_logger.info(f"Successfully committed {imported_count} rows to database")
|
||||||
|
logger.info(f"Successfully imported {imported_count} rows to {self.table_name}")
|
||||||
|
|
||||||
|
except (IntegrityError, SQLAlchemyError) as e:
|
||||||
|
self.db_session.rollback()
|
||||||
|
error_msg = f"Database error during commit: {str(e)}"
|
||||||
|
self.result.add_error(error_msg)
|
||||||
|
self.import_logger.error(error_msg)
|
||||||
|
self.metrics.record_database_error(str(e))
|
||||||
|
logger.error(f"Database error importing to {self.table_name}: {str(e)}")
|
||||||
|
|
||||||
|
self.result.total_rows = total_count
|
||||||
|
self.metrics.finalize()
|
||||||
|
|
||||||
|
# Log final summary
|
||||||
|
self.import_logger.log_import_summary(
|
||||||
|
total_count,
|
||||||
|
imported_count,
|
||||||
|
self.result.error_rows
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
self.db_session.rollback()
|
||||||
|
error_msg = f"Failed to process CSV: {str(e)}"
|
||||||
|
self.result.add_error(error_msg)
|
||||||
|
self.import_logger.error(error_msg, exception_type=type(e).__name__)
|
||||||
|
self.metrics.record_database_error(str(e))
|
||||||
|
logger.error(f"CSV processing error for {self.table_name}: {str(e)}")
|
||||||
|
|
||||||
|
return self.result
|
||||||
|
|
||||||
|
|
||||||
|
class ImportValidationError(Exception):
|
||||||
|
"""Exception raised for validation errors during import"""
|
||||||
|
pass
|
||||||
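For orientation, a minimal sketch of how a concrete importer is meant to extend this base class. The `notes` table and the `Note` model are hypothetical and exist only to illustrate the abstract API; real subclasses appear in the files below.

```python
# Illustrative subclass sketch only; the Note model and "notes" table are hypothetical.
from typing import Any, Dict, List

from app.models.notes import Note  # assumed model, not part of this change


class NotesCSVImporter(BaseCSVImporter):
    @property
    def table_name(self) -> str:
        return "notes"

    @property
    def required_fields(self) -> List[str]:
        return ["file_no", "note"]

    @property
    def field_mapping(self) -> Dict[str, str]:
        return {"file_no": "file_no", "note": "note"}

    def create_model_instance(self, row_data: Dict[str, Any]) -> Note:
        if not row_data.get("file_no"):
            raise ImportValidationError("File number is required")
        return Note(
            file_no=self.normalize_string(row_data["file_no"], 45),
            note=row_data.get("note", ""),
        )
```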
144 app/import_export/files_importer.py (Normal file)
@@ -0,0 +1,144 @@
"""
FILES CSV Importer
"""
from typing import Dict, List, Any
from datetime import date
from sqlalchemy.orm import Session

from .base import BaseCSVImporter, ImportValidationError
from app.models.files import File
from app.models.rolodex import Rolodex


class FilesCSVImporter(BaseCSVImporter):
    """CSV importer for FILES table"""

    @property
    def table_name(self) -> str:
        return "files"

    @property
    def required_fields(self) -> List[str]:
        return ["file_no", "id", "empl_num", "file_type", "opened", "status", "rate_per_hour"]

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names"""
        return {
            "file_no": "file_no",
            "id": "id",
            "regarding": "regarding",
            "empl_num": "empl_num",
            "file_type": "file_type",
            "opened": "opened",
            "closed": "closed",
            "status": "status",
            "footer_code": "footer_code",
            "opposing": "opposing",
            "rate_per_hour": "rate_per_hour",
            # Financial balance fields (previously billed)
            "trust_bal_p": "trust_bal_p",
            "hours_p": "hours_p",
            "hourly_fees_p": "hourly_fees_p",
            "flat_fees_p": "flat_fees_p",
            "disbursements_p": "disbursements_p",
            "credit_bal_p": "credit_bal_p",
            "total_charges_p": "total_charges_p",
            "amount_owing_p": "amount_owing_p",
            # Financial balance fields (current totals)
            "trust_bal": "trust_bal",
            "hours": "hours",
            "hourly_fees": "hourly_fees",
            "flat_fees": "flat_fees",
            "disbursements": "disbursements",
            "credit_bal": "credit_bal",
            "total_charges": "total_charges",
            "amount_owing": "amount_owing",
            "transferable": "transferable",
            "memo": "memo"
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> File:
        """Create a Files instance from processed row data"""

        # Validate required fields
        required_checks = [
            ("file_no", "File number"),
            ("id", "Rolodex ID"),
            ("empl_num", "Employee number"),
            ("file_type", "File type"),
            ("opened", "Opened date"),
            ("status", "Status"),
            ("rate_per_hour", "Rate per hour")
        ]

        for field, display_name in required_checks:
            if not row_data.get(field):
                raise ImportValidationError(f"{display_name} is required")

        # Check for duplicate file number
        existing = self.db_session.query(File).filter_by(file_no=row_data["file_no"]).first()
        if existing:
            raise ImportValidationError(f"File number '{row_data['file_no']}' already exists")

        # Validate foreign key exists (rolodex ID)
        rolodex_exists = self.db_session.query(Rolodex).filter_by(id=row_data["id"]).first()
        if not rolodex_exists:
            raise ImportValidationError(f"Rolodex ID '{row_data['id']}' does not exist")

        # Parse dates
        opened_date = None
        closed_date = None

        try:
            opened_date = self.parse_date(row_data["opened"])
        except ValueError as e:
            raise ImportValidationError(f"Invalid opened date: {e}")

        if row_data.get("closed"):
            try:
                closed_date = self.parse_date(row_data["closed"])
            except ValueError as e:
                raise ImportValidationError(f"Invalid closed date: {e}")

        # Parse financial fields
        try:
            rate_per_hour = self.parse_float(row_data["rate_per_hour"])
            if rate_per_hour < 0:
                raise ImportValidationError("Rate per hour cannot be negative")
        except ValueError as e:
            raise ImportValidationError(f"Invalid rate per hour: {e}")

        # Parse all financial balance fields
        financial_fields = [
            "trust_bal_p", "hours_p", "hourly_fees_p", "flat_fees_p",
            "disbursements_p", "credit_bal_p", "total_charges_p", "amount_owing_p",
            "trust_bal", "hours", "hourly_fees", "flat_fees",
            "disbursements", "credit_bal", "total_charges", "amount_owing", "transferable"
        ]

        financial_data = {}
        for field in financial_fields:
            try:
                financial_data[field] = self.parse_float(row_data.get(field, "0"))
            except ValueError as e:
                raise ImportValidationError(f"Invalid {field}: {e}")

        # Create instance
        files = File(
            file_no=self.normalize_string(row_data["file_no"], 45),
            id=self.normalize_string(row_data["id"], 80),
            regarding=row_data.get("regarding", ""),  # Text field
            empl_num=self.normalize_string(row_data["empl_num"], 10),
            file_type=self.normalize_string(row_data["file_type"], 45),
            opened=opened_date,
            closed=closed_date,
            status=self.normalize_string(row_data["status"], 45),
            footer_code=self.normalize_string(row_data.get("footer_code", ""), 45),
            opposing=self.normalize_string(row_data.get("opposing", ""), 80),
            rate_per_hour=rate_per_hour,
            memo=row_data.get("memo", ""),  # Text field
            **financial_data  # Unpack all financial fields
        )

        return files
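A rough usage sketch for this importer, assuming the application exposes a SQLAlchemy session factory (called `SessionLocal` here) and given a one-row CSV in the expected column layout; neither the factory name nor the sample values are part of this change.

```python
# Usage sketch; SessionLocal and the sample CSV values are assumptions.
from app.database import SessionLocal  # assumed session factory

csv_text = (
    "file_no,id,empl_num,file_type,opened,status,rate_per_hour\n"
    "0001,SMITH01,01,QDRO,2024-01-15,Open,250\n"
)

with SessionLocal() as session:
    importer = FilesCSVImporter(session)
    # The referenced Rolodex id must already exist, or the row is reported as an error.
    result = importer.process_csv_content(csv_text)
    print(result.to_dict())
```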
447 app/import_export/generic_importer.py (Normal file)
@@ -0,0 +1,447 @@
"""
Generic CSV Importer - handles any CSV structure dynamically
"""
import csv
import io
import logging
import re
from typing import Dict, Any, List, Optional
from datetime import datetime
from sqlalchemy import text, Column, String, Integer, Text, MetaData, Table, create_engine, Date
from sqlalchemy.orm import Session
from sqlalchemy.exc import SQLAlchemyError

from .base import BaseCSVImporter, ImportResult

logger = logging.getLogger(__name__)

# SQL reserved keywords that need to be quoted when used as column names
SQL_RESERVED_KEYWORDS = {
    'ABORT', 'ACTION', 'ADD', 'AFTER', 'ALL', 'ALTER', 'ALWAYS', 'ANALYZE', 'AND', 'AS', 'ASC',
    'ATTACH', 'AUTOINCREMENT', 'BEFORE', 'BEGIN', 'BETWEEN', 'BY', 'CASCADE', 'CASE', 'CAST',
    'CHECK', 'COLLATE', 'COLUMN', 'COMMIT', 'CONFLICT', 'CONSTRAINT', 'CREATE', 'CROSS',
    'CURRENT', 'CURRENT_DATE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', 'DATABASE', 'DEFAULT',
    'DEFERRABLE', 'DEFERRED', 'DELETE', 'DESC', 'DETACH', 'DISTINCT', 'DO', 'DROP', 'EACH',
    'ELSE', 'END', 'ESCAPE', 'EXCEPT', 'EXCLUDE', 'EXISTS', 'EXPLAIN', 'FAIL', 'FILTER',
    'FIRST', 'FOLLOWING', 'FOR', 'FOREIGN', 'FROM', 'FULL', 'GENERATED', 'GLOB', 'GROUP',
    'GROUPS', 'HAVING', 'IF', 'IGNORE', 'IMMEDIATE', 'IN', 'INDEX', 'INDEXED', 'INITIALLY',
    'INNER', 'INSERT', 'INSTEAD', 'INTERSECT', 'INTO', 'IS', 'ISNULL', 'JOIN', 'KEY',
    'LAST', 'LEFT', 'LIKE', 'LIMIT', 'MATCH', 'NATURAL', 'NO', 'NOT', 'NOTHING', 'NOTNULL',
    'NULL', 'NULLS', 'OF', 'OFFSET', 'ON', 'OR', 'ORDER', 'OUTER', 'OVER', 'PARTITION',
    'PLAN', 'PRAGMA', 'PRECEDING', 'PRIMARY', 'QUERY', 'RAISE', 'RECURSIVE', 'REFERENCES',
    'REGEXP', 'REINDEX', 'RELEASE', 'RENAME', 'REPLACE', 'RESTRICT', 'RIGHT', 'ROLLBACK',
    'ROW', 'ROWS', 'SAVEPOINT', 'SELECT', 'SET', 'TABLE', 'TEMP', 'TEMPORARY', 'THEN',
    'TIES', 'TO', 'TRANSACTION', 'TRIGGER', 'UNBOUNDED', 'UNION', 'UNIQUE', 'UPDATE',
    'USING', 'VACUUM', 'VALUES', 'VIEW', 'VIRTUAL', 'WHEN', 'WHERE', 'WINDOW', 'WITH',
    'WITHOUT'
}


class GenericCSVImporter(BaseCSVImporter):
    """Generic importer that can handle any CSV structure by creating tables dynamically"""

    def __init__(self, db_session: Session, table_name: str, import_id: str = None):
        # Set table name first, before calling super().__init__()
        # because BaseCSVImporter.__init__ calls self.table_name
        self._table_name = table_name.lower()
        self.dynamic_table = None
        self.csv_headers = []
        super().__init__(db_session, import_id)

    @property
    def table_name(self) -> str:
        return self._table_name

    @property
    def required_fields(self) -> List[str]:
        """No required fields for generic import"""
        return []

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Dynamic mapping based on CSV headers"""
        if self.csv_headers:
            mapping = {}
            for header in self.csv_headers:
                safe_name = self._make_safe_name(header)
                # Handle 'id' column renaming for conflict avoidance
                if safe_name.lower() == 'id':
                    safe_name = 'csv_id'
                mapping[header] = safe_name
            return mapping
        return {}

    def create_model_instance(self, row_data: Dict[str, Any]) -> Dict[str, Any]:
        """For generic import, just return the processed row data"""
        return row_data

    def create_dynamic_table(self, headers: List[str]) -> Table:
        """Create a table dynamically based on CSV headers"""
        try:
            # Create metadata
            metadata = MetaData()

            # Clean table name
            safe_table_name = self._make_safe_name(self.table_name)

            # Check if table already exists BEFORE creating the Table object
            from sqlalchemy import inspect
            inspector = inspect(self.db_session.bind)
            existing_tables = inspector.get_table_names()

            if safe_table_name in existing_tables:
                logger.info(f"Table '{safe_table_name}' already exists, will use existing table structure")
                # Reflect the existing table to get its structure
                metadata.reflect(bind=self.db_session.bind, only=[safe_table_name])
                existing_table = metadata.tables[safe_table_name]

                # Store the actual table name for use in data insertion
                self.actual_table_name = safe_table_name
                self._table_name = safe_table_name
                logger.info(f"Using existing table: '{safe_table_name}'")
                return existing_table
            else:
                logger.info(f"Creating new table: '{safe_table_name}'")

                # Create columns dynamically
                columns = [Column('id', Integer, primary_key=True, autoincrement=True)]

                for header in headers:
                    if header and header.strip():
                        safe_column_name = self._make_safe_name(header.strip())
                        # Skip if this would create a duplicate 'id' column
                        if safe_column_name.lower() == 'id':
                            # Rename the CSV column to avoid conflict with auto-generated id
                            safe_column_name = 'csv_id'
                        columns.append(Column(safe_column_name, Text))

                # Create table with the final table name
                table = Table(safe_table_name, metadata, *columns)

                # Store the actual table name for use in data insertion
                self.actual_table_name = safe_table_name
                self._table_name = safe_table_name  # Update the stored table name to use the timestamped version
                logger.info(f"Using table name for data insertion: '{safe_table_name}'")

                # Create the table in the database with retry logic for locks
                max_retries = 3
                retry_delay = 1.0

                for attempt in range(max_retries):
                    try:
                        # Use explicit transaction to avoid deadlocks
                        self.db_session.begin()
                        metadata.create_all(self.db_session.bind)
                        self.db_session.commit()

                        logger.info(f"Created dynamic table '{safe_table_name}' with {len(columns)} columns")
                        return table

                    except Exception as create_error:
                        self.db_session.rollback()

                        if "database is locked" in str(create_error).lower() and attempt < max_retries - 1:
                            import time
                            logger.warning(f"Database locked, retrying in {retry_delay}s (attempt {attempt + 1}/{max_retries})")
                            time.sleep(retry_delay)
                            retry_delay *= 2  # Exponential backoff
                            continue
                        elif "already present" in str(create_error).lower():
                            # Table was created by another process, reflect it
                            logger.info(f"Table '{safe_table_name}' created by another process, reflecting existing table")
                            try:
                                metadata.reflect(bind=self.db_session.bind, only=[safe_table_name])
                                return metadata.tables[safe_table_name]
                            except Exception:
                                # If reflection fails, re-raise original error
                                raise create_error
                        else:
                            # Re-raise if not a recoverable error
                            raise create_error

        except Exception as e:
            logger.error(f"Error creating dynamic table: {e}")
            raise

    def _make_safe_name(self, name: str) -> str:
        """Make a database-safe name from any string"""
        import re
        # Remove special characters and replace with underscore
        safe_name = re.sub(r'[^a-zA-Z0-9_]', '_', name)
        # Remove multiple underscores
        safe_name = re.sub(r'_+', '_', safe_name)
        # Remove trailing underscore
        safe_name = safe_name.strip('_')
        # Ensure it's not empty
        if not safe_name:
            safe_name = 'unnamed_column'
        # Special handling for purely numeric names or names starting with numbers
        if safe_name.isdigit() or (safe_name and safe_name[0].isdigit()):
            safe_name = f'col_{safe_name}'
        # Ensure it starts with a letter or underscore (final check)
        elif safe_name and not (safe_name[0].isalpha() or safe_name[0] == '_'):
            safe_name = 'col_' + safe_name
        return safe_name.lower()

    def _quote_column_name(self, column_name: str) -> str:
        """Quote column name if it's a SQL reserved keyword"""
        if column_name.upper() in SQL_RESERVED_KEYWORDS:
            return f'"{column_name}"'
        return column_name

    def _parse_date_value(self, value: str) -> Optional[str]:
        """Try to parse a date value and return it in ISO format"""
        if not value or value.strip() == '':
            return None

        value = value.strip()

        # Common date formats to try
        date_formats = [
            '%m/%d/%Y',   # MM/DD/YYYY
            '%m/%d/%y',   # MM/DD/YY
            '%Y-%m-%d',   # YYYY-MM-DD
            '%d/%m/%Y',   # DD/MM/YYYY
            '%d-%m-%Y',   # DD-MM-YYYY
            '%Y/%m/%d',   # YYYY/MM/DD
        ]

        for fmt in date_formats:
            try:
                parsed_date = datetime.strptime(value, fmt)
                return parsed_date.strftime('%Y-%m-%d')  # Return in ISO format
            except ValueError:
                continue

        # If no format matches, return the original value
        return value

    def process_csv_content(self, csv_content: str, encoding: str = "utf-8") -> ImportResult:
        """Override the main processing method to handle dynamic table creation"""
        try:
            # Preprocess CSV content to handle common issues
            # Remove trailing empty lines and normalize line endings
            lines = csv_content.strip().splitlines()
            # Remove empty lines that might cause parsing issues
            non_empty_lines = [line for line in lines if line.strip()]
            if not non_empty_lines:
                result = ImportResult()
                result.add_error("CSV file is empty or contains only empty lines")
                return result

            # Reconstruct CSV content with clean line endings
            cleaned_csv_content = '\n'.join(non_empty_lines)

            # Parse CSV and get headers with flexible parsing
            # Handle various CSV format issues including embedded newlines
            csv_file = io.StringIO(cleaned_csv_content)

            # Try with different CSV dialect configurations
            headers = None
            parsing_strategies = [
                # Strategy 1: Standard CSV parsing
                lambda f: csv.DictReader(f),
                # Strategy 2: Handle newlines in fields with strict quoting
                lambda f: csv.DictReader(f, skipinitialspace=True, quoting=csv.QUOTE_MINIMAL, strict=False),
                # Strategy 3: More flexible quoting
                lambda f: csv.DictReader(f, quoting=csv.QUOTE_ALL, strict=False),
                # Strategy 4: Excel dialect
                lambda f: csv.DictReader(f, dialect='excel'),
                # Strategy 5: Unix dialect
                lambda f: csv.DictReader(f, dialect='unix'),
                # Strategy 6: Very permissive - ignore malformed lines
                lambda f: csv.DictReader(f, quoting=csv.QUOTE_NONE, escapechar='\\', strict=False)
            ]

            for i, strategy in enumerate(parsing_strategies):
                try:
                    csv_file.seek(0)
                    csv_reader = strategy(csv_file)
                    headers = csv_reader.fieldnames
                    if headers:
                        logger.debug(f"CSV parsing successful with strategy {i+1}")
                        break
                except (csv.Error, UnicodeDecodeError) as e:
                    logger.debug(f"CSV parsing strategy {i+1} failed: {e}")
                    continue

            if not headers:
                result = ImportResult()
                result.add_error("No headers found in CSV file")
                return result

            # Store headers and create dynamic table
            self.csv_headers = [h.strip() for h in headers if h and h.strip()]

            if not self.csv_headers:
                result = ImportResult()
                result.add_error("No valid headers found in CSV file")
                return result

            self.dynamic_table = self.create_dynamic_table(self.csv_headers)

            # Reset reader and process rows with the same successful parsing strategy
            csv_file = io.StringIO(cleaned_csv_content)
            csv_reader = None

            # Use the same parsing strategies to ensure consistency
            for i, strategy in enumerate(parsing_strategies):
                try:
                    csv_file.seek(0)
                    csv_reader = strategy(csv_file)
                    # Test that it works by trying to read headers
                    test_headers = csv_reader.fieldnames
                    if test_headers:
                        logger.debug(f"Data parsing using strategy {i+1}")
                        break
                except (csv.Error, UnicodeDecodeError) as e:
                    logger.debug(f"Data parsing strategy {i+1} failed: {e}")
                    continue

            if not csv_reader:
                result = ImportResult()
                result.add_error("Unable to parse CSV file with any available strategy")
                return result

            imported_count = 0
            error_count = 0
            total_count = 0

            # Check if file has any data rows
            rows = list(csv_reader)
            if not rows:
                logger.info(f"CSV file for table '{self.table_name}' contains headers only, no data rows to import")
                self.result.success = True
                self.result.total_rows = 0
                self.result.imported_rows = 0
                self.result.error_rows = 0
                self.result.add_warning("File contains headers only, no data rows found")
                return self.result

            # Process all rows (transaction managed by session)
            try:

                for row_num, row in enumerate(rows, start=2):
                    total_count += 1

                    try:
                        # Prepare row data
                        row_data = {}

                        # Get existing table columns if using existing table
                        existing_columns = set()
                        if hasattr(self, 'dynamic_table') and self.dynamic_table is not None:
                            # Convert column keys to strings for comparison
                            existing_columns = set(str(col) for col in self.dynamic_table.columns.keys())

                        for header in self.csv_headers:
                            try:
                                safe_column_name = self._make_safe_name(header)

                                # Handle 'id' column mapping for existing tables
                                if safe_column_name.lower() == 'id' and 'id' in existing_columns:
                                    # For existing tables, try to map the CSV 'id' to the actual 'id' column
                                    # Check if id column has autoincrement - but handle this safely
                                    try:
                                        id_col = self.dynamic_table.columns.id
                                        # Check if autoincrement is True (SQLAlchemy may not define this attribute)
                                        is_autoincrement = getattr(id_col, 'autoincrement', False) is True
                                        if is_autoincrement:
                                            safe_column_name = 'csv_id'  # Avoid conflict with auto-increment
                                        else:
                                            safe_column_name = 'id'  # Use the actual id column
                                    except (AttributeError, TypeError):
                                        # If we can't determine autoincrement, default to using 'id'
                                        safe_column_name = 'id'
                                elif safe_column_name.lower() == 'id':
                                    safe_column_name = 'csv_id'  # Default fallback

                                # Only include columns that exist in the target table (if using existing table)
                                if existing_columns and safe_column_name not in existing_columns:
                                    logger.debug(f"Skipping column '{safe_column_name}' (from '{header}') - not found in target table")
                                    continue

                                value = row.get(header, '').strip() if row.get(header) else None
                                # Convert empty strings to None for better database handling
                                if value == '':
                                    value = None
                                elif value and ('date' in header.lower() or 'time' in header.lower()):
                                    # Try to parse date values for better format consistency
                                    value = self._parse_date_value(value)
                                row_data[safe_column_name] = value

                            except Exception as header_error:
                                logger.error(f"Error processing header '{header}': {header_error}")
                                # Continue to next header instead of failing the whole row
                                continue

                        # Insert into database with conflict resolution
                        # Use INSERT OR IGNORE to handle potential duplicates gracefully
                        # Use the actual table name (which may have timestamp suffix) instead of dynamic_table.name
                        table_name = getattr(self, 'actual_table_name', self.dynamic_table.name)
                        logger.debug(f"Inserting into table: '{table_name}' (original: '{self._table_name}', dynamic: '{self.dynamic_table.name}')")

                        if not row_data:
                            logger.warning(f"Row {row_num}: No valid columns found for insertion")
                            continue

                        columns = list(row_data.keys())
                        values = list(row_data.values())
                        placeholders = ', '.join([':param' + str(i) for i in range(len(values))])
                        # Quote column names that are reserved keywords
                        quoted_columns = [self._quote_column_name(col) for col in columns]
                        column_names = ', '.join(quoted_columns)

                        # Create parameter dictionary for SQLAlchemy
                        params = {f'param{i}': value for i, value in enumerate(values)}

                        ignore_sql = f"INSERT OR IGNORE INTO {table_name} ({column_names}) VALUES ({placeholders})"
                        result = self.db_session.execute(text(ignore_sql), params)

                        # Check if the row was actually inserted (rowcount > 0) or ignored (rowcount = 0)
                        if result.rowcount == 0:
                            logger.debug(f"Row {row_num}: Skipped duplicate record")
                        else:
                            logger.debug(f"Row {row_num}: Inserted successfully")

                        imported_count += 1

                    except Exception as e:
                        error_count += 1
                        error_msg = str(e)

                        # Provide more specific error messages for common database issues
                        if "NOT NULL constraint failed" in error_msg:
                            self.result.add_error(f"Row {row_num}: Missing required value in column")
                        elif "UNIQUE constraint failed" in error_msg:
                            self.result.add_error(f"Row {row_num}: Duplicate value detected")
                        elif "no such column" in error_msg:
                            self.result.add_error(f"Row {row_num}: Column structure mismatch")
                        else:
                            self.result.add_error(f"Row {row_num}: {error_msg}")

                        logger.warning(f"Error importing row {row_num}: {e}")
                        continue

                # Changes are automatically committed by the session manager
                pass

            except Exception as transaction_error:
                logger.error(f"Import processing failed: {transaction_error}")
                self.result.add_error(f"Import failed: {str(transaction_error)}")

            # Update result
            self.result.success = imported_count > 0
            self.result.total_rows = total_count
            self.result.imported_rows = imported_count
            self.result.error_rows = error_count

            if imported_count > 0:
                logger.info(f"Successfully imported {imported_count} rows into {self.table_name}")

            return self.result

        except Exception as e:
            logger.error(f"Error during CSV import: {e}")
            self.result.add_error(f"Import failed: {str(e)}")
            return self.result
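As a quick sanity check of the name handling above, these are the expected outputs of `_make_safe_name` and `_quote_column_name`; the values are illustrative, and `session` stands in for an open SQLAlchemy session provided by the caller.

```python
# Illustrative expectations only; `session` is assumed to be an open SQLAlchemy session.
importer = GenericCSVImporter(session, "TRNSACTN")

assert importer._make_safe_name("File No.") == "file_no"
assert importer._make_safe_name("2nd Address") == "col_2nd_address"
assert importer._make_safe_name("ORDER") == "order"        # lower-cased here...
assert importer._quote_column_name("order") == '"order"'   # ...then quoted as a reserved word at insert time
```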
334 app/import_export/import_service.py (Normal file)
@@ -0,0 +1,334 @@
"""
Main Import Service - coordinates all CSV importers
"""
from typing import Dict, List, Any, Optional, Union
import logging
from enum import Enum
from sqlalchemy.orm import Session

from .base import ImportResult
from .rolodex_importer import RolodexCSVImporter
from .phone_importer import PhoneCSVImporter
from .files_importer import FilesCSVImporter
from .ledger_importer import LedgerCSVImporter
from .qdros_importer import QdrosCSVImporter
from .generic_importer import GenericCSVImporter

logger = logging.getLogger(__name__)


class TableType(Enum):
    """Supported table types for import"""
    ROLODEX = "rolodex"
    PHONE = "phone"
    FILES = "files"
    LEDGER = "ledger"
    QDROS = "qdros"
    # Generic table types for all other CSV files
    GRUPLKUP = "gruplkup"
    EMPLOYEE = "employee"
    SETUP = "setup"
    FILETYPE = "filetype"
    TRNSTYPE = "trnstype"
    TRNSACTN = "trnsactn"
    TRNSLKUP = "trnslkup"
    RVARLKUP = "rvarlkup"
    FVARLKUP = "fvarlkup"
    FILENOTS = "filenots"
    DEPOSITS = "deposits"
    PAYMENTS = "payments"
    PENSIONS = "pensions"
    PLANINFO = "planinfo"
    # Form tables
    NUMBERAL = "numberal"
    INX_LKUP = "inx_lkup"
    FORM_LST = "form_lst"
    FORM_INX = "form_inx"
    LIFETABL = "lifetabl"
    # Pension tables
    MARRIAGE = "marriage"
    DEATH = "death"
    SEPARATE = "separate"
    SCHEDULE = "schedule"


class ImportService:
    """Main service for handling CSV imports"""

    def __init__(self, db_session: Session):
        self.db_session = db_session
        self._importers = {
            # Use generic importers for all tables to handle legacy CSV structure variations
            TableType.ROLODEX: GenericCSVImporter,   # Use generic importer for rolodex (more flexible)
            TableType.PHONE: GenericCSVImporter,     # Use generic importer for phone
            TableType.FILES: GenericCSVImporter,     # Use generic importer for files
            TableType.LEDGER: GenericCSVImporter,    # Use generic importer for ledger (to avoid FK issues)
            TableType.QDROS: GenericCSVImporter,     # Use generic importer for qdros (to avoid FK issues)
            # Generic importer for all other tables
            TableType.GRUPLKUP: GenericCSVImporter,
            TableType.EMPLOYEE: GenericCSVImporter,
            TableType.SETUP: GenericCSVImporter,
            TableType.FILETYPE: GenericCSVImporter,
            TableType.TRNSTYPE: GenericCSVImporter,
            TableType.TRNSACTN: GenericCSVImporter,
            TableType.TRNSLKUP: GenericCSVImporter,
            TableType.RVARLKUP: GenericCSVImporter,
            TableType.FVARLKUP: GenericCSVImporter,
            TableType.FILENOTS: GenericCSVImporter,
            TableType.DEPOSITS: GenericCSVImporter,
            TableType.PAYMENTS: GenericCSVImporter,
            TableType.PENSIONS: GenericCSVImporter,
            TableType.PLANINFO: GenericCSVImporter,
            TableType.NUMBERAL: GenericCSVImporter,
            TableType.INX_LKUP: GenericCSVImporter,
            TableType.FORM_LST: GenericCSVImporter,
            TableType.FORM_INX: GenericCSVImporter,
            TableType.LIFETABL: GenericCSVImporter,
            TableType.MARRIAGE: GenericCSVImporter,
            TableType.DEATH: GenericCSVImporter,
            TableType.SEPARATE: GenericCSVImporter,
            TableType.SCHEDULE: GenericCSVImporter,
        }

    def get_supported_tables(self) -> List[str]:
        """Get list of supported table names"""
        return [table.value for table in TableType]

    def get_table_schema(self, table_name: str) -> Optional[Dict[str, Any]]:
        """Get schema information for a table"""
        try:
            table_type = TableType(table_name.lower())
            importer_class = self._importers[table_type]

            # Handle generic importer differently
            if importer_class == GenericCSVImporter:
                temp_importer = importer_class(self.db_session, table_name, "temp_schema_check")
            else:
                temp_importer = importer_class(self.db_session, "temp_schema_check")

            return {
                "table_name": temp_importer.table_name,
                "required_fields": temp_importer.required_fields,
                "field_mapping": temp_importer.field_mapping,
                "sample_headers": list(temp_importer.field_mapping.keys())
            }
        except (ValueError, KeyError):
            return None

    def import_csv(
        self,
        table_name: str,
        csv_content: str,
        encoding: str = "utf-8",
        import_id: Optional[str] = None
    ) -> ImportResult:
        """Import CSV data to specified table"""

        try:
            # Validate table name
            table_type = TableType(table_name.lower())
        except ValueError:
            result = ImportResult()
            result.add_error(f"Unsupported table: {table_name}")
            return result

        # Get appropriate importer
        importer_class = self._importers[table_type]

        # Handle generic importer differently
        if importer_class == GenericCSVImporter:
            importer = importer_class(self.db_session, table_name, import_id)
        else:
            importer = importer_class(self.db_session, import_id)

        logger.info(f"Starting CSV import for table: {table_name} (import_id: {importer.import_id})")

        try:
            # Process the CSV
            result = importer.process_csv_content(csv_content, encoding)

            if result.success:
                logger.info(
                    f"Successfully imported {result.imported_rows} rows to {table_name}"
                )
            else:
                logger.warning(
                    f"Import failed for {table_name}: {len(result.errors)} errors"
                )

            return result

        except Exception as e:
            logger.error(f"Unexpected error during import to {table_name}: {str(e)}")
            result = ImportResult()
            result.add_error(f"Unexpected error: {str(e)}")
            return result

    def batch_import(
        self,
        imports: List[Dict[str, Any]]
    ) -> Dict[str, ImportResult]:
        """
        Import multiple CSV files in a batch

        Args:
            imports: List of dicts with keys: table_name, csv_content, encoding

        Returns:
            Dict mapping table names to ImportResult objects
        """
        results = {}

        # Recommended import order (dependencies first)
        import_order = [
            # Core tables with dependencies
            TableType.ROLODEX,   # No dependencies
            TableType.PHONE,     # Depends on ROLODEX
            TableType.FILES,     # Depends on ROLODEX
            TableType.LEDGER,    # Depends on FILES
            TableType.QDROS,     # Depends on FILES
            # Lookup and reference tables (no dependencies)
            TableType.GRUPLKUP,
            TableType.EMPLOYEE,
            TableType.SETUP,
            TableType.FILETYPE,
            TableType.TRNSTYPE,
            TableType.TRNSACTN,
            TableType.TRNSLKUP,
            TableType.RVARLKUP,
            TableType.FVARLKUP,
            TableType.FILENOTS,
            TableType.PLANINFO,
            # Financial tables
            TableType.DEPOSITS,
            TableType.PAYMENTS,
            TableType.PENSIONS,
            # Form tables
            TableType.NUMBERAL,
            TableType.INX_LKUP,
            TableType.FORM_LST,
            TableType.FORM_INX,
            TableType.LIFETABL,
            # Pension tables
            TableType.MARRIAGE,
            TableType.DEATH,
            TableType.SEPARATE,
            TableType.SCHEDULE
        ]

        # Group imports by table type
        imports_by_table = {}
        for import_data in imports:
            table_name = import_data["table_name"].lower()
            if table_name not in imports_by_table:
                imports_by_table[table_name] = []
            imports_by_table[table_name].append(import_data)

        # Track processed tables
        processed_tables = set()

        # Process in dependency order
        for table_type in import_order:
            table_name = table_type.value
            if table_name in imports_by_table:
                table_imports = imports_by_table[table_name]
                processed_tables.add(table_name)

                for import_data in table_imports:
                    result = self.import_csv(
                        table_name,
                        import_data["csv_content"],
                        import_data.get("encoding", "utf-8")
                    )

                    # Use a unique key if multiple imports for same table
                    key = table_name
                    counter = 1
                    while key in results:
                        counter += 1
                        key = f"{table_name}_{counter}"

                    results[key] = result

                    # Stop processing if critical import fails
                    if not result.success and table_type in [TableType.ROLODEX, TableType.FILES]:
                        logger.error(f"Critical import failed for {table_name}, stopping batch")
                        break

                    # Small delay to reduce database lock contention
                    import time
                    time.sleep(0.1)

        # Process any remaining tables not in the explicit order
        for table_name, table_imports in imports_by_table.items():
            if table_name not in processed_tables:
                logger.info(f"Processing table {table_name} (not in explicit order)")

                for import_data in table_imports:
                    result = self.import_csv(
                        table_name,
                        import_data["csv_content"],
                        import_data.get("encoding", "utf-8")
                    )

                    # Use a unique key if multiple imports for same table
                    key = table_name
                    counter = 1
                    while key in results:
                        counter += 1
                        key = f"{table_name}_{counter}"

                    results[key] = result

                    # Small delay to reduce database lock contention
                    import time
                    time.sleep(0.1)

        return results

    def validate_csv_headers(self, table_name: str, csv_content: str) -> ImportResult:
        """Validate CSV headers without importing data"""
        try:
            table_type = TableType(table_name.lower())
        except ValueError:
            result = ImportResult()
            result.add_error(f"Unsupported table: {table_name}")
            return result

        # Get appropriate importer
        importer_class = self._importers[table_type]

        # Handle generic importer differently
        if importer_class == GenericCSVImporter:
            importer = importer_class(self.db_session, table_name, "validation_check")
        else:
            importer = importer_class(self.db_session, "validation_check")

        # Parse headers only
        import csv
        import io

        try:
            delimiter = importer.detect_delimiter(csv_content)
            csv_reader = csv.DictReader(io.StringIO(csv_content), delimiter=delimiter)
            headers = csv_reader.fieldnames or []

            if not headers:
                result = ImportResult()
                result.add_error("No headers found in CSV file")
                return result

            # Validate headers
            result = ImportResult()
            is_valid = importer.validate_headers(headers)
            result.success = is_valid

            if is_valid:
                result.add_warning(f"Headers validated successfully for {table_name}")

            return result

        except Exception as e:
            result = ImportResult()
            result.add_error(f"Error validating headers: {str(e)}")
            return result
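A usage sketch of the batch path, assuming the caller has already read each CSV payload into a string and holds an open SQLAlchemy session; the variable names are placeholders, not part of this change.

```python
# Usage sketch; `session` and the *_csv strings are assumed to come from the caller.
service = ImportService(session)

results = service.batch_import([
    {"table_name": "rolodex", "csv_content": rolodex_csv},
    {"table_name": "files", "csv_content": files_csv},
    {"table_name": "ledger", "csv_content": ledger_csv},
])

# batch_import reorders these so dependencies (rolodex, files) run first.
for table, result in results.items():
    print(table, result.imported_rows, result.error_rows)
```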
113 app/import_export/ledger_importer.py (Normal file)
@@ -0,0 +1,113 @@
"""
LEDGER CSV Importer
"""
from typing import Dict, List, Any
from datetime import date
from sqlalchemy.orm import Session

from .base import BaseCSVImporter, ImportValidationError
from app.models.ledger import Ledger
from app.models.files import File


class LedgerCSVImporter(BaseCSVImporter):
    """CSV importer for LEDGER table"""

    @property
    def table_name(self) -> str:
        return "ledger"

    @property
    def required_fields(self) -> List[str]:
        return ["file_no", "date", "t_code", "t_type", "empl_num", "amount"]

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names"""
        return {
            "file_no": "file_no",
            "item_no": "item_no",
            "date": "date",
            "t_code": "t_code",
            "t_type": "t_type",
            "t_type_l": "t_type_l",
            "empl_num": "empl_num",
            "quantity": "quantity",
            "rate": "rate",
            "amount": "amount",
            "billed": "billed",
            "note": "note"
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> Ledger:
        """Create a Ledger instance from processed row data"""

        # Validate required fields
        required_checks = [
            ("file_no", "File number"),
            ("date", "Date"),
            ("t_code", "Transaction code"),
            ("t_type", "Transaction type"),
            ("empl_num", "Employee number"),
            ("amount", "Amount")
        ]

        for field, display_name in required_checks:
            if not row_data.get(field):
                raise ImportValidationError(f"{display_name} is required")

        # Validate foreign key exists (file number)
        file_exists = self.db_session.query(File).filter_by(file_no=row_data["file_no"]).first()
        if not file_exists:
            raise ImportValidationError(f"File number '{row_data['file_no']}' does not exist")

        # Parse date
        try:
            transaction_date = self.parse_date(row_data["date"])
        except ValueError as e:
            raise ImportValidationError(f"Invalid date: {e}")

        # Parse numeric fields
        try:
            item_no = 1  # Default
            if row_data.get("item_no"):
                item_no = self.parse_int(row_data["item_no"])
                if item_no < 1:
                    raise ImportValidationError("Item number must be positive")
        except ValueError as e:
            raise ImportValidationError(f"Invalid item number: {e}")

        try:
            quantity = self.parse_float(row_data.get("quantity", "0"))
            rate = self.parse_float(row_data.get("rate", "0"))
            amount = self.parse_float(row_data["amount"])
        except ValueError as e:
            raise ImportValidationError(f"Invalid numeric value: {e}")

        # Validate transaction code and type
        t_code = self.normalize_string(row_data["t_code"], 10)
        t_type = self.normalize_string(row_data["t_type"], 1)
        t_type_l = self.normalize_string(row_data.get("t_type_l", ""), 1)

        # Validate billed field (Y/N)
        billed = row_data.get("billed", "N").strip().upper()
        if billed not in ["Y", "N", ""]:
            billed = "N"  # Default to N if invalid

        # Create instance
        ledger = Ledger(
            file_no=self.normalize_string(row_data["file_no"], 45),
            item_no=item_no,
            date=transaction_date,
            t_code=t_code,
            t_type=t_type,
            t_type_l=t_type_l,
            empl_num=self.normalize_string(row_data["empl_num"], 10),
            quantity=quantity,
            rate=rate,
            amount=amount,
            billed=billed,
            note=row_data.get("note", "")  # Text field
        )

        return ledger
160
app/import_export/logging_config.py
Normal file
160
app/import_export/logging_config.py
Normal file
@@ -0,0 +1,160 @@
|
|||||||
|
"""
|
||||||
|
Enhanced logging configuration for import operations
|
||||||
|
"""
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Dict, Any
|
||||||
|
|
||||||
|
|
||||||
|
class ImportLogger:
|
||||||
|
"""Specialized logger for import operations"""
|
||||||
|
|
||||||
|
def __init__(self, import_id: str, table_name: str):
|
||||||
|
self.import_id = import_id
|
||||||
|
self.table_name = table_name
|
||||||
|
self.logger = logging.getLogger(f"import.{table_name}")
|
||||||
|
|
||||||
|
# Create logs directory if it doesn't exist
|
||||||
|
log_dir = "logs/imports"
|
||||||
|
os.makedirs(log_dir, exist_ok=True)
|
||||||
|
|
||||||
|
# Create file handler for this specific import
|
||||||
|
log_file = os.path.join(log_dir, f"{import_id}_{table_name}.log")
|
||||||
|
file_handler = logging.FileHandler(log_file)
|
||||||
|
file_handler.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Create formatter
|
||||||
|
formatter = logging.Formatter(
|
||||||
|
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
||||||
|
)
|
||||||
|
file_handler.setFormatter(formatter)
|
||||||
|
|
||||||
|
# Add handler to logger
|
||||||
|
self.logger.addHandler(file_handler)
|
||||||
|
self.logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
# Track import session details
|
||||||
|
self.session_start = datetime.utcnow()
|
||||||
|
self.row_count = 0
|
||||||
|
self.error_count = 0
|
||||||
|
|
||||||
|
def info(self, message: str, **kwargs):
|
||||||
|
"""Log info message with import context"""
|
||||||
|
self._log_with_context("info", message, **kwargs)
|
||||||
|
|
||||||
|
def warning(self, message: str, **kwargs):
|
||||||
|
"""Log warning message with import context"""
|
||||||
|
self._log_with_context("warning", message, **kwargs)
|
||||||
|
|
||||||
|
def error(self, message: str, **kwargs):
|
||||||
|
"""Log error message with import context"""
|
||||||
|
self.error_count += 1
|
||||||
|
self._log_with_context("error", message, **kwargs)
|
||||||
|
|
||||||
|
def debug(self, message: str, **kwargs):
|
||||||
|
"""Log debug message with import context"""
|
||||||
|
self._log_with_context("debug", message, **kwargs)
|
||||||
|
|
||||||
|
def _log_with_context(self, level: str, message: str, **kwargs):
|
||||||
|
"""Log message with import context"""
|
||||||
|
context = {
|
||||||
|
"import_id": self.import_id,
|
||||||
|
"table": self.table_name,
|
||||||
|
"row_count": self.row_count,
|
||||||
|
**kwargs
|
||||||
|
}
|
||||||
|
|
||||||
|
context_str = " | ".join([f"{k}={v}" for k, v in context.items()])
|
||||||
|
full_message = f"[{context_str}] {message}"
|
||||||
|
|
||||||
|
getattr(self.logger, level)(full_message)
|
||||||
|
|
||||||
|
def log_row_processed(self, row_number: int, success: bool = True):
|
||||||
|
"""Log that a row has been processed"""
|
||||||
|
self.row_count += 1
|
||||||
|
if success:
|
||||||
|
self.debug(f"Row {row_number} processed successfully")
|
||||||
|
else:
|
||||||
|
self.error(f"Row {row_number} failed to process")
|
||||||
|
|
||||||
|
def log_validation_error(self, row_number: int, field: str, value: Any, error: str):
|
||||||
|
"""Log validation error for specific field"""
|
||||||
|
self.error(
|
||||||
|
f"Validation error on row {row_number}",
|
||||||
|
field=field,
|
||||||
|
value=str(value)[:100], # Truncate long values
|
||||||
|
error=error
|
||||||
|
)
|
||||||
|
|
||||||
|
def log_import_summary(self, total_rows: int, imported_rows: int, error_rows: int):
|
||||||
|
"""Log final import summary"""
|
||||||
|
duration = datetime.utcnow() - self.session_start
|
||||||
|
|
||||||
|
self.info(
|
||||||
|
f"Import completed",
|
||||||
|
total_rows=total_rows,
|
||||||
|
imported_rows=imported_rows,
|
||||||
|
error_rows=error_rows,
|
||||||
|
duration_seconds=duration.total_seconds(),
|
||||||
|
success_rate=f"{(imported_rows/total_rows)*100:.1f}%" if total_rows > 0 else "0%"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_import_logger(import_id: str, table_name: str) -> ImportLogger:
|
||||||
|
"""Factory function to create import logger"""
|
||||||
|
return ImportLogger(import_id, table_name)
|
||||||
|
|
||||||
|
|
||||||
|
class ImportMetrics:
|
||||||
|
"""Track import performance metrics"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.start_time = datetime.utcnow()
|
||||||
|
self.end_time = None
|
||||||
|
self.total_rows = 0
|
||||||
|
self.processed_rows = 0
|
||||||
|
self.error_rows = 0
|
||||||
|
self.validation_errors = []
|
||||||
|
self.database_errors = []
|
||||||
|
|
||||||
|
def record_row_processed(self, success: bool = True):
|
||||||
|
"""Record that a row was processed"""
|
||||||
|
self.processed_rows += 1
|
||||||
|
if not success:
|
||||||
|
self.error_rows += 1
|
||||||
|
|
||||||
|
def record_validation_error(self, row_number: int, error: str):
|
||||||
|
"""Record a validation error"""
|
||||||
|
self.validation_errors.append({
|
||||||
|
"row": row_number,
|
||||||
|
"error": error,
|
||||||
|
"timestamp": datetime.utcnow()
|
||||||
|
})
|
||||||
|
|
||||||
|
def record_database_error(self, error: str):
|
||||||
|
"""Record a database error"""
|
||||||
|
self.database_errors.append({
|
||||||
|
"error": error,
|
||||||
|
"timestamp": datetime.utcnow()
|
||||||
|
})
|
||||||
|
|
||||||
|
def finalize(self):
|
||||||
|
"""Finalize metrics collection"""
|
||||||
|
self.end_time = datetime.utcnow()
|
||||||
|
|
||||||
|
def get_summary(self) -> Dict[str, Any]:
|
||||||
|
"""Get metrics summary"""
|
||||||
|
duration = (self.end_time or datetime.utcnow()) - self.start_time
|
||||||
|
|
||||||
|
return {
|
||||||
|
"start_time": self.start_time.isoformat(),
|
||||||
|
"end_time": self.end_time.isoformat() if self.end_time else None,
|
||||||
|
"duration_seconds": duration.total_seconds(),
|
||||||
|
"total_rows": self.total_rows,
|
||||||
|
"processed_rows": self.processed_rows,
|
||||||
|
"error_rows": self.error_rows,
|
||||||
|
"success_rate": (self.processed_rows / self.total_rows * 100) if self.total_rows > 0 else 0,
|
||||||
|
"validation_errors": len(self.validation_errors),
|
||||||
|
"database_errors": len(self.database_errors)
|
||||||
|
}
|
||||||
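A rough usage sketch (not part of the diff) of how create_import_logger and ImportMetrics from the module above can be combined in an import loop; rows and validate_row are placeholders for the caller's own data and checks:

    # Hypothetical wiring of ImportLogger and ImportMetrics; "rows" and "validate_row" are stand-ins.
    logger = create_import_logger(import_id="imp-001", table_name="rolodex")
    metrics = ImportMetrics()
    metrics.total_rows = len(rows)

    for i, row in enumerate(rows, start=1):
        try:
            validate_row(row)  # caller-supplied validation, assumed to raise ValueError on bad data
            logger.log_row_processed(i, success=True)
            metrics.record_row_processed(success=True)
        except ValueError as exc:
            logger.log_validation_error(i, field="unknown", value=row, error=str(exc))
            metrics.record_validation_error(i, str(exc))
            metrics.record_row_processed(success=False)

    metrics.finalize()
    logger.log_import_summary(metrics.total_rows,
                              metrics.processed_rows - metrics.error_rows,
                              metrics.error_rows)
    print(metrics.get_summary())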
54
app/import_export/phone_importer.py
Normal file
@@ -0,0 +1,54 @@
"""
PHONE CSV Importer
"""
from typing import Dict, List, Any
from sqlalchemy.orm import Session

from .base import BaseCSVImporter, ImportValidationError
from app.models.rolodex import Phone, Rolodex


class PhoneCSVImporter(BaseCSVImporter):
    """CSV importer for PHONE table"""

    @property
    def table_name(self) -> str:
        return "phone"

    @property
    def required_fields(self) -> List[str]:
        return ["rolodex_id", "phone"]  # rolodex_id and phone number are required

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names"""
        return {
            "rolodex_id": "rolodex_id",
            "location": "location",
            "phone": "phone"
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> Phone:
        """Create a Phone instance from processed row data"""

        # Validate required fields
        if not row_data.get("rolodex_id"):
            raise ImportValidationError("Rolodex ID is required")
        if not row_data.get("phone"):
            raise ImportValidationError("Phone number is required")

        # Validate foreign key exists
        rolodex_exists = self.db_session.query(Rolodex).filter_by(
            id=row_data["rolodex_id"]
        ).first()
        if not rolodex_exists:
            raise ImportValidationError(f"Rolodex ID '{row_data['rolodex_id']}' does not exist")

        # Create instance with field length validation
        phone = Phone(
            rolodex_id=self.normalize_string(row_data["rolodex_id"], 80),
            location=self.normalize_string(row_data.get("location", ""), 45),
            phone=self.normalize_string(row_data["phone"], 45)
        )

        return phone
137
app/import_export/qdros_importer.py
Normal file
@@ -0,0 +1,137 @@
"""
QDROS CSV Importer
"""
from typing import Dict, List, Any
from datetime import date
from sqlalchemy.orm import Session

from .base import BaseCSVImporter, ImportValidationError
from app.models.qdro import QDRO
from app.models.files import File


class QdrosCSVImporter(BaseCSVImporter):
    """CSV importer for QDROS table"""

    @property
    def table_name(self) -> str:
        return "qdros"

    @property
    def required_fields(self) -> List[str]:
        return ["file_no"]  # Only file_no is strictly required

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names"""
        return {
            "file_no": "file_no",
            "version": "version",
            "plan_id": "plan_id",
            # Legacy CSV fields
            "field1": "field1",
            "field2": "field2",
            "part": "part",
            "altp": "altp",
            "pet": "pet",
            "res": "res",
            # Case information
            "case_type": "case_type",
            "case_code": "case_code",
            "section": "section",
            "case_number": "case_number",
            # Dates
            "judgment_date": "judgment_date",
            "valuation_date": "valuation_date",
            "married_on": "married_on",
            # Award and venue
            "percent_awarded": "percent_awarded",
            "ven_city": "ven_city",
            "ven_cnty": "ven_cnty",
            "ven_st": "ven_st",
            # Document status dates
            "draft_out": "draft_out",
            "draft_apr": "draft_apr",
            "final_out": "final_out",
            # Additional fields
            "judge": "judge",
            "form_name": "form_name",
            "status": "status",
            "content": "content",
            "notes": "notes",
            "approval_status": "approval_status",
            "approved_date": "approved_date",
            "filed_date": "filed_date"
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> QDRO:
        """Create a QDRO instance from processed row data"""

        # Validate required fields
        if not row_data.get("file_no"):
            raise ImportValidationError("File number is required")

        # Validate foreign key exists (file number)
        file_exists = self.db_session.query(File).filter_by(file_no=row_data["file_no"]).first()
        if not file_exists:
            raise ImportValidationError(f"File number '{row_data['file_no']}' does not exist")

        # Parse date fields
        date_fields = [
            "judgment_date", "valuation_date", "married_on",
            "draft_out", "draft_apr", "final_out", "approved_date", "filed_date"
        ]

        parsed_dates = {}
        for field in date_fields:
            if row_data.get(field):
                try:
                    parsed_dates[field] = self.parse_date(row_data[field])
                except ValueError as e:
                    raise ImportValidationError(f"Invalid {field}: {e}")
            else:
                parsed_dates[field] = None

        # Validate state abbreviation length
        ven_st = row_data.get("ven_st", "")
        if ven_st and len(ven_st) > 2:
            self.result.add_warning(f"State abbreviation truncated: {ven_st}")
            ven_st = ven_st[:2]

        # Set default status if not provided
        status = row_data.get("status", "DRAFT")

        # Create instance
        qdro = QDRO(
            file_no=self.normalize_string(row_data["file_no"], 45),
            version=self.normalize_string(row_data.get("version", "01"), 10),
            plan_id=self.normalize_string(row_data.get("plan_id", ""), 45),
            # Legacy CSV fields
            field1=self.normalize_string(row_data.get("field1", ""), 100),
            field2=self.normalize_string(row_data.get("field2", ""), 100),
            part=self.normalize_string(row_data.get("part", ""), 100),
            altp=self.normalize_string(row_data.get("altp", ""), 100),
            pet=self.normalize_string(row_data.get("pet", ""), 100),
            res=self.normalize_string(row_data.get("res", ""), 100),
            # Case information
            case_type=self.normalize_string(row_data.get("case_type", ""), 45),
            case_code=self.normalize_string(row_data.get("case_code", ""), 45),
            section=self.normalize_string(row_data.get("section", ""), 45),
            case_number=self.normalize_string(row_data.get("case_number", ""), 100),
            # Dates
            **parsed_dates,
            # Award and venue
            percent_awarded=self.normalize_string(row_data.get("percent_awarded", ""), 100),
            ven_city=self.normalize_string(row_data.get("ven_city", ""), 50),
            ven_cnty=self.normalize_string(row_data.get("ven_cnty", ""), 50),
            ven_st=ven_st,
            # Additional fields
            judge=self.normalize_string(row_data.get("judge", ""), 100),
            form_name=self.normalize_string(row_data.get("form_name", ""), 200),
            status=self.normalize_string(status, 45),
            content=row_data.get("content", ""),  # Text field
            notes=row_data.get("notes", ""),  # Text field
            approval_status=self.normalize_string(row_data.get("approval_status", ""), 45)
        )

        return qdro
93
app/import_export/rolodex_importer.py
Normal file
@@ -0,0 +1,93 @@
"""
ROLODEX CSV Importer
"""
from typing import Dict, List, Any
from datetime import date
from sqlalchemy.orm import Session

from .base import BaseCSVImporter, ImportValidationError
from app.models.rolodex import Rolodex


class RolodexCSVImporter(BaseCSVImporter):
    """CSV importer for ROLODEX table"""

    @property
    def table_name(self) -> str:
        return "rolodex"

    @property
    def required_fields(self) -> List[str]:
        return ["id", "last"]  # Only ID and last name are required

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names"""
        return {
            "id": "id",
            "last": "last",
            "first": "first",
            "middle": "middle",
            "prefix": "prefix",
            "suffix": "suffix",
            "title": "title",
            "group": "group",
            "a1": "a1",
            "a2": "a2",
            "a3": "a3",
            "city": "city",
            "abrev": "abrev",
            "zip": "zip",
            "email": "email",
            "dob": "dob",
            "ss_number": "ss_number",
            "legal_status": "legal_status",
            "memo": "memo"
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> Rolodex:
        """Create a Rolodex instance from processed row data"""

        # Validate required fields
        if not row_data.get("id"):
            raise ImportValidationError("ID is required")
        if not row_data.get("last"):
            raise ImportValidationError("Last name is required")

        # Check for duplicate ID
        existing = self.db_session.query(Rolodex).filter_by(id=row_data["id"]).first()
        if existing:
            raise ImportValidationError(f"Rolodex ID '{row_data['id']}' already exists")

        # Parse date of birth
        dob = None
        if row_data.get("dob"):
            try:
                dob = self.parse_date(row_data["dob"])
            except ValueError as e:
                raise ImportValidationError(f"Invalid date of birth: {e}")

        # Create instance with field length validation
        rolodex = Rolodex(
            id=self.normalize_string(row_data["id"], 80),
            last=self.normalize_string(row_data["last"], 80),
            first=self.normalize_string(row_data.get("first", ""), 45),
            middle=self.normalize_string(row_data.get("middle", ""), 45),
            prefix=self.normalize_string(row_data.get("prefix", ""), 45),
            suffix=self.normalize_string(row_data.get("suffix", ""), 45),
            title=self.normalize_string(row_data.get("title", ""), 45),
            group=self.normalize_string(row_data.get("group", ""), 45),
            a1=self.normalize_string(row_data.get("a1", ""), 45),
            a2=self.normalize_string(row_data.get("a2", ""), 45),
            a3=self.normalize_string(row_data.get("a3", ""), 45),
            city=self.normalize_string(row_data.get("city", ""), 80),
            abrev=self.normalize_string(row_data.get("abrev", ""), 45),
            zip=self.normalize_string(row_data.get("zip", ""), 45),
            email=self.normalize_string(row_data.get("email", ""), 100),
            dob=dob,
            ss_number=self.normalize_string(row_data.get("ss_number", ""), 20),
            legal_status=self.normalize_string(row_data.get("legal_status", ""), 45),
            memo=row_data.get("memo", "")  # Text field, no length limit
        )

        return rolodex
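Illustrative only: the three importers above all follow the same shape, so adding coverage for another legacy table is mostly a matter of filling in the same four members. The BaseCSVImporter contract itself (app/import_export/base.py) is not part of this diff, and the Employee model import path below is an assumption:

    # Hypothetical importer following the pattern above; model location is assumed.
    from typing import Dict, List, Any

    from .base import BaseCSVImporter, ImportValidationError
    from app.models.lookups import Employee  # assumed module for the EMPLOYEE model


    class EmployeeCSVImporter(BaseCSVImporter):
        """CSV importer for EMPLOYEE table (illustrative sketch)"""

        @property
        def table_name(self) -> str:
            return "employee"

        @property
        def required_fields(self) -> List[str]:
            return ["id"]

        @property
        def field_mapping(self) -> Dict[str, str]:
            return {"id": "id", "name": "name"}

        def create_model_instance(self, row_data: Dict[str, Any]) -> Employee:
            if not row_data.get("id"):
                raise ImportValidationError("ID is required")
            return Employee(
                id=self.normalize_string(row_data["id"], 80),
                name=self.normalize_string(row_data.get("name", ""), 100),
            )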
77
app/main.py
@@ -10,12 +10,13 @@ from fastapi.middleware.cors import CORSMiddleware
 from app.config import settings
 from app.database.base import engine
 from sqlalchemy import text
+from sqlalchemy.orm import sessionmaker
 from app.database.fts import ensure_rolodex_fts, ensure_files_fts, ensure_ledger_fts, ensure_qdros_fts
 from app.database.indexes import ensure_secondary_indexes
 from app.database.schema_updates import ensure_schema_updates
 from app.models import BaseModel
 from app.models.user import User
-from app.auth.security import get_admin_user
+from app.auth.security import get_admin_user, get_password_hash, verify_password
 from app.core.logging import setup_logging, get_logger
 from app.middleware.logging import LoggingMiddleware
 from app.middleware.errors import register_exception_handlers
@@ -54,6 +55,48 @@ ensure_secondary_indexes(engine)
 logger.info("Ensuring schema updates (new columns)")
 ensure_schema_updates(engine)
+
+
+def ensure_admin_user():
+    """Ensure admin user exists and password matches environment variable"""
+    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+    db = SessionLocal()
+
+    try:
+        # Check if admin user exists
+        admin_user = db.query(User).filter(User.username == settings.admin_username).first()
+
+        if not admin_user:
+            # Create admin user if it doesn't exist
+            logger.info(f"Creating admin user '{settings.admin_username}'")
+            admin_user = User(
+                username=settings.admin_username,
+                email=f"{settings.admin_username}@delphicg.local",
+                full_name="System Administrator",
+                hashed_password=get_password_hash(settings.admin_password),
+                is_active=True,
+                is_admin=True
+            )
+            db.add(admin_user)
+            db.commit()
+            logger.info(f"Admin user '{settings.admin_username}' created successfully")
+        else:
+            # Check if password needs to be updated
+            if not verify_password(settings.admin_password, admin_user.hashed_password):
+                logger.info(f"Updating admin password for user '{settings.admin_username}'")
+                admin_user.hashed_password = get_password_hash(settings.admin_password)
+                db.commit()
+                logger.info("Admin password updated successfully")
+            else:
+                logger.debug(f"Admin user '{settings.admin_username}' password is current")
+
+    except Exception as e:
+        logger.error(f"Error ensuring admin user: {e}")
+        db.rollback()
+        raise
+    finally:
+        db.close()
+
+
 # Initialize FastAPI app
 logger.info("Initializing FastAPI application", version=settings.app_version, debug=settings.debug)
 app = FastAPI(
@@ -67,6 +110,11 @@ app = FastAPI(
 async def startup_event():
     """Initialize WebSocket pool and other startup tasks"""
     from app.services.websocket_pool import initialize_websocket_pool
+
+    # Ensure admin user exists and password is synced with environment
+    logger.info("Ensuring admin user exists and password is current")
+    ensure_admin_user()
+
     logger.info("Initializing WebSocket connection pool")
     await initialize_websocket_pool(
         cleanup_interval=60,
@@ -160,8 +208,6 @@ from app.api.documents import router as documents_router
 from app.api.billing import router as billing_router
 from app.api.search import router as search_router
 from app.api.admin import router as admin_router
-from app.api.import_data import router as import_router
-from app.api.flexible import router as flexible_router
 from app.api.support import router as support_router
 from app.api.settings import router as settings_router
 from app.api.mortality import router as mortality_router
@@ -177,6 +223,7 @@ from app.api.document_workflows import router as document_workflows_router
 from app.api.session_management import router as session_management_router
 from app.api.advanced_templates import router as advanced_templates_router
 from app.api.jobs import router as jobs_router
+from app.api.import_csv import router as import_csv_router

 logger.info("Including API routers")
 app.include_router(advanced_variables_router, prefix="/api/variables", tags=["advanced-variables"])
@@ -189,10 +236,8 @@ app.include_router(billing_router, prefix="/api/billing", tags=["billing"])
 app.include_router(documents_router, prefix="/api/documents", tags=["documents"])
 app.include_router(search_router, prefix="/api/search", tags=["search"])
 app.include_router(admin_router, prefix="/api/admin", tags=["admin"])
-app.include_router(import_router, prefix="/api/import", tags=["import"])
 app.include_router(support_router, prefix="/api/support", tags=["support"])
 app.include_router(settings_router, prefix="/api/settings", tags=["settings"])
-app.include_router(flexible_router, prefix="/api")
 app.include_router(mortality_router, prefix="/api/mortality", tags=["mortality"])
 app.include_router(pensions_router, prefix="/api/pensions", tags=["pensions"])
 app.include_router(pension_valuation_router, prefix="/api/pensions", tags=["pensions-valuation"])
@@ -205,6 +250,7 @@ app.include_router(deadlines_router, prefix="/api/deadlines", tags=["deadlines"])
 app.include_router(document_workflows_router, prefix="/api/workflows", tags=["document-workflows"])
 app.include_router(labels_router, prefix="/api/labels", tags=["labels"])
 app.include_router(jobs_router, prefix="/api/jobs", tags=["jobs"])
+app.include_router(import_csv_router, prefix="/api/admin/import", tags=["import"])


 @app.get("/", response_class=HTMLResponse)
@@ -288,22 +334,19 @@ async def admin_page(request: Request):
     )


-@app.get("/import", response_class=HTMLResponse)
-async def import_page(request: Request):
-    """Data import management page (admin only)"""
+@app.get("/admin/import", response_class=HTMLResponse)
+async def admin_import_page(request: Request):
+    """CSV Import page (admin only)"""
     return templates.TemplateResponse(
-        "import.html",
-        {"request": request, "title": "Data Import - " + settings.app_name}
+        "admin_import.html",
+        {"request": request, "title": "CSV Import - " + settings.app_name}
     )


-@app.get("/flexible", response_class=HTMLResponse)
-async def flexible_page(request: Request):
-    """Flexible imports admin page (admin only)."""
-    return templates.TemplateResponse(
-        "flexible.html",
-        {"request": request, "title": "Flexible Imports - " + settings.app_name}
-    )
-
-
 @app.get("/health")
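For reference, a hedged script-level sketch of calling the admin import API wired up above under the /api/admin/import prefix; the host/port, bearer-token handling, and response field names are assumptions drawn from the compose defaults and from static/js/admin_import.js further down:

    # Hypothetical client for the new CSV import endpoints (paths match the router prefix above).
    import requests

    BASE = "http://localhost:6920"                       # default EXTERNAL_PORT mapping (assumed host)
    HEADERS = {"Authorization": "Bearer <admin-token>"}  # token obtained via the normal login flow (assumed)

    # List the tables the importer supports
    print(requests.get(f"{BASE}/api/admin/import/tables", headers=HEADERS).json())

    # Validate CSV headers, then start an import for ROLODEX.csv
    with open("ROLODEX.csv", "rb") as fh:
        print(requests.post(f"{BASE}/api/admin/import/validate", headers=HEADERS,
                            files={"file": fh}, data={"table_name": "rolodex"}).json())

    with open("ROLODEX.csv", "rb") as fh:
        job = requests.post(f"{BASE}/api/admin/import/csv", headers=HEADERS,
                            files={"file": fh}, data={"table_name": "rolodex"}).json()

    # Poll the status endpoint the same way the admin page does
    print(requests.get(f"{BASE}/api/admin/import/status/{job['import_id']}", headers=HEADERS).json())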
app/models/__init__.py
@@ -10,7 +10,6 @@ from .qdro import QDRO, QDROVersion, QDROCommunication
 from .audit import AuditLog, LoginAttempt, ImportAudit, ImportAuditFile
 from .auth import RefreshToken
 from .additional import Deposit, Payment, FileNote, FormVariable, ReportVariable, Document
-from .flexible import FlexibleImport
 from .support import SupportTicket, TicketResponse, TicketStatus, TicketPriority, TicketCategory
 from .pensions import (
     Pension, PensionSchedule, MarriageHistory, DeathBenefit,
@@ -52,7 +51,7 @@ from .lookups import (
 __all__ = [
     "BaseModel", "User", "Rolodex", "Phone", "File", "Ledger", "QDRO", "QDROVersion", "QDROCommunication",
     "AuditLog", "LoginAttempt", "ImportAudit", "ImportAuditFile", "RefreshToken",
-    "Deposit", "Payment", "FileNote", "FormVariable", "ReportVariable", "Document", "FlexibleImport",
+    "Deposit", "Payment", "FileNote", "FormVariable", "ReportVariable", "Document",
     "SupportTicket", "TicketResponse", "TicketStatus", "TicketPriority", "TicketCategory",
     "Pension", "PensionSchedule", "MarriageHistory", "DeathBenefit",
     "SeparationAgreement", "LifeTable", "NumberTable", "PensionResult",
app/models/flexible.py
@@ -1,37 +0,0 @@
-"""
-Flexible storage for unmapped CSV columns during import
-"""
-from sqlalchemy import Column, Integer, String
-from sqlalchemy.types import JSON
-
-from app.models.base import BaseModel
-
-
-class FlexibleImport(BaseModel):
-    """Stores per-row extra/unmapped data for any import, without persisting mapping patterns."""
-
-    __tablename__ = "flexible_imports"
-
-    id = Column(Integer, primary_key=True, autoincrement=True)
-
-    # The CSV filename used by the importer (e.g., "FILES.csv" or arbitrary names in flexible mode)
-    file_type = Column(String(120), nullable=False, index=True)
-
-    # The SQLAlchemy model table this extra data is associated with (if any)
-    target_table = Column(String(120), nullable=True, index=True)
-
-    # Optional link to the primary record created in the target table
-    primary_key_field = Column(String(120), nullable=True)
-    primary_key_value = Column(String(255), nullable=True, index=True)
-
-    # Extra unmapped columns from the CSV row
-    extra_data = Column(JSON, nullable=False)
-
-    def __repr__(self) -> str:  # pragma: no cover - repr utility
-        return (
-            f"<FlexibleImport(id={self.id}, file_type='{self.file_type}', "
-            f"target_table='{self.target_table}', pk_field='{self.primary_key_field}', "
-            f"pk_value='{self.primary_key_value}')>"
-        )
docker-compose.dev.yml
@@ -1,15 +1,15 @@
-version: '3.8'
-
 services:
   delphi-db:
     build:
       context: .
       dockerfile: Dockerfile
+      args:
+        BASE_IMAGE: ${BASE_IMAGE:-python:3.12-slim}
     container_name: delphi-database-dev
     ports:
       - "${EXTERNAL_PORT:-6920}:8000"
     environment:
-      - DATABASE_URL=${DATABASE_URL:-sqlite:///data/delphi_database.db}
+      - DATABASE_URL=${DATABASE_URL:-sqlite:///app/data/delphi_database.db}
       - SECRET_KEY=${SECRET_KEY:-dev-secret-key-not-for-production}
      - DEBUG=${DEBUG:-True}
      - ACCESS_TOKEN_EXPIRE_MINUTES=${ACCESS_TOKEN_EXPIRE_MINUTES:-120}
docker-compose.yml
@@ -1,11 +1,15 @@
 services:
   delphi-db:
-    build: .
+    build:
+      context: .
+      dockerfile: Dockerfile.production
+      args:
+        BASE_IMAGE: ${BASE_IMAGE:-python:3.12-slim}
     container_name: delphi-database
     ports:
       - "${EXTERNAL_PORT:-6920}:8000"
     environment:
-      - DATABASE_URL=${DATABASE_URL:-sqlite:///data/delphi_database.db}
+      - DATABASE_URL=${DATABASE_URL:-sqlite:///app/data/delphi_database.db}
       - SECRET_KEY=${SECRET_KEY}
       - DEBUG=${DEBUG:-False}
       - ACCESS_TOKEN_EXPIRE_MINUTES=${ACCESS_TOKEN_EXPIRE_MINUTES:-30}
763
static/js/admin_import.js
Normal file
763
static/js/admin_import.js
Normal file
@@ -0,0 +1,763 @@
|
|||||||
|
/**
|
||||||
|
* Admin Import JavaScript
|
||||||
|
* Handles CSV file import functionality
|
||||||
|
*/
|
||||||
|
|
||||||
|
class ImportManager {
|
||||||
|
constructor() {
|
||||||
|
this.supportedTables = [];
|
||||||
|
this.batchFileCount = 0;
|
||||||
|
this.currentImportId = null;
|
||||||
|
this.pollInterval = null;
|
||||||
|
this.bulkFiles = [];
|
||||||
|
|
||||||
|
this.init();
|
||||||
|
}
|
||||||
|
|
||||||
|
async init() {
|
||||||
|
await this.loadSupportedTables();
|
||||||
|
await this.loadImportStatus();
|
||||||
|
this.setupEventListeners();
|
||||||
|
}
|
||||||
|
|
||||||
|
async loadSupportedTables() {
|
||||||
|
try {
|
||||||
|
console.log('Loading supported tables...');
|
||||||
|
const response = await window.http.wrappedFetch('/api/admin/import/tables');
|
||||||
|
if (response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
this.supportedTables = data.tables || [];
|
||||||
|
console.log('Supported tables loaded:', this.supportedTables);
|
||||||
|
} else {
|
||||||
|
console.error('Failed to load supported tables, status:', response.status);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to load supported tables:', error);
|
||||||
|
window.alerts.error('Failed to load supported tables');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
setupEventListeners() {
|
||||||
|
// Single import form
|
||||||
|
document.getElementById('importForm').addEventListener('submit', (e) => {
|
||||||
|
e.preventDefault();
|
||||||
|
this.handleSingleImport();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Validate button
|
||||||
|
document.getElementById('validateBtn').addEventListener('click', () => {
|
||||||
|
this.validateHeaders();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Table selection change
|
||||||
|
document.getElementById('tableSelect').addEventListener('change', (e) => {
|
||||||
|
this.onTableChange(e.target.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Bulk file upload
|
||||||
|
document.getElementById('bulkFiles').addEventListener('change', (e) => {
|
||||||
|
this.handleBulkFileSelection(e);
|
||||||
|
});
|
||||||
|
|
||||||
|
document.getElementById('autoMapBtn').addEventListener('click', () => {
|
||||||
|
this.autoMapTables();
|
||||||
|
});
|
||||||
|
|
||||||
|
document.getElementById('clearAllBtn').addEventListener('click', () => {
|
||||||
|
this.clearAllFiles();
|
||||||
|
});
|
||||||
|
|
||||||
|
document.getElementById('bulkUploadBtn').addEventListener('click', () => {
|
||||||
|
this.handleBulkUpload();
|
||||||
|
});
|
||||||
|
|
||||||
|
// Status refresh button
|
||||||
|
document.getElementById('refreshStatusBtn').addEventListener('click', () => {
|
||||||
|
this.loadImportStatus();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async onTableChange(tableName) {
|
||||||
|
const schemaInfo = document.getElementById('schemaInfo');
|
||||||
|
const schemaDetails = document.getElementById('schemaDetails');
|
||||||
|
|
||||||
|
if (!tableName) {
|
||||||
|
schemaInfo.classList.add('hidden');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
console.log('Loading schema for table:', tableName);
|
||||||
|
const response = await window.http.wrappedFetch(`/api/admin/import/tables/${tableName}/schema`);
|
||||||
|
if (response.ok) {
|
||||||
|
const data = await response.json();
|
||||||
|
const schema = data.schema;
|
||||||
|
console.log('Schema loaded for', tableName, ':', schema);
|
||||||
|
|
||||||
|
let html = '<div class="grid grid-cols-1 md:grid-cols-2 gap-4">';
|
||||||
|
html += '<div><h4 class="font-semibold mb-2">Required Fields:</h4>';
|
||||||
|
html += '<ul class="list-disc list-inside space-y-1">';
|
||||||
|
schema.required_fields.forEach(field => {
|
||||||
|
html += `<li><code class="bg-blue-100 px-1 rounded">${field}</code></li>`;
|
||||||
|
});
|
||||||
|
html += '</ul></div>';
|
||||||
|
|
||||||
|
html += '<div><h4 class="font-semibold mb-2">All Available Fields:</h4>';
|
||||||
|
html += '<div class="max-h-32 overflow-y-auto">';
|
||||||
|
html += '<div class="grid grid-cols-2 gap-1 text-xs">';
|
||||||
|
Object.keys(schema.field_mapping).forEach(field => {
|
||||||
|
html += `<code class="bg-gray-100 px-1 rounded">${field}</code>`;
|
||||||
|
});
|
||||||
|
html += '</div></div></div></div>';
|
||||||
|
|
||||||
|
schemaDetails.innerHTML = html;
|
||||||
|
schemaInfo.classList.remove('hidden');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to load schema:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async validateHeaders() {
|
||||||
|
const tableSelect = document.getElementById('tableSelect');
|
||||||
|
const fileInput = document.getElementById('csvFile');
|
||||||
|
|
||||||
|
console.log('Starting header validation...');
|
||||||
|
|
||||||
|
if (!tableSelect.value) {
|
||||||
|
console.warn('No table selected for validation');
|
||||||
|
window.alerts.error('Please select a table type');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!fileInput.files[0]) {
|
||||||
|
console.warn('No file selected for validation');
|
||||||
|
window.alerts.error('Please select a CSV file');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Validating headers for table:', tableSelect.value, 'file:', fileInput.files[0].name);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('table_name', tableSelect.value);
|
||||||
|
formData.append('file', fileInput.files[0]);
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await window.http.wrappedFetch('/api/admin/import/validate', {
|
||||||
|
method: 'POST',
|
||||||
|
body: formData
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log('Validation response status:', response.status);
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
const result = await response.json();
|
||||||
|
console.log('Validation result:', result);
|
||||||
|
if (result.success) {
|
||||||
|
window.alerts.success('CSV headers validated successfully!');
|
||||||
|
} else {
|
||||||
|
const errors = result.validation_result.errors.join('\\n');
|
||||||
|
console.error('Validation errors:', result.validation_result.errors);
|
||||||
|
window.alerts.error(`Validation failed:\\n${errors}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const error = await response.json();
|
||||||
|
console.error('Validation failed with error:', error);
|
||||||
|
window.alerts.error(`Validation failed: ${error.detail}`);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Validation error:', error);
|
||||||
|
window.alerts.error('Failed to validate CSV headers');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async handleSingleImport() {
|
||||||
|
const tableSelect = document.getElementById('tableSelect');
|
||||||
|
const fileInput = document.getElementById('csvFile');
|
||||||
|
|
||||||
|
console.log('Starting single import...');
|
||||||
|
|
||||||
|
if (!tableSelect.value) {
|
||||||
|
console.warn('No table selected for import');
|
||||||
|
window.alerts.error('Please select a table type');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!fileInput.files[0]) {
|
||||||
|
console.warn('No file selected for import');
|
||||||
|
window.alerts.error('Please select a CSV file');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Importing to table:', tableSelect.value, 'file:', fileInput.files[0].name);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
formData.append('table_name', tableSelect.value);
|
||||||
|
formData.append('file', fileInput.files[0]);
|
||||||
|
|
||||||
|
// Show progress
|
||||||
|
this.showProgress();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await window.http.wrappedFetch('/api/admin/import/csv', {
|
||||||
|
method: 'POST',
|
||||||
|
body: formData
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log('Import response status:', response.status);
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
const result = await response.json();
|
||||||
|
console.log('Import started successfully:', result);
|
||||||
|
this.currentImportId = result.import_id;
|
||||||
|
this.updateProgress(`Import started for ${result.table_name} (ID: ${result.import_id})`, 'info');
|
||||||
|
this.startPolling();
|
||||||
|
} else {
|
||||||
|
const error = await response.json();
|
||||||
|
console.error('Import failed:', error);
|
||||||
|
this.updateProgress(`Import failed: ${error.detail}`, 'error');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Import error:', error);
|
||||||
|
this.updateProgress('Import failed: Network error', 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
showProgress() {
|
||||||
|
document.getElementById('importProgress').classList.remove('hidden');
|
||||||
|
document.getElementById('importResults').classList.add('hidden');
|
||||||
|
}
|
||||||
|
|
||||||
|
updateProgress(message, type = 'info') {
|
||||||
|
const progressDetails = document.getElementById('progressDetails');
|
||||||
|
const timestamp = new Date().toLocaleTimeString();
|
||||||
|
|
||||||
|
let colorClass = 'text-blue-600';
|
||||||
|
if (type === 'error') colorClass = 'text-red-600';
|
||||||
|
if (type === 'success') colorClass = 'text-green-600';
|
||||||
|
if (type === 'warning') colorClass = 'text-yellow-600';
|
||||||
|
|
||||||
|
progressDetails.innerHTML += `
|
||||||
|
<div class="flex items-center space-x-2 mb-2">
|
||||||
|
<span class="text-gray-500 text-sm">${timestamp}</span>
|
||||||
|
<span class="${colorClass}">${message}</span>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Scroll to bottom
|
||||||
|
progressDetails.scrollTop = progressDetails.scrollHeight;
|
||||||
|
}
|
||||||
|
|
||||||
|
startPolling() {
|
||||||
|
if (this.pollInterval) {
|
||||||
|
clearInterval(this.pollInterval);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pollInterval = setInterval(async () => {
|
||||||
|
await this.checkImportStatus();
|
||||||
|
}, 2000); // Poll every 2 seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
startBatchPolling() {
|
||||||
|
if (this.pollInterval) {
|
||||||
|
clearInterval(this.pollInterval);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pollInterval = setInterval(async () => {
|
||||||
|
await this.checkBatchStatus();
|
||||||
|
}, 2000); // Poll every 2 seconds
|
||||||
|
}
|
||||||
|
|
||||||
|
async checkImportStatus() {
|
||||||
|
if (!this.currentImportId) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await window.http.wrappedFetch(`/api/admin/import/status/${this.currentImportId}`);
|
||||||
|
if (response.ok) {
|
||||||
|
const status = await response.json();
|
||||||
|
|
||||||
|
if (status.status === 'COMPLETED') {
|
||||||
|
clearInterval(this.pollInterval);
|
||||||
|
this.updateProgress('Import completed successfully!', 'success');
|
||||||
|
this.showResults(status.result);
|
||||||
|
// Refresh status after successful import
|
||||||
|
setTimeout(() => {
|
||||||
|
this.loadImportStatus();
|
||||||
|
}, 1000);
|
||||||
|
} else if (status.status === 'FAILED') {
|
||||||
|
clearInterval(this.pollInterval);
|
||||||
|
this.updateProgress(`Import failed: ${status.error || 'Unknown error'}`, 'error');
|
||||||
|
if (status.result) {
|
||||||
|
this.showResults(status.result);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this.updateProgress(`Import status: ${status.status}`, 'info');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Status check error:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async checkBatchStatus() {
|
||||||
|
if (!this.currentImportIds || !Array.isArray(this.currentImportIds)) return;
|
||||||
|
|
||||||
|
let allCompleted = true;
|
||||||
|
let anyFailed = false;
|
||||||
|
|
||||||
|
for (const importId of this.currentImportIds) {
|
||||||
|
try {
|
||||||
|
const response = await window.http.wrappedFetch(`/api/admin/import/status/${importId}`);
|
||||||
|
if (response.ok) {
|
||||||
|
const status = await response.json();
|
||||||
|
|
||||||
|
if (status.status === 'PROCESSING') {
|
||||||
|
allCompleted = false;
|
||||||
|
} else if (status.status === 'FAILED') {
|
||||||
|
anyFailed = true;
|
||||||
|
this.updateProgress(`${status.table_name} import failed: ${status.error || 'Unknown error'}`, 'error');
|
||||||
|
} else if (status.status === 'COMPLETED') {
|
||||||
|
this.updateProgress(`${status.table_name} import completed`, 'success');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Batch status check error:', error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (allCompleted) {
|
||||||
|
clearInterval(this.pollInterval);
|
||||||
|
const message = anyFailed ? 'Batch import completed with some failures' : 'Batch import completed successfully!';
|
||||||
|
const type = anyFailed ? 'warning' : 'success';
|
||||||
|
this.updateProgress(message, type);
|
||||||
|
|
||||||
|
// Refresh the import status after batch completion
|
||||||
|
setTimeout(() => {
|
||||||
|
this.loadImportStatus();
|
||||||
|
}, 1000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
showResults(result) {
|
||||||
|
const resultsContent = document.getElementById('resultsContent');
|
||||||
|
const resultsDiv = document.getElementById('importResults');
|
||||||
|
|
||||||
|
let html = '<div class="space-y-4">';
|
||||||
|
|
||||||
|
// Summary
|
||||||
|
html += `
|
||||||
|
<div class="grid grid-cols-2 md:grid-cols-4 gap-4">
|
||||||
|
<div class="bg-blue-50 p-3 rounded">
|
||||||
|
<div class="text-2xl font-bold text-blue-600">${result.total_rows}</div>
|
||||||
|
<div class="text-sm text-blue-800">Total Rows</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-green-50 p-3 rounded">
|
||||||
|
<div class="text-2xl font-bold text-green-600">${result.imported_rows}</div>
|
||||||
|
<div class="text-sm text-green-800">Imported</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-yellow-50 p-3 rounded">
|
||||||
|
<div class="text-2xl font-bold text-yellow-600">${result.skipped_rows}</div>
|
||||||
|
<div class="text-sm text-yellow-800">Skipped</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-red-50 p-3 rounded">
|
||||||
|
<div class="text-2xl font-bold text-red-600">${result.error_rows}</div>
|
||||||
|
<div class="text-sm text-red-800">Errors</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
|
||||||
|
// Errors
|
||||||
|
if (result.errors && result.errors.length > 0) {
|
||||||
|
html += '<div class="bg-red-50 border border-red-200 rounded p-4">';
|
||||||
|
html += '<h4 class="font-semibold text-red-800 mb-2">Errors:</h4>';
|
||||||
|
html += '<div class="text-sm text-red-700 space-y-1 max-h-40 overflow-y-auto">';
|
||||||
|
result.errors.forEach(error => {
|
||||||
|
html += `<div>${this.escapeHtml(error)}</div>`;
|
||||||
|
});
|
||||||
|
html += '</div></div>';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Warnings
|
||||||
|
if (result.warnings && result.warnings.length > 0) {
|
||||||
|
html += '<div class="bg-yellow-50 border border-yellow-200 rounded p-4">';
|
||||||
|
html += '<h4 class="font-semibold text-yellow-800 mb-2">Warnings:</h4>';
|
||||||
|
html += '<div class="text-sm text-yellow-700 space-y-1 max-h-40 overflow-y-auto">';
|
||||||
|
result.warnings.forEach(warning => {
|
||||||
|
html += `<div>${this.escapeHtml(warning)}</div>`;
|
||||||
|
});
|
||||||
|
html += '</div></div>';
|
||||||
|
}
|
||||||
|
|
||||||
|
html += '</div>';
|
||||||
|
|
||||||
|
resultsContent.innerHTML = html;
|
||||||
|
resultsDiv.classList.remove('hidden');
|
||||||
|
}
|
||||||
|
|
||||||
|
handleBulkFileSelection(event) {
|
||||||
|
const files = Array.from(event.target.files);
|
||||||
|
console.log('Selected files:', files);
|
||||||
|
|
||||||
|
this.bulkFiles = files.map(file => {
|
||||||
|
const suggestion = this.suggestTableType(file.name);
|
||||||
|
return {
|
||||||
|
file: file,
|
||||||
|
name: file.name,
|
||||||
|
size: file.size,
|
||||||
|
suggested_table: suggestion || 'skip' // Default to skip for unknown files
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
this.renderBulkFiles();
|
||||||
|
this.updateBulkControls();
|
||||||
|
}
|
||||||
|
|
||||||
|
suggestTableType(filename) {
|
||||||
|
const name = filename.toUpperCase().replace('.CSV', '');
|
||||||
|
const mapping = {
|
||||||
|
// Core supported tables
|
||||||
|
'ROLODEX': 'rolodex',
|
||||||
|
'ROLEX_V': 'rolodex', // ROLEX_V variant
|
||||||
|
'PHONE': 'phone',
|
||||||
|
'FILES': 'files',
|
||||||
|
'FILES_R': 'files',
|
||||||
|
'FILES_V': 'files',
|
||||||
|
'LEDGER': 'ledger',
|
||||||
|
'QDROS': 'qdros',
|
||||||
|
// Lookup and reference tables
|
||||||
|
'GRUPLKUP': 'gruplkup',
|
||||||
|
'PLANINFO': 'planinfo',
|
||||||
|
'RVARLKUP': 'rvarlkup',
|
||||||
|
'FVARLKUP': 'fvarlkup',
|
||||||
|
'TRNSLKUP': 'trnslkup',
|
||||||
|
'EMPLOYEE': 'employee',
|
||||||
|
'FILETYPE': 'filetype',
|
||||||
|
'TRNSTYPE': 'trnstype',
|
||||||
|
'TRNSACTN': 'trnsactn',
|
||||||
|
'FILENOTS': 'filenots',
|
||||||
|
'SETUP': 'setup',
|
||||||
|
'PENSIONS': 'pensions',
|
||||||
|
'PAYMENTS': 'payments',
|
||||||
|
'DEPOSITS': 'deposits',
|
||||||
|
// Form tables
|
||||||
|
'NUMBERAL': 'numberal',
|
||||||
|
'INX_LKUP': 'inx_lkup',
|
||||||
|
'FORM_LST': 'form_lst',
|
||||||
|
'FORM_INX': 'form_inx',
|
||||||
|
'LIFETABL': 'lifetabl',
|
||||||
|
// Pension tables
|
||||||
|
'MARRIAGE': 'marriage',
|
||||||
|
'DEATH': 'death',
|
||||||
|
'SEPARATE': 'separate',
|
||||||
|
'SCHEDULE': 'schedule'
|
||||||
|
};
|
||||||
|
return mapping[name] || '';
|
||||||
|
}
|
||||||
|
|
||||||
|
renderBulkFiles() {
|
||||||
|
const filesDisplay = document.getElementById('bulkFilesDisplay');
|
||||||
|
const filesList = document.getElementById('bulkFilesList');
|
||||||
|
const fileCount = document.getElementById('bulkFileCount');
|
||||||
|
|
||||||
|
fileCount.textContent = this.bulkFiles.length;
|
||||||
|
|
||||||
|
if (this.bulkFiles.length === 0) {
|
||||||
|
filesDisplay.classList.add('hidden');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
filesDisplay.classList.remove('hidden');
|
||||||
|
|
||||||
|
let html = '';
|
||||||
|
this.bulkFiles.forEach((fileObj, index) => {
|
||||||
|
const sizeKB = Math.round(fileObj.size / 1024);
|
||||||
|
|
||||||
|
html += `
|
||||||
|
<div class="flex items-center justify-between p-3 bg-white dark:bg-neutral-700 rounded-lg border border-neutral-200 dark:border-neutral-600">
|
||||||
|
<div class="flex items-center space-x-3">
|
||||||
|
<i class="fa-solid fa-file-csv text-primary-600"></i>
|
||||||
|
<div>
|
||||||
|
<div class="font-medium text-neutral-900 dark:text-neutral-100">
|
||||||
|
${fileObj.name}
|
||||||
|
</div>
|
||||||
|
<p class="text-sm text-neutral-500 dark:text-neutral-400">${sizeKB} KB</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="flex items-center space-x-2">
|
||||||
|
<select onchange="importManager.updateBulkFileTableMapping(${index}, this.value)"
|
||||||
|
class="px-3 py-1 border border-neutral-300 dark:border-neutral-600 rounded-md text-sm bg-white dark:bg-neutral-700 text-neutral-900 dark:text-neutral-100">
|
||||||
|
<option value="">-- Select Table --</option>
|
||||||
|
<optgroup label="Core Tables">
|
||||||
|
<option value="rolodex" ${fileObj.suggested_table === 'rolodex' ? 'selected' : ''}>ROLODEX (Contacts)</option>
|
||||||
|
<option value="phone" ${fileObj.suggested_table === 'phone' ? 'selected' : ''}>PHONE (Phone Numbers)</option>
|
||||||
|
<option value="files" ${fileObj.suggested_table === 'files' ? 'selected' : ''}>FILES (Case Files)</option>
|
||||||
|
<option value="ledger" ${fileObj.suggested_table === 'ledger' ? 'selected' : ''}>LEDGER (Financial)</option>
|
||||||
|
<option value="qdros" ${fileObj.suggested_table === 'qdros' ? 'selected' : ''}>QDROS (Documents)</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Lookup Tables">
|
||||||
|
<option value="gruplkup" ${fileObj.suggested_table === 'gruplkup' ? 'selected' : ''}>GRUPLKUP</option>
|
||||||
|
<option value="employee" ${fileObj.suggested_table === 'employee' ? 'selected' : ''}>EMPLOYEE</option>
|
||||||
|
<option value="filetype" ${fileObj.suggested_table === 'filetype' ? 'selected' : ''}>FILETYPE</option>
|
||||||
|
<option value="trnstype" ${fileObj.suggested_table === 'trnstype' ? 'selected' : ''}>TRNSTYPE</option>
|
||||||
|
<option value="trnslkup" ${fileObj.suggested_table === 'trnslkup' ? 'selected' : ''}>TRNSLKUP</option>
|
||||||
|
<option value="rvarlkup" ${fileObj.suggested_table === 'rvarlkup' ? 'selected' : ''}>RVARLKUP</option>
|
||||||
|
<option value="fvarlkup" ${fileObj.suggested_table === 'fvarlkup' ? 'selected' : ''}>FVARLKUP</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Financial Tables">
|
||||||
|
<option value="payments" ${fileObj.suggested_table === 'payments' ? 'selected' : ''}>PAYMENTS</option>
|
||||||
|
<option value="deposits" ${fileObj.suggested_table === 'deposits' ? 'selected' : ''}>DEPOSITS</option>
|
||||||
|
<option value="trnsactn" ${fileObj.suggested_table === 'trnsactn' ? 'selected' : ''}>TRNSACTN</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Forms & Documents">
|
||||||
|
<option value="numberal" ${fileObj.suggested_table === 'numberal' ? 'selected' : ''}>NUMBERAL</option>
|
||||||
|
<option value="inx_lkup" ${fileObj.suggested_table === 'inx_lkup' ? 'selected' : ''}>INX_LKUP</option>
|
||||||
|
<option value="form_lst" ${fileObj.suggested_table === 'form_lst' ? 'selected' : ''}>FORM_LST</option>
|
||||||
|
<option value="form_inx" ${fileObj.suggested_table === 'form_inx' ? 'selected' : ''}>FORM_INX</option>
|
||||||
|
<option value="lifetabl" ${fileObj.suggested_table === 'lifetabl' ? 'selected' : ''}>LIFETABL</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Pension Tables">
|
||||||
|
<option value="pensions" ${fileObj.suggested_table === 'pensions' ? 'selected' : ''}>PENSIONS</option>
|
||||||
|
<option value="marriage" ${fileObj.suggested_table === 'marriage' ? 'selected' : ''}>MARRIAGE</option>
|
||||||
|
<option value="death" ${fileObj.suggested_table === 'death' ? 'selected' : ''}>DEATH</option>
|
||||||
|
<option value="separate" ${fileObj.suggested_table === 'separate' ? 'selected' : ''}>SEPARATE</option>
|
||||||
|
<option value="schedule" ${fileObj.suggested_table === 'schedule' ? 'selected' : ''}>SCHEDULE</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Configuration">
|
||||||
|
<option value="setup" ${fileObj.suggested_table === 'setup' ? 'selected' : ''}>SETUP</option>
|
||||||
|
<option value="planinfo" ${fileObj.suggested_table === 'planinfo' ? 'selected' : ''}>PLANINFO</option>
|
||||||
|
<option value="filenots" ${fileObj.suggested_table === 'filenots' ? 'selected' : ''}>FILENOTS</option>
|
||||||
|
</optgroup>
|
||||||
|
<optgroup label="Other">
|
||||||
|
<option value="skip" ${fileObj.suggested_table === 'skip' ? 'selected' : ''}>⚠️ SKIP (Don't Import)</option>
|
||||||
|
</optgroup>
|
||||||
|
</select>
|
||||||
|
<button onclick="importManager.removeBulkFile(${index})"
|
||||||
|
class="px-2 py-1 bg-red-600 text-white rounded-md hover:bg-red-700 text-sm">
|
||||||
|
<i class="fa-solid fa-times"></i>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
});
|
||||||
|
|
||||||
|
filesList.innerHTML = html;
|
||||||
|
}
|
||||||
|
|
||||||
|
updateBulkFileTableMapping(index, tableType) {
|
||||||
|
if (this.bulkFiles[index]) {
|
||||||
|
this.bulkFiles[index].suggested_table = tableType;
|
||||||
|
}
|
||||||
|
this.updateBulkControls();
|
||||||
|
}
|
||||||
|
|
||||||
|
removeBulkFile(index) {
|
||||||
|
this.bulkFiles.splice(index, 1);
|
||||||
|
this.renderBulkFiles();
|
||||||
|
this.updateBulkControls();
|
||||||
|
}
|
||||||
|
|
||||||
|
autoMapTables() {
|
||||||
|
this.bulkFiles.forEach(fileObj => {
|
||||||
|
if (!fileObj.suggested_table || fileObj.suggested_table === '') {
|
||||||
|
const suggestion = this.suggestTableType(fileObj.name);
|
||||||
|
// If no mapping found, default to 'skip' for unknown files
|
||||||
|
fileObj.suggested_table = suggestion || 'skip';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
this.renderBulkFiles();
|
||||||
|
this.updateBulkControls();
|
||||||
|
}
|
||||||
|
|
||||||
|
clearAllFiles() {
|
||||||
|
this.bulkFiles = [];
|
||||||
|
document.getElementById('bulkFiles').value = '';
|
||||||
|
this.renderBulkFiles();
|
||||||
|
this.updateBulkControls();
|
||||||
|
}
|
||||||
|
|
||||||
|
updateBulkControls() {
|
||||||
|
const autoMapBtn = document.getElementById('autoMapBtn');
|
||||||
|
const clearAllBtn = document.getElementById('clearAllBtn');
|
||||||
|
const uploadBtn = document.getElementById('bulkUploadBtn');
|
||||||
|
|
||||||
|
const hasFiles = this.bulkFiles.length > 0;
|
||||||
|
const allMapped = this.bulkFiles.every(f => f.suggested_table && f.suggested_table !== '');
|
||||||
|
|
||||||
|
autoMapBtn.disabled = !hasFiles;
|
||||||
|
clearAllBtn.disabled = !hasFiles;
|
||||||
|
uploadBtn.disabled = !hasFiles || !allMapped;
|
||||||
|
|
||||||
|
if (hasFiles) {
|
||||||
|
const mappedCount = this.bulkFiles.filter(f => f.suggested_table && f.suggested_table !== '').length;
|
||||||
|
const importCount = this.bulkFiles.filter(f => f.suggested_table && f.suggested_table !== 'skip').length;
|
||||||
|
uploadBtn.innerHTML = `
|
||||||
|
<i class="fa-solid fa-cloud-upload"></i>
|
||||||
|
<span>Upload & Import ${importCount} Files (${mappedCount} mapped)</span>
|
||||||
|
`;
|
||||||
|
} else {
|
||||||
|
uploadBtn.innerHTML = `
|
||||||
|
<i class="fa-solid fa-cloud-upload"></i>
|
||||||
|
<span>Upload & Import All</span>
|
||||||
|
`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async handleBulkUpload() {
|
||||||
|
if (this.bulkFiles.length === 0) {
|
||||||
|
window.alerts.error('Please select files to upload');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate all files have table mappings
|
||||||
|
const unmappedFiles = this.bulkFiles.filter(f => !f.suggested_table || f.suggested_table === '');
|
||||||
|
if (unmappedFiles.length > 0) {
|
||||||
|
window.alerts.error(`Please select table types for: ${unmappedFiles.map(f => f.name).join(', ')}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filter out files marked as 'skip'
|
||||||
|
const filesToImport = this.bulkFiles.filter(f => f.suggested_table !== 'skip');
|
||||||
|
|
||||||
|
if (filesToImport.length === 0) {
|
||||||
|
window.alerts.error('No files selected for import (all marked as skip)');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Starting bulk upload:', filesToImport);
|
||||||
|
|
||||||
|
// Show progress
|
||||||
|
this.showProgress();
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Use the existing batch endpoint that handles FormData
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
// Add only files that are not marked as 'skip'
|
||||||
|
filesToImport.forEach(fileObj => {
|
||||||
|
formData.append('files', fileObj.file);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Add corresponding table names
|
||||||
|
filesToImport.forEach(fileObj => {
|
||||||
|
formData.append('table_names', fileObj.suggested_table);
|
||||||
|
});
|
||||||
|
|
||||||
|
const response = await window.http.wrappedFetch('/api/admin/import/batch', {
|
||||||
|
method: 'POST',
|
||||||
|
body: formData
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.ok) {
|
||||||
|
const result = await response.json();
|
||||||
|
this.currentImportIds = result.import_ids;
|
||||||
|
this.updateProgress(`Bulk upload started for ${result.total_files} files`, 'info');
|
||||||
|
this.startBatchPolling();
|
||||||
|
} else {
|
||||||
|
const error = await response.json();
|
||||||
|
this.updateProgress(`Bulk upload failed: ${error.detail}`, 'error');
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Bulk upload error:', error);
|
||||||
|
this.updateProgress('Bulk upload failed: Network error', 'error');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
    async loadImportStatus() {
        try {
            console.log('Loading import status...');
            const response = await window.http.wrappedFetch('/api/admin/import/status');
            if (response.ok) {
                const data = await response.json();
                this.displayImportStatus(data);
                console.log('Import status loaded:', data);
            } else {
                console.error('Failed to load import status, status:', response.status);
                window.alerts.error('Failed to load import status');
            }
        } catch (error) {
            console.error('Failed to load import status:', error);
            window.alerts.error('Failed to load import status');
        }
    }

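    // NOTE (editor sketch, not part of the original commit): displayImportStatus() below only
    // reads the fields listed here, so the /api/admin/import/status response is assumed to
    // look roughly like this (values are illustrative only):
    //
    //     {
    //         summary: {
    //             total_tables: 5, imported_tables: 3, empty_tables: 1, missing_tables: 1,
    //             total_rows: 12345, completion_percentage: 60
    //         },
    //         categories: {
    //             Core: [
    //                 { display_name: 'ROLODEX', imported: true, exists: true, row_count: 1200, expected_files: ['rolodex.csv'] },
    //                 { display_name: 'QDROS', imported: false, exists: false, row_count: 0, expected_files: ['qdros.csv'] }
    //             ]
    //         }
    //     }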
    displayImportStatus(data) {
        const summary = data.summary;
        const categories = data.categories;

        // Update summary stats
        document.getElementById('totalTables').textContent = summary.total_tables;
        document.getElementById('importedTables').textContent = summary.imported_tables;
        document.getElementById('emptyTables').textContent = summary.empty_tables;
        document.getElementById('missingTables').textContent = summary.missing_tables;
        document.getElementById('totalRows').textContent = summary.total_rows.toLocaleString();
        document.getElementById('completionPercentage').textContent = `${summary.completion_percentage}%`;

        // Update progress bar
        const progressBar = document.getElementById('progressBar');
        progressBar.style.width = `${summary.completion_percentage}%`;

        // Display categories
        const categoryContainer = document.getElementById('statusByCategory');
        categoryContainer.innerHTML = '';

        Object.keys(categories).forEach(categoryName => {
            const tables = categories[categoryName];
            const categoryDiv = document.createElement('div');
            categoryDiv.className = 'bg-neutral-50 dark:bg-neutral-900 rounded-lg p-4';

            const importedCount = tables.filter(t => t.imported).length;
            const totalCount = tables.length;

            categoryDiv.innerHTML = `
                <h3 class="font-semibold text-neutral-900 dark:text-neutral-100 mb-3 flex items-center justify-between">
                    <span>${categoryName} Tables</span>
                    <span class="text-sm font-normal text-neutral-600 dark:text-neutral-400">${importedCount}/${totalCount} imported</span>
                </h3>
                <div class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-3">
                    ${tables.map(table => `
                        <div class="flex items-center justify-between p-3 bg-white dark:bg-neutral-800 rounded-lg border border-neutral-200 dark:border-neutral-600">
                            <div class="flex items-center space-x-3">
                                <div class="flex-shrink-0">
                                    ${table.imported ?
                                        '<i class="fa-solid fa-check-circle text-green-600"></i>' :
                                        table.exists ?
                                            '<i class="fa-solid fa-exclamation-circle text-yellow-600"></i>' :
                                            '<i class="fa-solid fa-times-circle text-red-600"></i>'
                                    }
                                </div>
                                <div>
                                    <div class="font-medium text-neutral-900 dark:text-neutral-100">${table.display_name}</div>
                                    <div class="text-sm text-neutral-500 dark:text-neutral-400">
                                        ${table.imported ?
                                            `${table.row_count.toLocaleString()} rows` :
                                            table.exists ?
                                                'Empty table' :
                                                'Not created'
                                        }
                                    </div>
                                    <div class="text-xs text-neutral-400 dark:text-neutral-500">
                                        Expected: ${table.expected_files.join(', ')}
                                    </div>
                                </div>
                            </div>
                        </div>
                    `).join('')}
                </div>
            `;

            categoryContainer.appendChild(categoryDiv);
        });
    }

    escapeHtml(text) {
        const div = document.createElement('div');
        div.textContent = text;
        return div.innerHTML;
    }
}

// Initialize when DOM is loaded
document.addEventListener('DOMContentLoaded', () => {
    window.importManager = new ImportManager();
});
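// NOTE (editor sketch, not part of the original commit): the page exposes the manager
// globally, so other scripts or the browser console can presumably refresh the status
// panel and re-run the bulk flow once files are selected:
//
//     window.importManager.loadImportStatus();
//     window.importManager.handleBulkUpload();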
@@ -1,314 +0,0 @@
|
|||||||
(function() {
|
|
||||||
const apiBase = '/api/flexible';
|
|
||||||
let state = {
|
|
||||||
fileType: '',
|
|
||||||
targetTable: '',
|
|
||||||
q: '',
|
|
||||||
skip: 0,
|
|
||||||
limit: 50,
|
|
||||||
total: 0,
|
|
||||||
hasKeys: [],
|
|
||||||
};
|
|
||||||
|
|
||||||
function q(id) { return document.getElementById(id); }
|
|
||||||
|
|
||||||
function formatPreviewHtml(obj, term) {
|
|
||||||
// Returns sanitized HTML with clickable keys
|
|
||||||
try {
|
|
||||||
const payload = obj && obj.unmapped && typeof obj.unmapped === 'object' ? obj.unmapped : obj;
|
|
||||||
const keys = Object.keys(payload || {}).slice(0, 5);
|
|
||||||
const segments = keys.map((k) => {
|
|
||||||
const safeKey = window.htmlSanitizer.escape(String(k));
|
|
||||||
const valueStr = String(payload[k]).slice(0, 60);
|
|
||||||
const valueHtml = term && term.trim().length > 0 ? highlight(valueStr, term) : window.htmlSanitizer.escape(valueStr);
|
|
||||||
return `<span class="kv-pair"><button type="button" class="key-link text-primary-700 dark:text-primary-400 hover:underline" data-key="${safeKey}">${safeKey}</button>: ${valueHtml}</span>`;
|
|
||||||
});
|
|
||||||
return segments.join(', ');
|
|
||||||
} catch (_) { return ''; }
|
|
||||||
}
|
|
||||||
|
|
||||||
function escapeRegExp(str) {
|
|
||||||
return String(str).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
|
||||||
}
|
|
||||||
|
|
||||||
function highlight(text, term) {
|
|
||||||
if (!term) return window.htmlSanitizer.escape(text);
|
|
||||||
const pattern = new RegExp(escapeRegExp(term), 'ig');
|
|
||||||
const escaped = window.htmlSanitizer.escape(text);
|
|
||||||
// Replace on the escaped string to avoid breaking HTML
|
|
||||||
return escaped.replace(pattern, (m) => `<mark>${window.htmlSanitizer.escape(m)}</mark>`);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function loadOptions() {
|
|
||||||
try {
|
|
||||||
const res = await window.http.wrappedFetch(`${apiBase}/options`);
|
|
||||||
if (!res.ok) throw await window.http.toError(res, 'Failed to load options');
|
|
||||||
const data = await res.json();
|
|
||||||
const fileSel = q('filterFileType');
|
|
||||||
const tableSel = q('filterTargetTable');
|
|
||||||
// Clear existing except first
|
|
||||||
fileSel.length = 1; tableSel.length = 1;
|
|
||||||
(data.file_types || []).forEach(v => {
|
|
||||||
const opt = document.createElement('option');
|
|
||||||
opt.value = v; opt.textContent = v; fileSel.appendChild(opt);
|
|
||||||
});
|
|
||||||
(data.target_tables || []).forEach(v => {
|
|
||||||
const opt = document.createElement('option');
|
|
||||||
opt.value = v; opt.textContent = v; tableSel.appendChild(opt);
|
|
||||||
});
|
|
||||||
} catch (e) {
|
|
||||||
alert(window.http.formatAlert(e, 'Error loading options'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function loadRows() {
|
|
||||||
try {
|
|
||||||
const params = new URLSearchParams();
|
|
||||||
if (state.fileType) params.set('file_type', state.fileType);
|
|
||||||
if (state.targetTable) params.set('target_table', state.targetTable);
|
|
||||||
if (state.q) params.set('q', state.q);
|
|
||||||
if (Array.isArray(state.hasKeys)) {
|
|
||||||
state.hasKeys.forEach((k) => {
|
|
||||||
if (k && String(k).trim().length > 0) params.append('has_keys', String(k).trim());
|
|
||||||
});
|
|
||||||
}
|
|
||||||
params.set('skip', String(state.skip));
|
|
||||||
params.set('limit', String(state.limit));
|
|
||||||
const res = await window.http.wrappedFetch(`${apiBase}/imports?${params.toString()}`);
|
|
||||||
if (!res.ok) throw await window.http.toError(res, 'Failed to load flexible imports');
|
|
||||||
const data = await res.json();
|
|
||||||
state.total = data.total || 0;
|
|
||||||
renderRows(data.items || []);
|
|
||||||
renderMeta();
|
|
||||||
renderKeyChips();
|
|
||||||
} catch (e) {
|
|
||||||
alert(window.http.formatAlert(e, 'Error loading flexible imports'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderRows(items) {
|
|
||||||
const tbody = q('flexibleRows');
|
|
||||||
tbody.innerHTML = '';
|
|
||||||
items.forEach(item => {
|
|
||||||
const tr = document.createElement('tr');
|
|
||||||
tr.className = 'hover:bg-neutral-50 dark:hover:bg-neutral-700/40 cursor-pointer';
|
|
||||||
tr.innerHTML = `
|
|
||||||
<td class="px-3 py-2 whitespace-nowrap">${item.id}</td>
|
|
||||||
<td class="px-3 py-2 whitespace-nowrap">${window.htmlSanitizer.escape(item.file_type || '')}</td>
|
|
||||||
<td class="px-3 py-2 whitespace-nowrap">${window.htmlSanitizer.escape(item.target_table || '')}</td>
|
|
||||||
<td class="px-3 py-2 whitespace-nowrap text-xs text-neutral-500">${window.htmlSanitizer.escape((item.primary_key_field || '') + (item.primary_key_value ? '=' + item.primary_key_value : ''))}</td>
|
|
||||||
<td class="px-3 py-2 text-xs previewCell"></td>
|
|
||||||
<td class="px-3 py-2 text-right">
|
|
||||||
<button class="inline-flex items-center gap-1 text-primary-600 hover:text-primary-700 px-2 py-1 text-xs rounded-md hover:bg-primary-50 dark:hover:bg-primary-900/30" data-action="export" data-id="${item.id}">
|
|
||||||
<i class="fa-solid fa-download"></i>
|
|
||||||
<span>CSV</span>
|
|
||||||
</button>
|
|
||||||
</td>
|
|
||||||
`;
|
|
||||||
// Set sanitized highlighted preview
|
|
||||||
const previewCell = tr.querySelector('.previewCell');
|
|
||||||
const previewHtml = formatPreviewHtml(item.extra_data || {}, state.q);
|
|
||||||
window.setSafeHTML(previewCell, previewHtml);
|
|
||||||
// Bind click on keys to add filters
|
|
||||||
previewCell.querySelectorAll('.key-link').forEach((btn) => {
|
|
||||||
btn.addEventListener('click', (ev) => {
|
|
||||||
ev.stopPropagation();
|
|
||||||
const key = btn.getAttribute('data-key') || '';
|
|
||||||
addKeyFilter(key);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
// Row click opens modal
|
|
||||||
tr.addEventListener('click', (ev) => {
|
|
||||||
// Ignore clicks on the export button inside the row
|
|
||||||
const target = ev.target.closest('button[data-action="export"]');
|
|
||||||
if (target) return;
|
|
||||||
openDetailModal(item);
|
|
||||||
});
|
|
||||||
// Export button handler
|
|
||||||
tr.querySelector('button[data-action="export"]').addEventListener('click', (ev) => {
|
|
||||||
ev.stopPropagation();
|
|
||||||
exportSingleRow(item.id);
|
|
||||||
});
|
|
||||||
tbody.appendChild(tr);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderMeta() {
|
|
||||||
const start = state.total === 0 ? 0 : state.skip + 1;
|
|
||||||
const end = Math.min(state.skip + state.limit, state.total);
|
|
||||||
q('rowsMeta').textContent = `Showing ${start}-${end} of ${state.total}`;
|
|
||||||
q('prevPageBtn').disabled = state.skip === 0;
|
|
||||||
q('nextPageBtn').disabled = state.skip + state.limit >= state.total;
|
|
||||||
}
|
|
||||||
|
|
||||||
function applyFilters() {
|
|
||||||
state.fileType = q('filterFileType').value || '';
|
|
||||||
state.targetTable = q('filterTargetTable').value || '';
|
|
||||||
state.q = (q('quickSearch').value || '').trim();
|
|
||||||
state.skip = 0;
|
|
||||||
loadRows();
|
|
||||||
}
|
|
||||||
|
|
||||||
function addKeyFilter(key) {
|
|
||||||
const k = String(key || '').trim();
|
|
||||||
if (!k) return;
|
|
||||||
if (!Array.isArray(state.hasKeys)) state.hasKeys = [];
|
|
||||||
if (!state.hasKeys.includes(k)) {
|
|
||||||
state.hasKeys.push(k);
|
|
||||||
state.skip = 0;
|
|
||||||
loadRows();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeKeyFilter(key) {
|
|
||||||
const k = String(key || '').trim();
|
|
||||||
if (!k) return;
|
|
||||||
state.hasKeys = (state.hasKeys || []).filter((x) => x !== k);
|
|
||||||
state.skip = 0;
|
|
||||||
loadRows();
|
|
||||||
}
|
|
||||||
|
|
||||||
function clearKeyFilters() {
|
|
||||||
if ((state.hasKeys || []).length === 0) return;
|
|
||||||
state.hasKeys = [];
|
|
||||||
state.skip = 0;
|
|
||||||
loadRows();
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderKeyChips() {
|
|
||||||
const container = q('keyChipsContainer');
|
|
||||||
const chipsWrap = q('keyChips');
|
|
||||||
const clearBtn = q('clearKeyChips');
|
|
||||||
if (!container || !chipsWrap) return;
|
|
||||||
chipsWrap.innerHTML = '';
|
|
||||||
const keys = state.hasKeys || [];
|
|
||||||
if (keys.length === 0) {
|
|
||||||
container.classList.add('hidden');
|
|
||||||
} else {
|
|
||||||
container.classList.remove('hidden');
|
|
||||||
keys.forEach((k) => {
|
|
||||||
const btn = document.createElement('button');
|
|
||||||
btn.type = 'button';
|
|
||||||
btn.className = 'inline-flex items-center gap-1 px-2 py-1 rounded-full text-xs bg-primary-50 text-primary-700 border border-primary-200 hover:bg-primary-100 dark:bg-primary-900/30 dark:text-primary-200 dark:border-primary-800';
|
|
||||||
btn.setAttribute('data-chip-key', k);
|
|
||||||
btn.innerHTML = `<span class="font-mono">${window.htmlSanitizer.escape(k)}</span> <i class="fa-solid fa-xmark"></i>`;
|
|
||||||
btn.addEventListener('click', (ev) => {
|
|
||||||
ev.stopPropagation();
|
|
||||||
removeKeyFilter(k);
|
|
||||||
});
|
|
||||||
chipsWrap.appendChild(btn);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (clearBtn) {
|
|
||||||
clearBtn.onclick = (ev) => { ev.preventDefault(); clearKeyFilters(); };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function exportCsv() {
|
|
||||||
try {
|
|
||||||
const params = new URLSearchParams();
|
|
||||||
if (state.fileType) params.set('file_type', state.fileType);
|
|
||||||
if (state.targetTable) params.set('target_table', state.targetTable);
|
|
||||||
if (Array.isArray(state.hasKeys)) {
|
|
||||||
state.hasKeys.forEach((k) => {
|
|
||||||
if (k && String(k).trim().length > 0) params.append('has_keys', String(k).trim());
|
|
||||||
});
|
|
||||||
}
|
|
||||||
const url = `${apiBase}/export?${params.toString()}`;
|
|
||||||
const res = await window.http.wrappedFetch(url);
|
|
||||||
if (!res.ok) throw await window.http.toError(res, 'Export failed');
|
|
||||||
const blob = await res.blob();
|
|
||||||
const a = document.createElement('a');
|
|
||||||
const objectUrl = URL.createObjectURL(blob);
|
|
||||||
a.href = objectUrl;
|
|
||||||
a.download = 'flexible_unmapped.csv';
|
|
||||||
document.body.appendChild(a);
|
|
||||||
a.click();
|
|
||||||
a.remove();
|
|
||||||
setTimeout(() => URL.revokeObjectURL(objectUrl), 1000);
|
|
||||||
} catch (e) {
|
|
||||||
alert(window.http.formatAlert(e, 'Error exporting CSV'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function exportSingleRow(rowId) {
|
|
||||||
try {
|
|
||||||
const res = await window.http.wrappedFetch(`${apiBase}/export/${rowId}`);
|
|
||||||
if (!res.ok) throw await window.http.toError(res, 'Export failed');
|
|
||||||
const blob = await res.blob();
|
|
||||||
const a = document.createElement('a');
|
|
||||||
const objectUrl = URL.createObjectURL(blob);
|
|
||||||
a.href = objectUrl;
|
|
||||||
a.download = `flexible_row_${rowId}.csv`;
|
|
||||||
document.body.appendChild(a);
|
|
||||||
a.click();
|
|
||||||
a.remove();
|
|
||||||
setTimeout(() => URL.revokeObjectURL(objectUrl), 1000);
|
|
||||||
} catch (e) {
|
|
||||||
alert(window.http.formatAlert(e, 'Error exporting row CSV'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function openDetailModal(item) {
|
|
||||||
// Populate fields
|
|
||||||
q('detailRowId').textContent = `#${item.id}`;
|
|
||||||
q('detailFileType').textContent = item.file_type || '';
|
|
||||||
q('detailTargetTable').textContent = item.target_table || '';
|
|
||||||
q('detailPkField').textContent = item.primary_key_field || '';
|
|
||||||
q('detailPkValue').textContent = item.primary_key_value || '';
|
|
||||||
try {
|
|
||||||
const pretty = JSON.stringify(item.extra_data || {}, null, 2);
|
|
||||||
q('detailJson').textContent = pretty;
|
|
||||||
} catch (_) {
|
|
||||||
q('detailJson').textContent = '';
|
|
||||||
}
|
|
||||||
const exportBtn = q('detailExportBtn');
|
|
||||||
exportBtn.onclick = () => exportSingleRow(item.id);
|
|
||||||
openModal('flexibleDetailModal');
|
|
||||||
}
|
|
||||||
|
|
||||||
function bindEvents() {
|
|
||||||
q('applyFiltersBtn').addEventListener('click', applyFilters);
|
|
||||||
q('exportCsvBtn').addEventListener('click', exportCsv);
|
|
||||||
const clearBtn = q('clearKeyChips');
|
|
||||||
if (clearBtn) clearBtn.addEventListener('click', (ev) => { ev.preventDefault(); clearKeyFilters(); });
|
|
||||||
// Quick search with debounce
|
|
||||||
const searchInput = q('quickSearch');
|
|
||||||
let searchTimer = null;
|
|
||||||
searchInput.addEventListener('input', () => {
|
|
||||||
const value = searchInput.value || '';
|
|
||||||
clearTimeout(searchTimer);
|
|
||||||
searchTimer = setTimeout(() => {
|
|
||||||
state.q = value.trim();
|
|
||||||
state.skip = 0;
|
|
||||||
loadRows();
|
|
||||||
}, 300);
|
|
||||||
});
|
|
||||||
searchInput.addEventListener('keydown', (ev) => {
|
|
||||||
if (ev.key === 'Enter') {
|
|
||||||
ev.preventDefault();
|
|
||||||
clearTimeout(searchTimer);
|
|
||||||
state.q = (searchInput.value || '').trim();
|
|
||||||
state.skip = 0;
|
|
||||||
loadRows();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
q('prevPageBtn').addEventListener('click', () => {
|
|
||||||
state.skip = Math.max(0, state.skip - state.limit);
|
|
||||||
loadRows();
|
|
||||||
});
|
|
||||||
q('nextPageBtn').addEventListener('click', () => {
|
|
||||||
if (state.skip + state.limit < state.total) {
|
|
||||||
state.skip += state.limit;
|
|
||||||
loadRows();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
document.addEventListener('DOMContentLoaded', () => {
|
|
||||||
bindEvents();
|
|
||||||
loadOptions().then(loadRows);
|
|
||||||
});
|
|
||||||
})();
|
|
||||||
|
|
||||||
|
|
||||||
@@ -57,7 +57,7 @@ function handleKeyboardShortcuts(event) {
             break;
         case 'Alt+I':
             event.preventDefault();
-            navigateTo('/import');
+            navigateTo('/admin/import');
             break;
         case 'Alt+A':
             event.preventDefault();
@@ -199,14 +199,25 @@ function initializeBatchProgressUI() {
 
 async function cancelBatch(batchId) {
     try {
-        const resp = await window.http.wrappedFetch(`/api/billing/statements/batch-progress/${encodeURIComponent(batchId)}`, { method: 'DELETE' });
-        if (!resp.ok) {
-            throw await window.http.toError(resp, 'Failed to cancel batch');
+        if (!confirm(`Are you sure you want to cancel batch ${batchId}?`)) {
+            return;
         }
 
+        // Import functionality removed
+
+        const result = await resp.json();
+        console.log('Import batch cancelled:', result.message);
+
         // Let stream update the row; no-op here
+        // The progress will be updated via WebSocket
     } catch (e) {
-        console.warn('Cancel failed', e);
-        try { alert(window.http.formatAlert(e, 'Cancel failed')); } catch (_) {}
+        console.warn('Cancel import batch failed', e);
+        try {
+            const errorMsg = window.http.formatAlert(e, 'Cancel import batch failed');
+            alert(errorMsg);
+        } catch (_) {
+            alert('Failed to cancel import batch');
+        }
     }
 }
@@ -464,14 +475,11 @@ async function checkUserPermissions() {
         const adminDivider = document.getElementById('admin-menu-divider');
         if (adminItem) adminItem.classList.remove('hidden');
         if (adminDivider) adminDivider.classList.remove('hidden');
+        // Show import menu for admins
         const importDesktop = document.getElementById('nav-import-desktop');
         const importMobile = document.getElementById('nav-import-mobile');
         if (importDesktop) importDesktop.classList.remove('hidden');
         if (importMobile) importMobile.classList.remove('hidden');
-        const flexibleDesktop = document.getElementById('nav-flexible-desktop');
-        const flexibleMobile = document.getElementById('nav-flexible-mobile');
-        if (flexibleDesktop) flexibleDesktop.classList.remove('hidden');
-        if (flexibleMobile) flexibleMobile.classList.remove('hidden');
     }
     const userDropdownName = document.querySelector('#userDropdown button span');
     if (user.full_name && userDropdownName) {
213
templates/admin_import.html
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block title %}CSV Import - Delphi Database{% endblock %}
|
||||||
|
|
||||||
|
{% block bridge_css %}{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-6">
|
||||||
|
<!-- Page Header -->
|
||||||
|
<div class="flex flex-col sm:flex-row sm:items-center sm:justify-between gap-4 mb-8">
|
||||||
|
<div class="flex items-center gap-3">
|
||||||
|
<div class="flex items-center justify-center w-12 h-12 bg-primary-100 dark:bg-primary-800 text-primary-600 dark:text-primary-400 rounded-xl">
|
||||||
|
<i class="fa-solid fa-cloud-arrow-up text-xl"></i>
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<h1 class="text-3xl font-bold text-neutral-900 dark:text-neutral-100">CSV Data Import</h1>
|
||||||
|
<p class="text-neutral-600 dark:text-neutral-400">Import CSV files converted from legacy .sc files into the database</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="flex items-center gap-2">
|
||||||
|
<a href="/admin" class="inline-flex items-center gap-2 px-4 py-2 bg-neutral-100 dark:bg-neutral-800 text-neutral-700 dark:text-neutral-300 rounded-lg hover:bg-neutral-200 dark:hover:bg-neutral-700 transition-colors duration-200">
|
||||||
|
<i class="fa-solid fa-arrow-left text-sm"></i>
|
||||||
|
<span>Back to Admin</span>
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Import Status Overview -->
|
||||||
|
<div class="bg-white dark:bg-neutral-800 rounded-xl shadow-lg border border-neutral-200 dark:border-neutral-700 p-6 mb-8">
|
||||||
|
<div class="flex items-center justify-between mb-4">
|
||||||
|
<h2 class="text-xl font-semibold text-neutral-900 dark:text-neutral-100 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-chart-pie text-primary-600"></i>
|
||||||
|
Import Status Overview
|
||||||
|
</h2>
|
||||||
|
<button id="refreshStatusBtn" class="px-4 py-2 bg-primary-600 text-white rounded-lg hover:bg-primary-700 focus:outline-none focus:ring-2 focus:ring-primary-500 transition-colors duration-200 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-refresh"></i>
|
||||||
|
<span>Refresh</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Summary Stats -->
|
||||||
|
<div id="statusSummary" class="grid grid-cols-2 md:grid-cols-5 gap-4 mb-6">
|
||||||
|
<div class="bg-blue-50 dark:bg-blue-900/20 p-4 rounded-lg">
|
||||||
|
<div class="text-2xl font-bold text-blue-600 dark:text-blue-400" id="totalTables">-</div>
|
||||||
|
<div class="text-sm text-blue-800 dark:text-blue-300">Total Tables</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-green-50 dark:bg-green-900/20 p-4 rounded-lg">
|
||||||
|
<div class="text-2xl font-bold text-green-600 dark:text-green-400" id="importedTables">-</div>
|
||||||
|
<div class="text-sm text-green-800 dark:text-green-300">Imported</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-yellow-50 dark:bg-yellow-900/20 p-4 rounded-lg">
|
||||||
|
<div class="text-2xl font-bold text-yellow-600 dark:text-yellow-400" id="emptyTables">-</div>
|
||||||
|
<div class="text-sm text-yellow-800 dark:text-yellow-300">Empty</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-red-50 dark:bg-red-900/20 p-4 rounded-lg">
|
||||||
|
<div class="text-2xl font-bold text-red-600 dark:text-red-400" id="missingTables">-</div>
|
||||||
|
<div class="text-sm text-red-800 dark:text-red-300">Missing</div>
|
||||||
|
</div>
|
||||||
|
<div class="bg-purple-50 dark:bg-purple-900/20 p-4 rounded-lg">
|
||||||
|
<div class="text-2xl font-bold text-purple-600 dark:text-purple-400" id="totalRows">-</div>
|
||||||
|
<div class="text-sm text-purple-800 dark:text-purple-300">Total Rows</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Progress Bar -->
|
||||||
|
<div class="mb-6">
|
||||||
|
<div class="flex justify-between text-sm font-medium text-neutral-700 dark:text-neutral-300 mb-2">
|
||||||
|
<span>Import Progress</span>
|
||||||
|
<span id="completionPercentage">0%</span>
|
||||||
|
</div>
|
||||||
|
<div class="w-full bg-neutral-200 dark:bg-neutral-700 rounded-full h-3">
|
||||||
|
<div id="progressBar" class="bg-primary-600 h-3 rounded-full transition-all duration-300" style="width: 0%"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Status by Category -->
|
||||||
|
<div id="statusByCategory" class="space-y-4">
|
||||||
|
<!-- Categories will be populated here -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Import Form -->
|
||||||
|
<div class="bg-white dark:bg-neutral-800 rounded-xl shadow-lg border border-neutral-200 dark:border-neutral-700 p-6 mb-8">
|
||||||
|
<h2 class="text-xl font-semibold text-neutral-900 dark:text-neutral-100 mb-4 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-file-arrow-up text-primary-600"></i>
|
||||||
|
Single File Import
|
||||||
|
</h2>
|
||||||
|
|
||||||
|
<form id="importForm" class="space-y-6">
|
||||||
|
<div>
|
||||||
|
<label for="tableSelect" class="block text-sm font-medium text-neutral-700 dark:text-neutral-300 mb-2">
|
||||||
|
Select Table Type
|
||||||
|
</label>
|
||||||
|
<select id="tableSelect" name="table_name"
|
||||||
|
class="w-full px-4 py-3 border border-neutral-300 dark:border-neutral-600 rounded-lg bg-white dark:bg-neutral-700 text-neutral-900 dark:text-neutral-100 focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent">
|
||||||
|
<option value="">-- Select Table --</option>
|
||||||
|
<option value="rolodex">ROLODEX (Contacts)</option>
|
||||||
|
<option value="phone">PHONE (Phone Numbers)</option>
|
||||||
|
<option value="files">FILES (Case Files)</option>
|
||||||
|
<option value="ledger">LEDGER (Financial Transactions)</option>
|
||||||
|
<option value="qdros">QDROS (QDRO Documents)</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div>
|
||||||
|
<label for="csvFile" class="block text-sm font-medium text-neutral-700 dark:text-neutral-300 mb-2">
|
||||||
|
Select CSV File
|
||||||
|
</label>
|
||||||
|
<input type="file" id="csvFile" name="file" accept=".csv"
|
||||||
|
class="w-full px-4 py-3 border border-neutral-300 dark:border-neutral-600 rounded-lg bg-white dark:bg-neutral-700 text-neutral-900 dark:text-neutral-100 focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent file:mr-4 file:py-2 file:px-4 file:rounded-lg file:border-0 file:bg-primary-50 file:text-primary-700 file:hover:bg-primary-100">
|
||||||
|
<p class="text-sm text-neutral-500 dark:text-neutral-400 mt-2">Only CSV files are supported. Max file size: 100MB</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="flex space-x-4">
|
||||||
|
<button type="button" id="validateBtn"
|
||||||
|
class="px-6 py-3 bg-warning-600 text-white rounded-lg hover:bg-warning-700 focus:outline-none focus:ring-2 focus:ring-warning-500 disabled:opacity-50 transition-colors duration-200 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-check-circle"></i>
|
||||||
|
<span>Validate Headers</span>
|
||||||
|
</button>
|
||||||
|
<button type="submit" id="importBtn"
|
||||||
|
class="px-6 py-3 bg-primary-600 text-white rounded-lg hover:bg-primary-700 focus:outline-none focus:ring-2 focus:ring-primary-500 disabled:opacity-50 transition-colors duration-200 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-upload"></i>
|
||||||
|
<span>Import Data</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Schema Information -->
|
||||||
|
<div id="schemaInfo" class="bg-primary-50 dark:bg-primary-900/20 border border-primary-200 dark:border-primary-800 rounded-xl p-6 mb-8 hidden">
|
||||||
|
<h3 class="text-lg font-semibold text-primary-900 dark:text-primary-100 mb-3 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-table-columns"></i>
|
||||||
|
Required Columns
|
||||||
|
</h3>
|
||||||
|
<div id="schemaDetails" class="text-sm text-primary-800 dark:text-primary-200"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Import Progress -->
|
||||||
|
<div id="importProgress" class="bg-white dark:bg-neutral-800 rounded-xl shadow-lg border border-neutral-200 dark:border-neutral-700 p-6 mb-8 hidden">
|
||||||
|
<h3 class="text-xl font-semibold text-neutral-900 dark:text-neutral-100 mb-4 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-spinner fa-spin text-primary-600"></i>
|
||||||
|
Import Progress
|
||||||
|
</h3>
|
||||||
|
<div id="progressDetails" class="space-y-2 max-h-64 overflow-y-auto"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Import Results -->
|
||||||
|
<div id="importResults" class="bg-white dark:bg-neutral-800 rounded-xl shadow-lg border border-neutral-200 dark:border-neutral-700 p-6 mb-8 hidden">
|
||||||
|
<h3 class="text-xl font-semibold text-neutral-900 dark:text-neutral-100 mb-4 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-chart-line text-success-600"></i>
|
||||||
|
Import Results
|
||||||
|
</h3>
|
||||||
|
<div id="resultsContent"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Bulk Upload Section -->
|
||||||
|
<div class="bg-white dark:bg-neutral-800 rounded-xl shadow-lg border border-neutral-200 dark:border-neutral-700 p-6">
|
||||||
|
<h2 class="text-xl font-semibold text-neutral-900 dark:text-neutral-100 mb-4 flex items-center gap-2">
|
||||||
|
<i class="fa-solid fa-layer-group text-primary-600"></i>
|
||||||
|
Bulk File Upload
|
||||||
|
</h2>
|
||||||
|
<p class="text-neutral-600 dark:text-neutral-400 mb-6">
|
||||||
|
Select multiple CSV files to upload and import at once. You can select all your CSV files in one go.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<!-- Bulk File Input -->
|
||||||
|
<div class="mb-6">
|
||||||
|
<label for="bulkFiles" class="block text-sm font-medium text-neutral-700 dark:text-neutral-300 mb-2">
|
||||||
|
Select Multiple CSV Files
|
||||||
|
</label>
|
||||||
|
<input type="file" id="bulkFiles" multiple accept=".csv"
|
||||||
|
class="w-full px-4 py-3 border border-neutral-300 dark:border-neutral-600 rounded-lg bg-white dark:bg-neutral-700 text-neutral-900 dark:text-neutral-100 focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent file:mr-4 file:py-2 file:px-4 file:rounded-lg file:border-0 file:bg-primary-50 file:text-primary-700 file:hover:bg-primary-100">
|
||||||
|
<p class="text-sm text-neutral-500 dark:text-neutral-400 mt-2">
|
||||||
|
Hold Ctrl/Cmd to select multiple files. Only CSV files are supported. Max 100MB per file.
|
||||||
|
</p>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Selected Files Display -->
|
||||||
|
<div id="bulkFilesDisplay" class="hidden mb-6">
|
||||||
|
<h3 class="text-lg font-semibold text-neutral-900 dark:text-neutral-100 mb-4">
|
||||||
|
Selected Files (<span id="bulkFileCount">0</span>)
|
||||||
|
</h3>
|
||||||
|
<div class="bg-neutral-50 dark:bg-neutral-900 rounded-lg p-4 max-h-96 overflow-y-auto">
|
||||||
|
<div id="bulkFilesList" class="space-y-2">
|
||||||
|
<!-- Selected files will be displayed here -->
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Bulk Controls -->
|
||||||
|
<div class="flex flex-wrap items-center gap-4 mb-6">
|
||||||
|
<button type="button" id="autoMapBtn"
|
||||||
|
class="px-4 py-2 bg-info-600 text-white rounded-lg hover:bg-info-700 focus:outline-none focus:ring-2 focus:ring-info-500 transition-colors duration-200 flex items-center gap-2 disabled:opacity-50" disabled>
|
||||||
|
<i class="fa-solid fa-magic"></i>
|
||||||
|
<span>Auto-Map Tables</span>
|
||||||
|
</button>
|
||||||
|
<button type="button" id="clearAllBtn"
|
||||||
|
class="px-4 py-2 bg-warning-600 text-white rounded-lg hover:bg-warning-700 focus:outline-none focus:ring-2 focus:ring-warning-500 transition-colors duration-200 flex items-center gap-2 disabled:opacity-50" disabled>
|
||||||
|
<i class="fa-solid fa-trash"></i>
|
||||||
|
<span>Clear All</span>
|
||||||
|
</button>
|
||||||
|
<button type="button" id="bulkUploadBtn"
|
||||||
|
class="px-6 py-3 bg-purple-600 text-white rounded-lg hover:bg-purple-700 focus:outline-none focus:ring-2 focus:ring-purple-500 disabled:opacity-50 transition-colors duration-200 flex items-center gap-2" disabled>
|
||||||
|
<i class="fa-solid fa-cloud-upload"></i>
|
||||||
|
<span>Upload & Import All</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Include the import JavaScript -->
|
||||||
|
<script src="/static/js/admin_import.js"></script>
|
||||||
|
{% endblock %}
|
||||||
@@ -55,14 +55,11 @@
                     <i class="fa-solid fa-magnifying-glass"></i>
                     <span>Search</span>
                 </a>
-                <a id="nav-import-desktop" href="/import" data-shortcut="Alt+I" class="hidden flex items-center gap-2 px-3 py-2 rounded-lg text-primary-100 hover:text-white hover:bg-primary-700 transition-all duration-200">
+                <a id="nav-import-desktop" href="/admin/import" data-shortcut="Alt+I" class="hidden flex items-center gap-2 px-3 py-2 rounded-lg text-primary-100 hover:text-white hover:bg-primary-700 transition-all duration-200">
                     <i class="fa-solid fa-cloud-arrow-up"></i>
                     <span>Import</span>
                 </a>
-                <a id="nav-flexible-desktop" href="/flexible" class="hidden flex items-center gap-2 px-3 py-2 rounded-lg text-primary-100 hover:text-white hover:bg-primary-700 transition-all duration-200">
-                    <i class="fa-solid fa-table-columns"></i>
-                    <span>Flexible</span>
-                </a>
+                <!-- Flexible import removed -->
             </div>

             <!-- Right side items -->
@@ -129,14 +126,11 @@
                 <i class="fa-solid fa-magnifying-glass"></i>
                 <span>Search</span>
             </a>
-            <a id="nav-import-mobile" href="/import" class="hidden flex items-center gap-3 px-3 py-2 rounded-lg text-primary-100 hover:text-white hover:bg-primary-700 transition-all duration-200">
+            <a id="nav-import-mobile" href="/admin/import" class="hidden flex items-center gap-3 px-3 py-2 rounded-lg text-primary-100 hover:text-white hover:bg-primary-700 transition-all duration-200">
                 <i class="fa-solid fa-cloud-arrow-up"></i>
                 <span>Import</span>
             </a>
-            <a id="nav-flexible-mobile" href="/flexible" class="hidden flex items-center gap-3 px-3 py-2 rounded-lg text-primary-100 hover:text-white hover:bg-primary-700 transition-all duration-200">
-                <i class="fa-solid fa-table-columns"></i>
-                <span>Flexible</span>
-            </a>
+            <!-- Flexible import removed -->
         </div>
     </div>
 </div>
@@ -433,10 +427,9 @@
         '/files': 'File Cabinet',
         '/financial': 'Financial/Ledger',
         '/documents': 'Document Management',
-        '/import': 'Data Import',
+        '/admin/import': 'Data Import',
         '/search': 'Advanced Search',
         '/admin': 'System Administration',
-        '/flexible': 'Flexible Imports'
     };
 
     const currentPage = pageNames[path] || `Page: ${path}`;
@@ -129,7 +129,7 @@
                 <span class="font-medium">Global Search</span>
                 <kbd class="text-xs text-neutral-500 dark:text-neutral-400 mt-1">Ctrl+F</kbd>
             </button>
-            <button onclick="window.location.href='/import'" class="w-full flex flex-col items-center justify-center p-4 bg-neutral-50 dark:bg-neutral-900/50 hover:bg-neutral-100 dark:hover:bg-neutral-900 rounded-lg border border-neutral-200 dark:border-neutral-700 transition-colors duration-200">
+            <button onclick="window.location.href='/admin/import'" class="w-full flex flex-col items-center justify-center p-4 bg-neutral-50 dark:bg-neutral-900/50 hover:bg-neutral-100 dark:hover:bg-neutral-900 rounded-lg border border-neutral-200 dark:border-neutral-700 transition-colors duration-200">
                 <i class="fa-solid fa-cloud-arrow-up text-2xl text-primary-600 mb-1"></i>
                 <span class="font-medium">Import Data</span>
                 <kbd class="text-xs text-neutral-500 dark:text-neutral-400 mt-1">Alt+I</kbd>
@@ -325,7 +325,7 @@ async function loadRecentImports() {
         <div>
             <div class="flex items-center justify-between mb-2">
                 <h6 class="text-sm font-semibold flex items-center gap-2"><i class="fa-solid fa-file-arrow-up"></i> Recent Import Status</h6>
-                <a href="/import" class="text-primary-600 hover:underline text-sm">Open Import</a>
+                <a href="/admin/import" class="text-primary-600 hover:underline text-sm">Open Import</a>
             </div>
             <div class="border border-neutral-200 dark:border-neutral-700 rounded-lg p-3">${items || '<p class="text-neutral-500 text-sm">No imported data yet.</p>'}</div>
             <div class="mt-2 text-xs text-neutral-600 dark:text-neutral-400">Total records across tracked CSVs: <strong>${Number(total).toLocaleString()}</strong></div>
@@ -1,113 +0,0 @@
|
|||||||
{% extends "base.html" %}
|
|
||||||
|
|
||||||
{% block content %}
|
|
||||||
<div class="space-y-6">
|
|
||||||
<div class="flex items-center justify-between">
|
|
||||||
<h1 class="text-2xl font-semibold">Flexible Imports</h1>
|
|
||||||
<div class="flex items-center gap-2">
|
|
||||||
<button id="exportCsvBtn" class="px-4 py-2 bg-primary-600 hover:bg-primary-700 text-white rounded-lg transition-colors">
|
|
||||||
<i class="fa-solid fa-file-csv mr-2"></i> Export CSV
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="bg-white dark:bg-neutral-800 rounded-xl border border-neutral-200 dark:border-neutral-700 p-4">
|
|
||||||
<div class="grid grid-cols-1 md:grid-cols-4 gap-4">
|
|
||||||
<div>
|
|
||||||
<label class="block text-sm font-medium mb-1">File Type</label>
|
|
||||||
<select id="filterFileType" class="w-full rounded-lg border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-900 p-2">
|
|
||||||
<option value="">All</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<label class="block text-sm font-medium mb-1">Target Table</label>
|
|
||||||
<select id="filterTargetTable" class="w-full rounded-lg border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-900 p-2">
|
|
||||||
<option value="">All</option>
|
|
||||||
</select>
|
|
||||||
</div>
|
|
||||||
<div>
|
|
||||||
<label class="block text-sm font-medium mb-1">Quick Search</label>
|
|
||||||
<input id="quickSearch" type="text" placeholder="Search file type, target table, keys and values" class="w-full rounded-lg border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-900 p-2" />
|
|
||||||
</div>
|
|
||||||
<div class="flex items-end">
|
|
||||||
<button id="applyFiltersBtn" class="w-full md:w-auto px-4 py-2 bg-neutral-100 dark:bg-neutral-700 hover:bg-neutral-200 dark:hover:bg-neutral-600 rounded-lg border border-neutral-200 dark:border-neutral-600">Apply</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<!-- Key filter chips -->
|
|
||||||
<div id="keyChipsContainer" class="mt-3 hidden">
|
|
||||||
<div class="flex items-center gap-2 flex-wrap">
|
|
||||||
<span class="text-xs text-neutral-500">Filters:</span>
|
|
||||||
<div id="keyChips" class="flex items-center gap-2 flex-wrap"></div>
|
|
||||||
<button id="clearKeyChips" class="ml-auto text-xs text-neutral-600 hover:text-neutral-800 dark:text-neutral-300 dark:hover:text-white">Clear</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="bg-white dark:bg-neutral-800 rounded-xl border border-neutral-200 dark:border-neutral-700 overflow-hidden">
|
|
||||||
<div class="overflow-x-auto">
|
|
||||||
<table class="w-full text-sm">
|
|
||||||
<thead>
|
|
||||||
<tr class="bg-neutral-100 dark:bg-neutral-700 text-left">
|
|
||||||
<th class="px-3 py-2">ID</th>
|
|
||||||
<th class="px-3 py-2">File Type</th>
|
|
||||||
<th class="px-3 py-2">Target Table</th>
|
|
||||||
<th class="px-3 py-2">PK</th>
|
|
||||||
<th class="px-3 py-2">Unmapped Preview</th>
|
|
||||||
<th class="px-3 py-2 text-right">Actions</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tbody id="flexibleRows" class="divide-y divide-neutral-200 dark:divide-neutral-700">
|
|
||||||
</tbody>
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
<div class="flex items-center justify-between p-3 border-t border-neutral-200 dark:border-neutral-700">
|
|
||||||
<div class="text-xs text-neutral-500" id="rowsMeta">Loading...</div>
|
|
||||||
<div class="flex items-center gap-2">
|
|
||||||
<button id="prevPageBtn" class="px-3 py-1.5 bg-neutral-100 dark:bg-neutral-700 disabled:opacity-50 rounded-lg">Prev</button>
|
|
||||||
<button id="nextPageBtn" class="px-3 py-1.5 bg-neutral-100 dark:bg-neutral-700 disabled:opacity-50 rounded-lg">Next</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<!-- Row detail modal -->
|
|
||||||
<div id="flexibleDetailModal" class="hidden fixed inset-0 bg-black/60 z-50 overflow-y-auto" aria-hidden="true">
|
|
||||||
<div class="flex min-h-full items-center justify-center p-4">
|
|
||||||
<div class="bg-white dark:bg-neutral-800 rounded-xl shadow-xl max-w-4xl w-full max-h-[85vh] overflow-hidden">
|
|
||||||
<div class="flex items-center justify-between px-6 py-4 border-b border-neutral-200 dark:border-neutral-700">
|
|
||||||
<h2 class="text-lg font-semibold">Flexible Row <span id="detailRowId"></span></h2>
|
|
||||||
<div class="flex items-center gap-2">
|
|
||||||
<button id="detailExportBtn" class="px-3 py-1.5 bg-primary-600 hover:bg-primary-700 text-white rounded-lg text-sm">
|
|
||||||
<i class="fa-solid fa-file-csv mr-1"></i> Export CSV
|
|
||||||
</button>
|
|
||||||
<button onclick="closeModal('flexibleDetailModal')" class="text-neutral-500 hover:text-neutral-700">
|
|
||||||
<i class="fa-solid fa-xmark text-xl"></i>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="p-4">
|
|
||||||
<div class="grid grid-cols-1 md:grid-cols-2 gap-3 mb-3 text-xs text-neutral-600 dark:text-neutral-300">
|
|
||||||
<div>File Type: <span id="detailFileType" class="font-mono"></span></div>
|
|
||||||
<div>Target Table: <span id="detailTargetTable" class="font-mono"></span></div>
|
|
||||||
<div>PK Field: <span id="detailPkField" class="font-mono"></span></div>
|
|
||||||
<div>PK Value: <span id="detailPkValue" class="font-mono"></span></div>
|
|
||||||
</div>
|
|
||||||
<div class="border border-neutral-200 dark:border-neutral-700 rounded-lg overflow-hidden">
|
|
||||||
<pre id="detailJson" class="p-4 text-xs bg-neutral-50 dark:bg-neutral-900 overflow-auto max-h-[60vh]"></pre>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
<div class="flex items-center justify-end gap-3 px-6 py-4 border-t border-neutral-200 dark:border-neutral-700 bg-neutral-50 dark:bg-neutral-800/50">
|
|
||||||
<button onclick="closeModal('flexibleDetailModal')" class="px-4 py-2 bg-neutral-100 dark:bg-neutral-700 text-neutral-700 dark:text-neutral-300 hover:bg-neutral-200 dark:hover:bg-neutral-600 rounded-lg transition-colors duration-200">
|
|
||||||
Close
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block extra_scripts %}
|
|
||||||
<script src="/static/js/flexible.js"></script>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
|
|
||||||
File diff suppressed because it is too large
@@ -206,7 +206,7 @@ let supportSystem = {
         '/files': 'File Cabinet',
         '/financial': 'Financial/Ledger',
         '/documents': 'Document Management',
-        '/import': 'Data Import',
+        '/admin/import': 'Data Import',
         '/search': 'Advanced Search',
         '/admin': 'System Administration'
     };
3
test_qdros.csv
Normal file
@@ -0,0 +1,3 @@
File_No,Version,Plan_Id,^1,^2,^Part,^AltP,^Pet,^Res,Case_Type,Case_Code,Section,Case_Number,Judgment_Date,Valuation_Date,Married_On,Percent_Awarded,Ven_City,Ven_Cnty,Ven_St,Draft_Out,Draft_Apr,Final_Out,Judge,Form_Name
1234,1,ABC123,test1,test2,part1,altp1,pet1,res1,DIV,001,401,2023-001,2023-01-15,2022-12-01,2020-05-10,50.00,Chicago,Cook,IL,2023-02-01,2023-02-15,2023-03-01,Judge Smith,Form A
5678,2,DEF456,test3,test4,part2,altp2,pet2,res2,DIV,002,401,2023-002,2023-02-20,2023-01-15,2019-03-20,60.00,Springfield,Sangamon,IL,2023-03-01,2023-03-15,2023-04-01,Judge Jones,Form B