working on new system for importing
@@ -1,281 +0,0 @@
"""
Flexible Imports admin API: list, filter, and export unmapped rows captured during CSV imports.
"""
from typing import Optional, Dict, Any, List
from datetime import datetime
import csv
import io

from fastapi import APIRouter, Depends, Query, HTTPException
from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session
from sqlalchemy import func, or_, cast, String

from app.database.base import get_db
from app.auth.security import get_admin_user
from app.models.flexible import FlexibleImport


router = APIRouter(prefix="/flexible", tags=["flexible"])


@router.get("/imports")
async def list_flexible_imports(
    file_type: Optional[str] = Query(None, description="Filter by CSV file type (e.g., FILES.csv)"),
    target_table: Optional[str] = Query(None, description="Filter by target model table name"),
    q: Optional[str] = Query(None, description="Quick text search across file type, target table, and unmapped data"),
    has_keys: Optional[List[str]] = Query(
        None,
        description="Filter rows where extra_data (or its 'unmapped' payload) contains these keys. Repeat param for multiple keys.",
    ),
    skip: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=500),
    db: Session = Depends(get_db),
    current_user=Depends(get_admin_user),
):
    """List flexible import rows with optional filtering, quick search, and pagination."""
    query = db.query(FlexibleImport)
    if file_type:
        query = query.filter(FlexibleImport.file_type == file_type)
    if target_table:
        query = query.filter(FlexibleImport.target_table == target_table)
    if q:
        pattern = f"%{q.strip()}%"
        # Search across file_type, target_table, and serialized JSON extra_data
        query = query.filter(
            or_(
                FlexibleImport.file_type.ilike(pattern),
                FlexibleImport.target_table.ilike(pattern),
                cast(FlexibleImport.extra_data, String).ilike(pattern),
            )
        )

    # Filter by key presence inside the JSON payload by string matching of the serialized JSON.
    # This is DB-agnostic and works across SQLite/Postgres, though not as precise as JSON operators.
    if has_keys:
        for k in [k for k in has_keys if k is not None and str(k).strip() != ""]:
            key = str(k).strip()
            # Look for the JSON key token followed by a colon, e.g. "key":
            query = query.filter(cast(FlexibleImport.extra_data, String).ilike(f'%"{key}":%'))

    total = query.count()
    items = (
        query.order_by(FlexibleImport.id.desc())
        .offset(skip)
        .limit(limit)
        .all()
    )

    def serialize(item: FlexibleImport) -> Dict[str, Any]:
        return {
            "id": item.id,
            "file_type": item.file_type,
            "target_table": item.target_table,
            "primary_key_field": item.primary_key_field,
            "primary_key_value": item.primary_key_value,
            "extra_data": item.extra_data,
        }

    return {
        "total": total,
        "skip": skip,
        "limit": limit,
        "items": [serialize(i) for i in items],
    }

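The has_keys filter above relies on a plain-text property of serialized JSON, namely that an object key always appears as "key": in the dump. A minimal standalone sketch of that property (plain Python, no database; the payloads are made up):

    import json

    # Made-up extra_data payloads as they would be serialized into a JSON column.
    rows = [
        {"unmapped": {"LEGACY_CODE": "A1", "NOTES": "keep"}},
        {"unmapped": {"NOTES": "drop"}},
    ]

    def has_key(extra_data, key):
        # Mirrors the SQL filter: LIKE '%"<key>":%' against the serialized JSON.
        return f'"{key}":' in json.dumps(extra_data)

    print([has_key(r, "LEGACY_CODE") for r in rows])  # [True, False]

This also shows the imprecision the code comment admits: the token matches keys at any nesting depth, and even string values that happen to contain "key":, which is why native JSON operators would be stricter.
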
@router.get("/options")
|
|
||||||
async def flexible_options(
|
|
||||||
db: Session = Depends(get_db),
|
|
||||||
current_user=Depends(get_admin_user),
|
|
||||||
):
|
|
||||||
"""Return distinct file types and target tables for filter dropdowns."""
|
|
||||||
file_types: List[str] = [
|
|
||||||
ft for (ft,) in db.query(func.distinct(FlexibleImport.file_type)).order_by(FlexibleImport.file_type.asc()).all()
|
|
||||||
if ft is not None
|
|
||||||
]
|
|
||||||
target_tables: List[str] = [
|
|
||||||
tt for (tt,) in db.query(func.distinct(FlexibleImport.target_table)).order_by(FlexibleImport.target_table.asc()).all()
|
|
||||||
if tt is not None and tt != ""
|
|
||||||
]
|
|
||||||
return {"file_types": file_types, "target_tables": target_tables}
|
|
||||||
|
|
||||||
|
|
||||||
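To exercise these endpoints together the way the admin UI does (options first, then a filtered listing), here is a hedged client sketch; the host, port, and token are assumptions, and the /api prefix matches how the router was mounted in app/main.py before this commit:

    import requests

    base = "http://localhost:8000/api/flexible"          # assumed host/port
    headers = {"Authorization": "Bearer <admin-token>"}  # admin JWT; get_admin_user rejects non-admins

    options = requests.get(f"{base}/options", headers=headers).json()
    print(options["file_types"], options["target_tables"])

    # Repeating has_keys is how FastAPI fills the List[str] query parameter.
    # The key names here are made up for illustration.
    params = [("file_type", "FILES.csv"), ("has_keys", "LEGACY_CODE"), ("has_keys", "NOTES")]
    page = requests.get(f"{base}/imports", params=params, headers=headers).json()
    print(page["total"], len(page["items"]))
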
@router.get("/export")
|
|
||||||
async def export_unmapped_csv(
|
|
||||||
file_type: Optional[str] = Query(None, description="Filter by CSV file type (e.g., FILES.csv)"),
|
|
||||||
target_table: Optional[str] = Query(None, description="Filter by target model table name"),
|
|
||||||
has_keys: Optional[List[str]] = Query(
|
|
||||||
None,
|
|
||||||
description="Filter rows where extra_data (or its 'unmapped' payload) contains these keys. Repeat param for multiple keys.",
|
|
||||||
),
|
|
||||||
db: Session = Depends(get_db),
|
|
||||||
current_user=Depends(get_admin_user),
|
|
||||||
):
|
|
||||||
"""Export unmapped rows as CSV for review. Includes basic metadata columns and unmapped fields.
|
|
||||||
|
|
||||||
If FlexibleImport.extra_data contains a nested 'unmapped' dict, those keys are exported.
|
|
||||||
Otherwise, all keys of extra_data are exported.
|
|
||||||
"""
|
|
||||||
query = db.query(FlexibleImport)
|
|
||||||
if file_type:
|
|
||||||
query = query.filter(FlexibleImport.file_type == file_type)
|
|
||||||
if target_table:
|
|
||||||
query = query.filter(FlexibleImport.target_table == target_table)
|
|
||||||
if has_keys:
|
|
||||||
for k in [k for k in has_keys if k is not None and str(k).strip() != ""]:
|
|
||||||
key = str(k).strip()
|
|
||||||
query = query.filter(cast(FlexibleImport.extra_data, String).ilike(f'%"{key}":%'))
|
|
||||||
|
|
||||||
rows: List[FlexibleImport] = query.order_by(FlexibleImport.id.asc()).all()
|
|
||||||
if not rows:
|
|
||||||
raise HTTPException(status_code=404, detail="No matching flexible imports to export")
|
|
||||||
|
|
||||||
# Determine union of unmapped keys across all rows
|
|
||||||
unmapped_keys: List[str] = []
|
|
||||||
key_set = set()
|
|
||||||
for r in rows:
|
|
||||||
data = r.extra_data or {}
|
|
||||||
payload = data.get("unmapped") if isinstance(data, dict) and isinstance(data.get("unmapped"), dict) else data
|
|
||||||
if isinstance(payload, dict):
|
|
||||||
for k in payload.keys():
|
|
||||||
if k not in key_set:
|
|
||||||
key_set.add(k)
|
|
||||||
unmapped_keys.append(k)
|
|
||||||
|
|
||||||
# Prepare CSV
|
|
||||||
meta_headers = [
|
|
||||||
"id",
|
|
||||||
"file_type",
|
|
||||||
"target_table",
|
|
||||||
"primary_key_field",
|
|
||||||
"primary_key_value",
|
|
||||||
]
|
|
||||||
fieldnames = meta_headers + unmapped_keys
|
|
||||||
|
|
||||||
output = io.StringIO()
|
|
||||||
writer = csv.DictWriter(output, fieldnames=fieldnames)
|
|
||||||
writer.writeheader()
|
|
||||||
|
|
||||||
for r in rows:
|
|
||||||
row_out: Dict[str, Any] = {
|
|
||||||
"id": r.id,
|
|
||||||
"file_type": r.file_type,
|
|
||||||
"target_table": r.target_table or "",
|
|
||||||
"primary_key_field": r.primary_key_field or "",
|
|
||||||
"primary_key_value": r.primary_key_value or "",
|
|
||||||
}
|
|
||||||
data = r.extra_data or {}
|
|
||||||
payload = data.get("unmapped") if isinstance(data, dict) and isinstance(data.get("unmapped"), dict) else data
|
|
||||||
if isinstance(payload, dict):
|
|
||||||
for k in unmapped_keys:
|
|
||||||
v = payload.get(k)
|
|
||||||
# Normalize lists/dicts to JSON strings for CSV safety
|
|
||||||
if isinstance(v, (dict, list)):
|
|
||||||
try:
|
|
||||||
import json as _json
|
|
||||||
row_out[k] = _json.dumps(v, ensure_ascii=False)
|
|
||||||
except Exception:
|
|
||||||
row_out[k] = str(v)
|
|
||||||
else:
|
|
||||||
row_out[k] = v if v is not None else ""
|
|
||||||
writer.writerow(row_out)
|
|
||||||
|
|
||||||
output.seek(0)
|
|
||||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
||||||
filename_parts = ["flexible_unmapped"]
|
|
||||||
if file_type:
|
|
||||||
filename_parts.append(file_type.replace("/", "-").replace(" ", "_"))
|
|
||||||
if target_table:
|
|
||||||
filename_parts.append(target_table.replace("/", "-").replace(" ", "_"))
|
|
||||||
filename = "_".join(filename_parts) + f"_{timestamp}.csv"
|
|
||||||
|
|
||||||
return StreamingResponse(
|
|
||||||
iter([output.getvalue()]),
|
|
||||||
media_type="text/csv",
|
|
||||||
headers={
|
|
||||||
"Content-Disposition": f"attachment; filename=\"{filename}\"",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
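A matching sketch for the bulk export (same assumptions as the listing example above): the endpoint streams CSV, so the client simply writes the response body to disk.

    import requests

    resp = requests.get(
        "http://localhost:8000/api/flexible/export",        # assumed host/port
        params={"file_type": "FILES.csv"},                  # illustrative filter
        headers={"Authorization": "Bearer <admin-token>"},  # assumed admin JWT
    )
    resp.raise_for_status()
    with open("flexible_unmapped.csv", "wb") as fh:
        fh.write(resp.content)
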
@router.get("/export/{row_id}")
|
|
||||||
async def export_single_row_csv(
|
|
||||||
row_id: int,
|
|
||||||
db: Session = Depends(get_db),
|
|
||||||
current_user=Depends(get_admin_user),
|
|
||||||
):
|
|
||||||
"""Export a single flexible import row as CSV.
|
|
||||||
|
|
||||||
Includes metadata columns plus keys from the row's unmapped payload.
|
|
||||||
If FlexibleImport.extra_data contains a nested 'unmapped' dict, those keys are exported;
|
|
||||||
otherwise, all keys of extra_data are exported.
|
|
||||||
"""
|
|
||||||
row: Optional[FlexibleImport] = (
|
|
||||||
db.query(FlexibleImport).filter(FlexibleImport.id == row_id).first()
|
|
||||||
)
|
|
||||||
if not row:
|
|
||||||
raise HTTPException(status_code=404, detail="Flexible import row not found")
|
|
||||||
|
|
||||||
data = row.extra_data or {}
|
|
||||||
payload = (
|
|
||||||
data.get("unmapped")
|
|
||||||
if isinstance(data, dict) and isinstance(data.get("unmapped"), dict)
|
|
||||||
else data
|
|
||||||
)
|
|
||||||
|
|
||||||
unmapped_keys: List[str] = []
|
|
||||||
if isinstance(payload, dict):
|
|
||||||
for k in payload.keys():
|
|
||||||
unmapped_keys.append(k)
|
|
||||||
|
|
||||||
meta_headers = [
|
|
||||||
"id",
|
|
||||||
"file_type",
|
|
||||||
"target_table",
|
|
||||||
"primary_key_field",
|
|
||||||
"primary_key_value",
|
|
||||||
]
|
|
||||||
fieldnames = meta_headers + unmapped_keys
|
|
||||||
|
|
||||||
output = io.StringIO()
|
|
||||||
writer = csv.DictWriter(output, fieldnames=fieldnames)
|
|
||||||
writer.writeheader()
|
|
||||||
|
|
||||||
row_out: Dict[str, Any] = {
|
|
||||||
"id": row.id,
|
|
||||||
"file_type": row.file_type,
|
|
||||||
"target_table": row.target_table or "",
|
|
||||||
"primary_key_field": row.primary_key_field or "",
|
|
||||||
"primary_key_value": row.primary_key_value or "",
|
|
||||||
}
|
|
||||||
if isinstance(payload, dict):
|
|
||||||
for k in unmapped_keys:
|
|
||||||
v = payload.get(k)
|
|
||||||
if isinstance(v, (dict, list)):
|
|
||||||
try:
|
|
||||||
import json as _json
|
|
||||||
row_out[k] = _json.dumps(v, ensure_ascii=False)
|
|
||||||
except Exception:
|
|
||||||
row_out[k] = str(v)
|
|
||||||
else:
|
|
||||||
row_out[k] = v if v is not None else ""
|
|
||||||
|
|
||||||
writer.writerow(row_out)
|
|
||||||
output.seek(0)
|
|
||||||
|
|
||||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
||||||
filename = (
|
|
||||||
f"flexible_row_{row.id}_{row.file_type.replace('/', '-').replace(' ', '_')}_{timestamp}.csv"
|
|
||||||
if row.file_type
|
|
||||||
else f"flexible_row_{row.id}_{timestamp}.csv"
|
|
||||||
)
|
|
||||||
|
|
||||||
return StreamingResponse(
|
|
||||||
iter([output.getvalue()]),
|
|
||||||
media_type="text/csv",
|
|
||||||
headers={
|
|
||||||
"Content-Disposition": f"attachment; filename=\"{filename}\"",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
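Both export endpoints normalize nested dicts and lists to JSON strings so that each CSV cell holds a single value. A standalone illustration of that rule with made-up data:

    import csv
    import io
    import json

    payload = {"code": "A1", "tags": ["x", "y"]}  # made-up unmapped payload
    row = {
        k: json.dumps(v, ensure_ascii=False) if isinstance(v, (dict, list)) else v
        for k, v in payload.items()
    }

    buf = io.StringIO()
    writer = csv.DictWriter(buf, fieldnames=list(row))
    writer.writeheader()
    writer.writerow(row)
    print(buf.getvalue())
    # code,tags
    # A1,"[""x"", ""y""]"
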
File diff suppressed because it is too large
@@ -20,6 +20,7 @@ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
 
 # JWT Security
 security = HTTPBearer()
+optional_security = HTTPBearer(auto_error=False)
 
 
 def verify_password(plain_password: str, hashed_password: str) -> bool:

@@ -190,6 +191,31 @@ def get_current_user(
     return user
 
 
+def get_optional_current_user(
+    credentials: Optional[HTTPAuthorizationCredentials] = Depends(optional_security),
+    db: Session = Depends(get_db)
+) -> Optional[User]:
+    """Get current authenticated user, but allow None if not authenticated"""
+    if not credentials:
+        return None
+
+    try:
+        token = credentials.credentials
+        username = verify_token(token)
+
+        if username is None:
+            return None
+
+        user = db.query(User).filter(User.username == username).first()
+        if user is None or not user.is_active:
+            return None
+
+        return user
+
+    except Exception:
+        return None
+
+
 def get_admin_user(current_user: User = Depends(get_current_user)) -> User:
     """Require admin privileges"""
     if not current_user.is_admin:
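The new get_optional_current_user lets a route answer anonymous callers instead of failing with 401, while still personalizing when a valid token arrives. A hypothetical endpoint sketch (the router and route path are illustrative, not part of this commit):

    from fastapi import APIRouter, Depends

    from app.auth.security import get_optional_current_user

    demo_router = APIRouter()  # illustrative router, not in this commit

    @demo_router.get("/whoami")
    async def whoami(current_user=Depends(get_optional_current_user)):
        # None means unauthenticated; the dependency swallows missing or bad tokens.
        if current_user is None:
            return {"authenticated": False}
        return {"authenticated": True, "username": current_user.username}
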
@@ -9,7 +9,15 @@ from app.config import settings
 
 engine = create_engine(
     settings.database_url,
-    connect_args={"check_same_thread": False} if "sqlite" in settings.database_url else {}
+    connect_args={
+        "check_same_thread": False,
+        # SQLite performance optimizations for bulk imports
+        "timeout": 30,
+    } if "sqlite" in settings.database_url else {},
+    # Performance settings for bulk operations
+    pool_pre_ping=True,
+    pool_recycle=3600,  # Recycle connections after 1 hour
+    echo=False  # Set to True for SQL debugging
 )
 
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
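For context on the new connect_args: SQLAlchemy passes them straight through to the DB-API driver, so for SQLite they become arguments to sqlite3.connect. A direct-driver equivalent (standard library only; the database path is illustrative):

    import sqlite3

    conn = sqlite3.connect(
        "app.db",                 # illustrative path
        timeout=30,               # wait up to 30s on a locked database before raising
        check_same_thread=False,  # allow the connection to be used across threads
    )
    conn.close()

The 30-second timeout is what helps bulk imports: concurrent writers wait on SQLite's file lock instead of immediately raising "database is locked".
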
app/import_export/__init__.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+"""
+Import/Export module for Delphi Database System
+
+This module provides clean, modular CSV import functionality
+for all database tables.
+"""
app/main.py (20 lines changed)
@@ -160,8 +160,6 @@ from app.api.documents import router as documents_router
 from app.api.billing import router as billing_router
 from app.api.search import router as search_router
 from app.api.admin import router as admin_router
-from app.api.import_data import router as import_router
-from app.api.flexible import router as flexible_router
 from app.api.support import router as support_router
 from app.api.settings import router as settings_router
 from app.api.mortality import router as mortality_router

@@ -189,10 +187,8 @@ app.include_router(billing_router, prefix="/api/billing", tags=["billing"])
 app.include_router(documents_router, prefix="/api/documents", tags=["documents"])
 app.include_router(search_router, prefix="/api/search", tags=["search"])
 app.include_router(admin_router, prefix="/api/admin", tags=["admin"])
-app.include_router(import_router, prefix="/api/import", tags=["import"])
 app.include_router(support_router, prefix="/api/support", tags=["support"])
 app.include_router(settings_router, prefix="/api/settings", tags=["settings"])
-app.include_router(flexible_router, prefix="/api")
 app.include_router(mortality_router, prefix="/api/mortality", tags=["mortality"])
 app.include_router(pensions_router, prefix="/api/pensions", tags=["pensions"])
 app.include_router(pension_valuation_router, prefix="/api/pensions", tags=["pensions-valuation"])

@@ -288,22 +284,10 @@ async def admin_page(request: Request):
     )
 
 
-@app.get("/import", response_class=HTMLResponse)
-async def import_page(request: Request):
-    """Data import management page (admin only)"""
-    return templates.TemplateResponse(
-        "import.html",
-        {"request": request, "title": "Data Import - " + settings.app_name}
-    )
-
-
-@app.get("/flexible", response_class=HTMLResponse)
-async def flexible_page(request: Request):
-    """Flexible imports admin page (admin only)."""
-    return templates.TemplateResponse(
-        "flexible.html",
-        {"request": request, "title": "Flexible Imports - " + settings.app_name}
-    )
-
-
 @app.get("/health")
@@ -1,37 +0,0 @@
"""
Flexible storage for unmapped CSV columns during import
"""
from sqlalchemy import Column, Integer, String
from sqlalchemy.types import JSON

from app.models.base import BaseModel


class FlexibleImport(BaseModel):
    """Stores per-row extra/unmapped data for any import, without persisting mapping patterns."""

    __tablename__ = "flexible_imports"

    id = Column(Integer, primary_key=True, autoincrement=True)

    # The CSV filename used by the importer (e.g., "FILES.csv" or arbitrary names in flexible mode)
    file_type = Column(String(120), nullable=False, index=True)

    # The SQLAlchemy model table this extra data is associated with (if any)
    target_table = Column(String(120), nullable=True, index=True)

    # Optional link to the primary record created in the target table
    primary_key_field = Column(String(120), nullable=True)
    primary_key_value = Column(String(255), nullable=True, index=True)

    # Extra unmapped columns from the CSV row
    extra_data = Column(JSON, nullable=False)

    def __repr__(self) -> str:  # pragma: no cover - repr utility
        return (
            f"<FlexibleImport(id={self.id}, file_type='{self.file_type}', "
            f"target_table='{self.target_table}', pk_field='{self.primary_key_field}', "
            f"pk_value='{self.primary_key_value}')>"
        )

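A hedged sketch of how an importer would have populated this model with a row's leftover columns (the session, table name, and values are assumptions, not taken from this commit):

    from app.models.flexible import FlexibleImport

    # Made-up columns that did not map onto the target model.
    leftover = {"unmapped": {"OLD_REF": "X-100", "COMMENTS": "migrated"}}

    record = FlexibleImport(
        file_type="FILES.csv",     # CSV the row came from
        target_table="documents",  # illustrative target table
        primary_key_field="id",
        primary_key_value="12345",
        extra_data=leftover,
    )
    db.add(record)   # 'db' is an open SQLAlchemy Session (assumed)
    db.commit()
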
@@ -1,314 +0,0 @@
(function() {
  const apiBase = '/api/flexible';
  let state = {
    fileType: '',
    targetTable: '',
    q: '',
    skip: 0,
    limit: 50,
    total: 0,
    hasKeys: [],
  };

  function q(id) { return document.getElementById(id); }

  function formatPreviewHtml(obj, term) {
    // Returns sanitized HTML with clickable keys
    try {
      const payload = obj && obj.unmapped && typeof obj.unmapped === 'object' ? obj.unmapped : obj;
      const keys = Object.keys(payload || {}).slice(0, 5);
      const segments = keys.map((k) => {
        const safeKey = window.htmlSanitizer.escape(String(k));
        const valueStr = String(payload[k]).slice(0, 60);
        const valueHtml = term && term.trim().length > 0 ? highlight(valueStr, term) : window.htmlSanitizer.escape(valueStr);
        return `<span class="kv-pair"><button type="button" class="key-link text-primary-700 dark:text-primary-400 hover:underline" data-key="${safeKey}">${safeKey}</button>: ${valueHtml}</span>`;
      });
      return segments.join(', ');
    } catch (_) { return ''; }
  }

  function escapeRegExp(str) {
    return String(str).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  }

  function highlight(text, term) {
    if (!term) return window.htmlSanitizer.escape(text);
    const pattern = new RegExp(escapeRegExp(term), 'ig');
    const escaped = window.htmlSanitizer.escape(text);
    // Replace on the escaped string to avoid breaking HTML
    return escaped.replace(pattern, (m) => `<mark>${window.htmlSanitizer.escape(m)}</mark>`);
  }

  async function loadOptions() {
    try {
      const res = await window.http.wrappedFetch(`${apiBase}/options`);
      if (!res.ok) throw await window.http.toError(res, 'Failed to load options');
      const data = await res.json();
      const fileSel = q('filterFileType');
      const tableSel = q('filterTargetTable');
      // Clear existing except first
      fileSel.length = 1; tableSel.length = 1;
      (data.file_types || []).forEach(v => {
        const opt = document.createElement('option');
        opt.value = v; opt.textContent = v; fileSel.appendChild(opt);
      });
      (data.target_tables || []).forEach(v => {
        const opt = document.createElement('option');
        opt.value = v; opt.textContent = v; tableSel.appendChild(opt);
      });
    } catch (e) {
      alert(window.http.formatAlert(e, 'Error loading options'));
    }
  }

  async function loadRows() {
    try {
      const params = new URLSearchParams();
      if (state.fileType) params.set('file_type', state.fileType);
      if (state.targetTable) params.set('target_table', state.targetTable);
      if (state.q) params.set('q', state.q);
      if (Array.isArray(state.hasKeys)) {
        state.hasKeys.forEach((k) => {
          if (k && String(k).trim().length > 0) params.append('has_keys', String(k).trim());
        });
      }
      params.set('skip', String(state.skip));
      params.set('limit', String(state.limit));
      const res = await window.http.wrappedFetch(`${apiBase}/imports?${params.toString()}`);
      if (!res.ok) throw await window.http.toError(res, 'Failed to load flexible imports');
      const data = await res.json();
      state.total = data.total || 0;
      renderRows(data.items || []);
      renderMeta();
      renderKeyChips();
    } catch (e) {
      alert(window.http.formatAlert(e, 'Error loading flexible imports'));
    }
  }

  function renderRows(items) {
    const tbody = q('flexibleRows');
    tbody.innerHTML = '';
    items.forEach(item => {
      const tr = document.createElement('tr');
      tr.className = 'hover:bg-neutral-50 dark:hover:bg-neutral-700/40 cursor-pointer';
      tr.innerHTML = `
        <td class="px-3 py-2 whitespace-nowrap">${item.id}</td>
        <td class="px-3 py-2 whitespace-nowrap">${window.htmlSanitizer.escape(item.file_type || '')}</td>
        <td class="px-3 py-2 whitespace-nowrap">${window.htmlSanitizer.escape(item.target_table || '')}</td>
        <td class="px-3 py-2 whitespace-nowrap text-xs text-neutral-500">${window.htmlSanitizer.escape((item.primary_key_field || '') + (item.primary_key_value ? '=' + item.primary_key_value : ''))}</td>
        <td class="px-3 py-2 text-xs previewCell"></td>
        <td class="px-3 py-2 text-right">
          <button class="inline-flex items-center gap-1 text-primary-600 hover:text-primary-700 px-2 py-1 text-xs rounded-md hover:bg-primary-50 dark:hover:bg-primary-900/30" data-action="export" data-id="${item.id}">
            <i class="fa-solid fa-download"></i>
            <span>CSV</span>
          </button>
        </td>
      `;
      // Set sanitized highlighted preview
      const previewCell = tr.querySelector('.previewCell');
      const previewHtml = formatPreviewHtml(item.extra_data || {}, state.q);
      window.setSafeHTML(previewCell, previewHtml);
      // Bind click on keys to add filters
      previewCell.querySelectorAll('.key-link').forEach((btn) => {
        btn.addEventListener('click', (ev) => {
          ev.stopPropagation();
          const key = btn.getAttribute('data-key') || '';
          addKeyFilter(key);
        });
      });
      // Row click opens modal
      tr.addEventListener('click', (ev) => {
        // Ignore clicks on the export button inside the row
        const target = ev.target.closest('button[data-action="export"]');
        if (target) return;
        openDetailModal(item);
      });
      // Export button handler
      tr.querySelector('button[data-action="export"]').addEventListener('click', (ev) => {
        ev.stopPropagation();
        exportSingleRow(item.id);
      });
      tbody.appendChild(tr);
    });
  }

  function renderMeta() {
    const start = state.total === 0 ? 0 : state.skip + 1;
    const end = Math.min(state.skip + state.limit, state.total);
    q('rowsMeta').textContent = `Showing ${start}-${end} of ${state.total}`;
    q('prevPageBtn').disabled = state.skip === 0;
    q('nextPageBtn').disabled = state.skip + state.limit >= state.total;
  }

  function applyFilters() {
    state.fileType = q('filterFileType').value || '';
    state.targetTable = q('filterTargetTable').value || '';
    state.q = (q('quickSearch').value || '').trim();
    state.skip = 0;
    loadRows();
  }

  function addKeyFilter(key) {
    const k = String(key || '').trim();
    if (!k) return;
    if (!Array.isArray(state.hasKeys)) state.hasKeys = [];
    if (!state.hasKeys.includes(k)) {
      state.hasKeys.push(k);
      state.skip = 0;
      loadRows();
    }
  }

  function removeKeyFilter(key) {
    const k = String(key || '').trim();
    if (!k) return;
    state.hasKeys = (state.hasKeys || []).filter((x) => x !== k);
    state.skip = 0;
    loadRows();
  }

  function clearKeyFilters() {
    if ((state.hasKeys || []).length === 0) return;
    state.hasKeys = [];
    state.skip = 0;
    loadRows();
  }

  function renderKeyChips() {
    const container = q('keyChipsContainer');
    const chipsWrap = q('keyChips');
    const clearBtn = q('clearKeyChips');
    if (!container || !chipsWrap) return;
    chipsWrap.innerHTML = '';
    const keys = state.hasKeys || [];
    if (keys.length === 0) {
      container.classList.add('hidden');
    } else {
      container.classList.remove('hidden');
      keys.forEach((k) => {
        const btn = document.createElement('button');
        btn.type = 'button';
        btn.className = 'inline-flex items-center gap-1 px-2 py-1 rounded-full text-xs bg-primary-50 text-primary-700 border border-primary-200 hover:bg-primary-100 dark:bg-primary-900/30 dark:text-primary-200 dark:border-primary-800';
        btn.setAttribute('data-chip-key', k);
        btn.innerHTML = `<span class="font-mono">${window.htmlSanitizer.escape(k)}</span> <i class="fa-solid fa-xmark"></i>`;
        btn.addEventListener('click', (ev) => {
          ev.stopPropagation();
          removeKeyFilter(k);
        });
        chipsWrap.appendChild(btn);
      });
    }
    if (clearBtn) {
      clearBtn.onclick = (ev) => { ev.preventDefault(); clearKeyFilters(); };
    }
  }

  async function exportCsv() {
    try {
      const params = new URLSearchParams();
      if (state.fileType) params.set('file_type', state.fileType);
      if (state.targetTable) params.set('target_table', state.targetTable);
      if (Array.isArray(state.hasKeys)) {
        state.hasKeys.forEach((k) => {
          if (k && String(k).trim().length > 0) params.append('has_keys', String(k).trim());
        });
      }
      const url = `${apiBase}/export?${params.toString()}`;
      const res = await window.http.wrappedFetch(url);
      if (!res.ok) throw await window.http.toError(res, 'Export failed');
      const blob = await res.blob();
      const a = document.createElement('a');
      const objectUrl = URL.createObjectURL(blob);
      a.href = objectUrl;
      a.download = 'flexible_unmapped.csv';
      document.body.appendChild(a);
      a.click();
      a.remove();
      setTimeout(() => URL.revokeObjectURL(objectUrl), 1000);
    } catch (e) {
      alert(window.http.formatAlert(e, 'Error exporting CSV'));
    }
  }

  async function exportSingleRow(rowId) {
    try {
      const res = await window.http.wrappedFetch(`${apiBase}/export/${rowId}`);
      if (!res.ok) throw await window.http.toError(res, 'Export failed');
      const blob = await res.blob();
      const a = document.createElement('a');
      const objectUrl = URL.createObjectURL(blob);
      a.href = objectUrl;
      a.download = `flexible_row_${rowId}.csv`;
      document.body.appendChild(a);
      a.click();
      a.remove();
      setTimeout(() => URL.revokeObjectURL(objectUrl), 1000);
    } catch (e) {
      alert(window.http.formatAlert(e, 'Error exporting row CSV'));
    }
  }

  function openDetailModal(item) {
    // Populate fields
    q('detailRowId').textContent = `#${item.id}`;
    q('detailFileType').textContent = item.file_type || '';
    q('detailTargetTable').textContent = item.target_table || '';
    q('detailPkField').textContent = item.primary_key_field || '';
    q('detailPkValue').textContent = item.primary_key_value || '';
    try {
      const pretty = JSON.stringify(item.extra_data || {}, null, 2);
      q('detailJson').textContent = pretty;
    } catch (_) {
      q('detailJson').textContent = '';
    }
    const exportBtn = q('detailExportBtn');
    exportBtn.onclick = () => exportSingleRow(item.id);
    openModal('flexibleDetailModal');
  }

  function bindEvents() {
    q('applyFiltersBtn').addEventListener('click', applyFilters);
    q('exportCsvBtn').addEventListener('click', exportCsv);
    const clearBtn = q('clearKeyChips');
    if (clearBtn) clearBtn.addEventListener('click', (ev) => { ev.preventDefault(); clearKeyFilters(); });
    // Quick search with debounce
    const searchInput = q('quickSearch');
    let searchTimer = null;
    searchInput.addEventListener('input', () => {
      const value = searchInput.value || '';
      clearTimeout(searchTimer);
      searchTimer = setTimeout(() => {
        state.q = value.trim();
        state.skip = 0;
        loadRows();
      }, 300);
    });
    searchInput.addEventListener('keydown', (ev) => {
      if (ev.key === 'Enter') {
        ev.preventDefault();
        clearTimeout(searchTimer);
        state.q = (searchInput.value || '').trim();
        state.skip = 0;
        loadRows();
      }
    });
    q('prevPageBtn').addEventListener('click', () => {
      state.skip = Math.max(0, state.skip - state.limit);
      loadRows();
    });
    q('nextPageBtn').addEventListener('click', () => {
      if (state.skip + state.limit < state.total) {
        state.skip += state.limit;
        loadRows();
      }
    });
  }

  document.addEventListener('DOMContentLoaded', () => {
    bindEvents();
    loadOptions().then(loadRows);
  });
})();

@@ -199,14 +199,25 @@ function initializeBatchProgressUI() {
 
 async function cancelBatch(batchId) {
   try {
-    const resp = await window.http.wrappedFetch(`/api/billing/statements/batch-progress/${encodeURIComponent(batchId)}`, { method: 'DELETE' });
-    if (!resp.ok) {
-      throw await window.http.toError(resp, 'Failed to cancel batch');
+    if (!confirm(`Are you sure you want to cancel batch ${batchId}?`)) {
+      return;
     }
 
+    // Import functionality removed
+
+    const result = await resp.json();
+    console.log('Import batch cancelled:', result.message);
+
     // Let stream update the row; no-op here
+    // The progress will be updated via WebSocket
   } catch (e) {
-    console.warn('Cancel failed', e);
-    try { alert(window.http.formatAlert(e, 'Cancel failed')); } catch (_) {}
+    console.warn('Cancel import batch failed', e);
+    try {
+      const errorMsg = window.http.formatAlert(e, 'Cancel import batch failed');
+      alert(errorMsg);
+    } catch (_) {
+      alert('Failed to cancel import batch');
+    }
   }
 }
@@ -464,14 +475,7 @@ async function checkUserPermissions() {
     const adminDivider = document.getElementById('admin-menu-divider');
     if (adminItem) adminItem.classList.remove('hidden');
     if (adminDivider) adminDivider.classList.remove('hidden');
-    const importDesktop = document.getElementById('nav-import-desktop');
-    const importMobile = document.getElementById('nav-import-mobile');
-    if (importDesktop) importDesktop.classList.remove('hidden');
-    if (importMobile) importMobile.classList.remove('hidden');
-    const flexibleDesktop = document.getElementById('nav-flexible-desktop');
-    const flexibleMobile = document.getElementById('nav-flexible-mobile');
-    if (flexibleDesktop) flexibleDesktop.classList.remove('hidden');
-    if (flexibleMobile) flexibleMobile.classList.remove('hidden');
+    // Import navigation items removed
   }
   const userDropdownName = document.querySelector('#userDropdown button span');
   if (user.full_name && userDropdownName) {
@@ -1,113 +0,0 @@
{% extends "base.html" %}

{% block content %}
<div class="space-y-6">
  <div class="flex items-center justify-between">
    <h1 class="text-2xl font-semibold">Flexible Imports</h1>
    <div class="flex items-center gap-2">
      <button id="exportCsvBtn" class="px-4 py-2 bg-primary-600 hover:bg-primary-700 text-white rounded-lg transition-colors">
        <i class="fa-solid fa-file-csv mr-2"></i> Export CSV
      </button>
    </div>
  </div>

  <div class="bg-white dark:bg-neutral-800 rounded-xl border border-neutral-200 dark:border-neutral-700 p-4">
    <div class="grid grid-cols-1 md:grid-cols-4 gap-4">
      <div>
        <label class="block text-sm font-medium mb-1">File Type</label>
        <select id="filterFileType" class="w-full rounded-lg border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-900 p-2">
          <option value="">All</option>
        </select>
      </div>
      <div>
        <label class="block text-sm font-medium mb-1">Target Table</label>
        <select id="filterTargetTable" class="w-full rounded-lg border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-900 p-2">
          <option value="">All</option>
        </select>
      </div>
      <div>
        <label class="block text-sm font-medium mb-1">Quick Search</label>
        <input id="quickSearch" type="text" placeholder="Search file type, target table, keys and values" class="w-full rounded-lg border border-neutral-300 dark:border-neutral-600 bg-white dark:bg-neutral-900 p-2" />
      </div>
      <div class="flex items-end">
        <button id="applyFiltersBtn" class="w-full md:w-auto px-4 py-2 bg-neutral-100 dark:bg-neutral-700 hover:bg-neutral-200 dark:hover:bg-neutral-600 rounded-lg border border-neutral-200 dark:border-neutral-600">Apply</button>
      </div>
    </div>
    <!-- Key filter chips -->
    <div id="keyChipsContainer" class="mt-3 hidden">
      <div class="flex items-center gap-2 flex-wrap">
        <span class="text-xs text-neutral-500">Filters:</span>
        <div id="keyChips" class="flex items-center gap-2 flex-wrap"></div>
        <button id="clearKeyChips" class="ml-auto text-xs text-neutral-600 hover:text-neutral-800 dark:text-neutral-300 dark:hover:text-white">Clear</button>
      </div>
    </div>
  </div>

  <div class="bg-white dark:bg-neutral-800 rounded-xl border border-neutral-200 dark:border-neutral-700 overflow-hidden">
    <div class="overflow-x-auto">
      <table class="w-full text-sm">
        <thead>
          <tr class="bg-neutral-100 dark:bg-neutral-700 text-left">
            <th class="px-3 py-2">ID</th>
            <th class="px-3 py-2">File Type</th>
            <th class="px-3 py-2">Target Table</th>
            <th class="px-3 py-2">PK</th>
            <th class="px-3 py-2">Unmapped Preview</th>
            <th class="px-3 py-2 text-right">Actions</th>
          </tr>
        </thead>
        <tbody id="flexibleRows" class="divide-y divide-neutral-200 dark:divide-neutral-700">
        </tbody>
      </table>
    </div>
    <div class="flex items-center justify-between p-3 border-t border-neutral-200 dark:border-neutral-700">
      <div class="text-xs text-neutral-500" id="rowsMeta">Loading...</div>
      <div class="flex items-center gap-2">
        <button id="prevPageBtn" class="px-3 py-1.5 bg-neutral-100 dark:bg-neutral-700 disabled:opacity-50 rounded-lg">Prev</button>
        <button id="nextPageBtn" class="px-3 py-1.5 bg-neutral-100 dark:bg-neutral-700 disabled:opacity-50 rounded-lg">Next</button>
      </div>
    </div>
  </div>

  <!-- Row detail modal -->
  <div id="flexibleDetailModal" class="hidden fixed inset-0 bg-black/60 z-50 overflow-y-auto" aria-hidden="true">
    <div class="flex min-h-full items-center justify-center p-4">
      <div class="bg-white dark:bg-neutral-800 rounded-xl shadow-xl max-w-4xl w-full max-h-[85vh] overflow-hidden">
        <div class="flex items-center justify-between px-6 py-4 border-b border-neutral-200 dark:border-neutral-700">
          <h2 class="text-lg font-semibold">Flexible Row <span id="detailRowId"></span></h2>
          <div class="flex items-center gap-2">
            <button id="detailExportBtn" class="px-3 py-1.5 bg-primary-600 hover:bg-primary-700 text-white rounded-lg text-sm">
              <i class="fa-solid fa-file-csv mr-1"></i> Export CSV
            </button>
            <button onclick="closeModal('flexibleDetailModal')" class="text-neutral-500 hover:text-neutral-700">
              <i class="fa-solid fa-xmark text-xl"></i>
            </button>
          </div>
        </div>
        <div class="p-4">
          <div class="grid grid-cols-1 md:grid-cols-2 gap-3 mb-3 text-xs text-neutral-600 dark:text-neutral-300">
            <div>File Type: <span id="detailFileType" class="font-mono"></span></div>
            <div>Target Table: <span id="detailTargetTable" class="font-mono"></span></div>
            <div>PK Field: <span id="detailPkField" class="font-mono"></span></div>
            <div>PK Value: <span id="detailPkValue" class="font-mono"></span></div>
          </div>
          <div class="border border-neutral-200 dark:border-neutral-700 rounded-lg overflow-hidden">
            <pre id="detailJson" class="p-4 text-xs bg-neutral-50 dark:bg-neutral-900 overflow-auto max-h-[60vh]"></pre>
          </div>
        </div>
        <div class="flex items-center justify-end gap-3 px-6 py-4 border-t border-neutral-200 dark:border-neutral-700 bg-neutral-50 dark:bg-neutral-800/50">
          <button onclick="closeModal('flexibleDetailModal')" class="px-4 py-2 bg-neutral-100 dark:bg-neutral-700 text-neutral-700 dark:text-neutral-300 hover:bg-neutral-200 dark:hover:bg-neutral-600 rounded-lg transition-colors duration-200">
            Close
          </button>
        </div>
      </div>
    </div>
  </div>
</div>
{% endblock %}

{% block extra_scripts %}
<script src="/static/js/flexible.js"></script>
{% endblock %}

File diff suppressed because it is too large