/* NOTE(review): This copy of the file appears mangled in transit: the original
   newlines were collapsed onto a few huge physical lines (so the original `//`
   line comments now comment-out the code that followed them), and the JSX tags
   in the render section plus the markup in the HTML report template look
   stripped. The inline comments below document the intact logic; restore the
   real formatting/markup from version control before editing.
   Other review observations that cannot be fixed in a comments-only pass:
   - the `samples` state is declared as { id, path, name }[] but the render
     reads `sample.url` / `sample.filename` — presumably a shape mismatch with
     the API response; verify against getSamples().
   - `Math.random().toString(36).substr(2, 9)` uses deprecated substr; ids are
     non-cryptographic and collision-prone for long sessions. */
import React, { useState, useRef, useEffect } from 'react'; import { useNavigate } from 'react-router-dom'; import { UploadIcon, StackIcon, DownloadIcon, ArrowLeftIcon, CheckCircleIcon, XCircleIcon } from './Icons'; import { BatchItem } from '../types'; import { uploadMultiple, classifyMultipleStream, clearUploads, getSamples, useSample } from '../services/apiService'; /* BatchAnalysis page: lets the user queue images (picked files or server-side samples), uploads them, streams classification results back, shows per-item pass/fail state, and offers a downloadable HTML report. */ const BatchAnalysis: React.FC = () => { const navigate = useNavigate(); /* Upload queue. NOTE(review): useState([]) infers never[] under strict TS — presumably meant useState<BatchItem[]>([]); likewise useRef(null) is untyped. */ const [items, setItems] = useState([]); const [processing, setProcessing] = useState(false); const [showSamples, setShowSamples] = useState(false); const [samples, setSamples] = useState<{ id: number, path: string, name: string }[]>([]); const fileInputRef = useRef(null); /* Load the sample-image list once on mount; failures are only logged, never surfaced to the user. */ useEffect(() => { const fetchSamples = async () => { try { const data = await getSamples(); if (Array.isArray(data)) { setSamples(data); } } catch (err) { console.error("Failed to fetch samples", err); } }; fetchSamples(); }, []); /* File-picker handler: create local preview entries immediately (object URLs), then upload the files; if the upload call fails, exactly the items added by this call are flagged 'error'. */ const handleFileChange = async (e: React.ChangeEvent) => { if (e.target.files && e.target.files.length > 0) { const newFiles = Array.from(e.target.files) as File[]; // Create preview items const newItems: BatchItem[] = newFiles.map(file => ({ id: Math.random().toString(36).substr(2, 9), file: file, previewUrl: URL.createObjectURL(file), status: 'pending' })); setItems(prev => [...prev, ...newItems]); // Upload files immediately try { await uploadMultiple(newFiles); } catch (err) { console.error("Upload failed", err); // Mark these items as error setItems(prev => prev.map(item => newItems.find(ni => ni.id === item.id) ? 
{ ...item, status: 'error' } : item )); } } }; /* Enqueue a backend-hosted sample: useSample(filename, 'multiple') copies it server-side, so only an empty stub File is created for UI-state consistency. NOTE(review): useSample is a plain API call from apiService, not a React hook, despite the use- prefix. */ const addSampleToQueue = async (filename: string, url: string) => { try { // Call backend to copy sample await useSample(filename, 'multiple'); // Create a dummy file object for UI state consistency // The backend already has the file, so we don't need actual content here const file = new File([""], filename, { type: "image/png" }); const newItem: BatchItem = { id: Math.random().toString(36).substr(2, 9), file, previewUrl: url, status: 'pending' }; setItems(prev => [...prev, newItem]); } catch (err) { console.error("Failed to load sample", err); } }; /* Client-side mirror of werkzeug.secure_filename so local names can be matched against server-reported filenames. */ const normalizeFilename = (name: string) => { // Basic emulation of werkzeug.secure_filename behavior // 1. ASCII only (remove non-ascii) - simplified here to just keep standard chars // 2. Replace whitespace with underscore // 3. Remove invalid chars let normalized = name.replace(/\s+/g, '_'); normalized = normalized.replace(/[^a-zA-Z0-9._-]/g, ''); return normalized; }; /* Mark everything 'processing', then consume the streaming classifier; each result (matched by exact or normalized filename) flips its item to completed/error as it arrives. */ const runBatchProcessing = async () => { setProcessing(true); setItems(prev => prev.map(item => ({ ...item, status: 'processing', error: undefined }))); try { // Use the generator helper which handles buffering and parsing correctly for await (const result of classifyMultipleStream()) { console.log("Received result:", result); if (result.error) { console.error("Error for file:", result.filename, result.error); setItems(prev => prev.map(item => { // Check exact match or normalized match if (item.file.name === result.filename || normalizeFilename(item.file.name) === result.filename) { return { ...item, status: 'error', error: result.error }; } return item; })); continue; } setItems(prev => prev.map(item => { // Check exact match or normalized match if (item.file.name === result.filename || normalizeFilename(item.file.name) === result.filename) { return { ...item, status: 'completed', result: result.status === 'pass' ? 
'pass' : 'fail', labels: result.labels }; } return item; })); } } /* A thrown error here means the whole stream died (network/parse), not a single file. */ catch (err) { console.error("Batch processing error:", err); setItems(prev => prev.map(item => item.status === 'processing' ? { ...item, status: 'error', error: 'Network or server error' } : item )); } finally { setProcessing(false); // Safety check: Mark any remaining processing items as error setItems(prev => prev.map(item => item.status === 'processing' ? { ...item, status: 'error', error: 'No result from server (Filename mismatch or timeout)' } : item )); } }; /* Percent of queue items in a terminal state (completed or error); 0 for an empty queue. */ const getProgress = () => { if (items.length === 0) return 0; const completed = items.filter(i => i.status === 'completed' || i.status === 'error').length; return (completed / items.length) * 100; }; /* Build a standalone HTML report and download it. NOTE(review): the template literal below looks garbled/stripped in this copy — the per-item row interpolations sit outside the items.map() that should contain them; restore the original markup from VCS. */ const downloadReport = () => { const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); const htmlContent = ` Prism Batch Report - ${timestamp}

Batch Classification Report

Generated on: ${new Date().toLocaleString()}

${items.map(item => ` `).join('')}
Filename Status Result Failure Reason
${item.file.name} ${item.status} ${item.result ? item.result.toUpperCase() : '-'} ${item.labels && item.labels.length > 0 ? `${item.labels.join(', ')}` : '-'}
`; /* Serve the report via a temporary object URL and a synthetic anchor click. */ const blob = new Blob([htmlContent], { type: 'text/html' }); const url = URL.createObjectURL(blob); const a = document.createElement('a'); a.href = url; a.download = `prism-batch-report-${timestamp}.html`; document.body.appendChild(a); a.click(); document.body.removeChild(a); URL.revokeObjectURL(url); }; /* Reset the local queue and ask the backend to drop its uploaded copies. NOTE(review): per-item object URLs from createObjectURL are never revoked here — potential leak; confirm intended. */ const clearAll = async () => { setItems([]); await clearUploads(); }; /* True once every queued item reached a terminal state. */ const isComplete = items.length > 0 && items.every(i => i.status === 'completed' || i.status === 'error'); return (

Batch Image Analysis

{/* Controls */}
{items.length > 0 && ( )}
{Math.round(getProgress())}%
{/* Sample Gallery Toggle */}
{samples.map((sample) => { const isSelected = items.some(item => item.previewUrl === sample.url); return (
addSampleToQueue(sample.filename, sample.url)} > {`Sample
{isSelected && (
)}
); })}
{/* Status Bar */} {items.length > 0 && (

{items.length} items in queue

{processing && (

Running on CPU: Classification takes time, please be patient 🐨✨

)}
)} {/* Grid */}
{items.map((item) => (
Batch Item {/* Overlay Status */}
{item.status === 'processing' && ( ANALYZING... )} {item.status === 'pending' && ( PENDING )} {item.status === 'error' && (
ERROR {item.error && ( {item.error.length > 50 ? item.error.substring(0, 50) + '...' : item.error} )}
)} {item.status === 'completed' && (
{item.result === 'pass' ? : } {item.result}
{item.labels && item.labels.length > 0 && (
{item.labels.map((label, idx) => ( {label} ))}
)}
)}
))}
); }; export default BatchAnalysis;