import { SingleAnalysisReport, BatchStreamResult } from "../types";
/**
 * Uploads a single file to the Flask backend and returns the filename
 * assigned by the server.
 */
export const uploadSingle = async (file: File): Promise<string> => {
  const formData = new FormData();
  formData.append('file', file);
  const response = await fetch('/upload_single', {
    method: 'POST',
    body: formData,
  });
  if (!response.ok) {
    throw new Error(`Upload failed: ${response.statusText}`);
  }
  const data = await response.json();
  return data.filename;
};
/**
 * Triggers classification for a single image by filename.
 * Expects the backend to return { classification, detailed_results }, with an
 * optional result_table HTML fallback.
 */
export const classifySingle = async (filename: string): Promise<SingleAnalysisReport> => {
  const response = await fetch('/classify_single', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ filename }),
  });
  if (!response.ok) {
    throw new Error(`Classification failed: ${response.statusText}`);
  }
  const data = await response.json();
  return {
    classification: data.classification,
    detailed_results: data.detailed_results,
    html: data.result_table, // Optional HTML fallback
  };
};
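// Example (sketch, not part of the module's API): the typical single-image flow
// chains the two calls above. `fileInput` is an assumed <input type="file"> element;
// only uploadSingle/classifySingle come from this file.
//
//   const file = fileInput.files![0];
//   const storedName = await uploadSingle(file);
//   const report = await classifySingle(storedName);
//   console.log(report.classification, report.detailed_results);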
/**
 * Uploads multiple files for batch processing.
 */
export const uploadMultiple = async (files: File[]): Promise<void> => {
  const formData = new FormData();
  // Append every file under the same 'file' field so the backend can read
  // them as a list (e.g. request.files.getlist('file') in Flask).
  files.forEach(file => {
    formData.append('file', file);
  });
  const response = await fetch('/upload_multiple', {
    method: 'POST',
    body: formData,
  });
  if (!response.ok) {
    throw new Error(`Batch upload failed: ${response.statusText}`);
  }
  // Assuming success means files are ready for classification.
};
/**
 * Triggers batch classification and returns the raw response body for manual streaming.
 */
export const classifyMultiple = async (): Promise<ReadableStream<Uint8Array>> => {
  const response = await fetch('/classify_multiple', {
    method: 'POST',
  });
  if (!response.ok || !response.body) {
    throw new Error(`Batch classification failed: ${response.statusText}`);
  }
  return response.body;
};
/**
 * Clears all uploaded files from the backend.
 */
export const clearUploads = async () => {
  const response = await fetch('/clear_uploads', {
    method: 'POST',
  });
  return response.json();
};
/**
 * Fetches the list of available sample images from the backend.
 */
export const getSamples = async () => {
  const response = await fetch('/api/samples');
  return response.json();
};
/**
 * Asks the backend to stage a sample image for either single or batch analysis.
 */
export const useSample = async (filename: string, destination: 'single' | 'multiple') => {
  const response = await fetch('/api/use_sample', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ filename, destination }),
  });
  return response.json();
};
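// Example (sketch): staging a bundled sample instead of a user upload. The shape of
// the /api/samples response (an array of objects with a `filename` field) is an
// assumption; only the endpoints themselves are defined above.
//
//   const samples = await getSamples();
//   await useSample(samples[0].filename, 'single');
//   const report = await classifySingle(samples[0].filename);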
/**
 * Triggers batch classification and yields results as they stream in.
 */
export async function* classifyMultipleStream(): AsyncGenerator<BatchStreamResult> {
  const stream = await classifyMultiple();
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      // Process lines (assuming NDJSON or similar line-delimited JSON).
      const lines = buffer.split('\n');
      buffer = lines.pop() || ''; // Keep the incomplete trailing line in the buffer
      for (const line of lines) {
        if (line.trim()) {
          try {
            const result = JSON.parse(line);
            yield result as BatchStreamResult;
          } catch (e) {
            console.warn('Failed to parse stream chunk', e);
          }
        }
      }
    }
  } finally {
    reader.releaseLock();
  }
}
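// Example (sketch): consuming the batch stream from caller code. `selectedFiles`
// is an assumed File[] collected by the UI; each yielded value is a BatchStreamResult.
//
//   await uploadMultiple(selectedFiles);
//   for await (const result of classifyMultipleStream()) {
//     console.log(result);
//   }
//   await clearUploads();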