// PRISM2.0: frontend/services/apiService.ts
import { SingleAnalysisReport, BatchStreamResult } from "../types";
/**
* Uploads a single file to the Flask backend.
*/
export const uploadSingle = async (file: File): Promise<string> => {
const formData = new FormData();
formData.append('file', file);
const response = await fetch('/upload_single', {
method: 'POST',
body: formData,
});
if (!response.ok) {
throw new Error(`Upload failed: ${response.statusText}`);
}
const data = await response.json();
return data.filename;
};
/**
 * Triggers classification for a single image by filename.
 * Expects the backend to return { classification, detailed_results, result_table? },
 * where result_table is an optional pre-rendered HTML fallback.
 */
export const classifySingle = async (filename: string): Promise<SingleAnalysisReport> => {
const response = await fetch('/classify_single', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ filename }),
});
if (!response.ok) {
throw new Error(`Classification failed: ${response.statusText}`);
}
const data = await response.json();
return {
classification: data.classification,
detailed_results: data.detailed_results,
html: data.result_table // Optional fallback
};
};
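/*
 * Illustrative usage sketch (not part of this module's API; the helper name
 * `analyzeOne` is hypothetical): a caller would normally chain uploadSingle
 * and classifySingle.
 *
 *   async function analyzeOne(file: File): Promise<SingleAnalysisReport> {
 *     const filename = await uploadSingle(file);
 *     return classifySingle(filename);
 *   }
 */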
/**
* Uploads multiple files for batch processing.
*/
export const uploadMultiple = async (files: File[]): Promise<void> => {
const formData = new FormData();
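  // Every file is appended under the same 'file' key; this assumes the Flask
  // backend collects them with request.files.getlist('file').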
files.forEach(file => {
formData.append('file', file);
});
const response = await fetch('/upload_multiple', {
method: 'POST',
body: formData,
});
if (!response.ok) {
throw new Error(`Batch upload failed: ${response.statusText}`);
}
// Assuming success means files are ready for classification
};
/**
* Triggers batch classification and returns the raw response for manual streaming.
*/
export const classifyMultiple = async (): Promise<ReadableStream<Uint8Array>> => {
const response = await fetch('/classify_multiple', {
method: 'POST',
});
if (!response.ok || !response.body) {
throw new Error(`Batch classification failed: ${response.statusText}`);
}
return response.body;
};
/**
* Clears all uploaded files from the backend.
*/
export const clearUploads = async () => {
  const response = await fetch('/clear_uploads', {
    method: 'POST',
  });
  if (!response.ok) {
    throw new Error(`Clearing uploads failed: ${response.statusText}`);
  }
  return response.json();
};
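/**
 * Fetches the list of available sample files from the backend and returns the
 * parsed JSON response.
 */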
export const getSamples = async () => {
const response = await fetch('/api/samples');
return response.json();
};
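/**
 * Asks the backend to use the named sample file in either the single or
 * multiple upload workflow and returns the parsed JSON response.
 */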
export const useSample = async (filename: string, destination: 'single' | 'multiple') => {
const response = await fetch('/api/use_sample', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ filename, destination })
});
return response.json();
};
/**
* Triggers batch classification and yields results as they stream in.
*/
export async function* classifyMultipleStream(): AsyncGenerator<BatchStreamResult> {
const stream = await classifyMultiple();
const reader = stream.getReader();
const decoder = new TextDecoder();
let buffer = '';
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
buffer += decoder.decode(value, { stream: true });
// Process lines (assuming NDJSON or similar line-delimited JSON)
const lines = buffer.split('\n');
buffer = lines.pop() || ''; // Keep incomplete line in buffer
for (const line of lines) {
if (line.trim()) {
try {
const result = JSON.parse(line);
yield result as BatchStreamResult;
} catch (e) {
console.warn("Failed to parse stream chunk", e);
}
}
}
    }
    // Flush any bytes still buffered in the decoder and parse a trailing line
    // that may have arrived without a final newline.
    buffer += decoder.decode();
    if (buffer.trim()) {
      try {
        yield JSON.parse(buffer) as BatchStreamResult;
      } catch (e) {
        console.warn("Failed to parse final stream chunk", e);
      }
    }
  } finally {
reader.releaseLock();
}
}
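/*
 * Illustrative usage sketch (not part of this module's API; the helper name
 * `analyzeBatch` is hypothetical): upload a batch, consume the streamed
 * per-file results, then clear the uploaded files from the backend.
 *
 *   async function analyzeBatch(files: File[]): Promise<BatchStreamResult[]> {
 *     await uploadMultiple(files);
 *     const results: BatchStreamResult[] = [];
 *     for await (const result of classifyMultipleStream()) {
 *       results.push(result);
 *     }
 *     await clearUploads();
 *     return results;
 *   }
 */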