import os
import io
from PIL import Image
from flask import Flask, request, jsonify, send_file
from flask_cors import CORS
import re
import torch
from transformers import pipeline
# =================================================================
# 1. SETUP FLASK SERVER & CORS
# =================================================================
app = Flask(__name__)
CORS(app)
# =================================================================
# 2. SETUP AI MODEL (Back to the simple pipeline)
# =================================================================
MODEL_NAME = "prithivMLmods/BrainTumor-Classification-Mini"
classifier = None
def load_model():
    """Load the AI model using a standard transformers pipeline."""
    global classifier
    try:
        print("⏳ Loading AI model ({})...".format(MODEL_NAME))
        device = "cuda" if torch.cuda.is_available() else "cpu"
        classifier = pipeline(
            "image-classification",
            model=MODEL_NAME,
            device=device
        )
        print("✅ Model {} loaded successfully on device: {}".format(MODEL_NAME, device))
    except Exception as e:
        print("❌ Failed to load model: {}".format(e))
        classifier = None
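# Load the model once at import time so the first request does not have to wait
# for it (note: downloading the weights can take a while on a fresh container).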
load_model()
# Label mapping
LABEL_MAPPING = {
    'glioma': {'status': 'Tumor Detected', 'jenis': 'Glioma'},
    'meningioma': {'status': 'Tumor Detected', 'jenis': 'Meningioma'},
    'notumor': {'status': 'No Tumor', 'jenis': 'None'},
    'pituitary': {'status': 'Tumor Detected', 'jenis': 'Pituitary Tumor'}
}
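# Example (illustrative): LABEL_MAPPING.get("notumor") yields
# {'status': 'No Tumor', 'jenis': 'None'}, which predict() returns as the
# prediction_status / tumor_type fields of the JSON response.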
def clean_label(raw_label):
    """Normalize the raw label returned by the model to a LABEL_MAPPING key."""
    match = re.search(r'(glioma|meningioma|notumor|pituitary)', raw_label, re.IGNORECASE)
    if match:
        return match.group(0).lower()
    return raw_label.lower()
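# Examples (illustrative): raw pipeline labels such as "glioma_tumor" or
# "Meningioma" reduce to the bare class names used in LABEL_MAPPING:
#   clean_label("glioma_tumor") -> "glioma"
#   clean_label("Meningioma")   -> "meningioma"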
# =================================================================
# 3. ENDPOINT WEB SERVER (Host HTML)
# =================================================================
@app.route('/', methods=['GET'])
def serve_html():
    """Serve index.html on the Space's 'App' tab."""
    try:
        return send_file('index.html')
    except Exception as e:
        return f"<h1>Error: index.html was not found on the server.</h1><p>Make sure index.html has been COPY-ed into the Docker container.</p><p>{e}</p>", 500
# =================================================================
# 4. ENDPOINT API (Predict)
# =================================================================
@app.route('/predict', methods=['POST'])
def predict():
    """Main prediction endpoint."""
    if classifier is None:
        return jsonify({"error": "The AI model has not been loaded or failed to load."}), 500
    if 'file' not in request.files:
        return jsonify({"error": "No file uploaded."}), 400
    file = request.files['file']
    try:
        image_bytes = file.read()
        image = Image.open(io.BytesIO(image_bytes)).convert("RGB")
    except Exception as e:
        return jsonify({"error": f"Failed to process the image: {e}"}), 400
    # Run the prediction
    try:
        # Use the classification pipeline; top_k=1 returns only the best label
        results = classifier(image, top_k=1)
        result = results[0]
        raw_label = result['label']
        confidence = result['score'] * 100
        clean_key = clean_label(raw_label)
        result_data = LABEL_MAPPING.get(clean_key, {
            'status': 'Unrecognized Result',
            'jenis': 'N/A'
        })
        return jsonify({
            "prediction_status": result_data['status'],
            "tumor_type": result_data['jenis'],
            "confidence": round(confidence, 2)
        })
    except Exception as e:
        print("❌ Prediction error: {}".format(e))
        return jsonify({"error": "An error occurred while running the model prediction."}), 500