Commit 4aa9a25
Parent(s): 550276f
Update app.py
app.py CHANGED
@@ -24,6 +24,7 @@ sys.path.insert(0, './GroundingDINO')
 
 import argparse
 import copy
+import re
 
 import numpy as np
 import torch

@@ -81,6 +82,14 @@ sd_pipe = None
 lama_cleaner_model= None
 ram_model = None
 
+def parse_label_and_score(string):
+    match = re.match(r'([a-z]+)\(([0-9\.]+)\)', string)
+    if match:
+        label, score = match.groups()
+        return label, float(score)
+    else:
+        return None
+
 def get_sam_vit_h_4b8939():
     if not os.path.exists('./sam_vit_h_4b8939.pth'):
         logger.info(f"get sam_vit_h_4b8939.pth...")
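The regex in parse_label_and_score expects phrase strings of the form label(score), e.g. "dog(0.45)". A minimal, self-contained sketch of its behaviour follows; the sample strings are hypothetical, not taken from the Space:

# Sketch: exercising parse_label_and_score on made-up phrase strings.
import re

def parse_label_and_score(string):
    match = re.match(r'([a-z]+)\(([0-9\.]+)\)', string)
    if match:
        label, score = match.groups()
        return label, float(score)
    else:
        return None

print(parse_label_and_score("dog(0.45)"))            # ('dog', 0.45)
print(parse_label_and_score("cat(0.81)"))            # ('cat', 0.81)
print(parse_label_and_score("traffic light(0.38)"))  # None: '[a-z]+' does not match the space

Because the caller below unpacks the result directly (label, score = parse_label_and_score(pred_phrases[i])), a phrase that does not match this single-word pattern would make the unpacking raise a TypeError, so the None branch is only safe if every entry in pred_phrases has the label(score) form.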
@@ -553,12 +562,17 @@ def run_anything_task(input_image, text_prompt, box_threshold, text_threshold,
             color = np.concatenate([np.random.random(3), np.array([0.6])], axis=0)
             # color = np.array([30/255, 144/255, 255/255, 0.6])
             show_mask(mask.cpu().numpy(), plt.gca(), color)
-
+            label, score = parse_label_and_score(pred_phrases[i])
+            item = {
                 "id": i,
-                "box": boxes_filt[i].tolist(),
-                "label":
+                # "box": boxes_filt[i].tolist(),
+                "label": label,
+                "score": score,
                 "color": color
-            }
+            }
+            print("label: " + label)
+            results.append(item)
+
         for box, label in zip(boxes_filt, pred_phrases):
             show_box(box.cpu().numpy(), plt.gca(), label)
         plt.axis('off')

@@ -567,8 +581,11 @@ def run_anything_task(input_image, text_prompt, box_threshold, text_threshold,
         segment_image_result = cv2.cvtColor(cv2.imread(image_path), cv2.COLOR_BGR2RGB)
         os.remove(image_path)
         output_images.append(segment_image_result)
-
-
+
+    debug = {
+        "results": results
+    }
+    return debug, output_images, gr.Gallery.update(label='result images')
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser("Grounded SAM demo", add_help=True)
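With these changes, run_anything_task returns the debug dictionary as its first value, ahead of output_images and the gr.Gallery.update(...) call. For reference, a rough, hypothetical illustration of the shape of that payload; the values are made up, and in the app label and score come from parse_label_and_score(pred_phrases[i]) while color is the random RGBA NumPy array built a few lines earlier in the same loop:

# Hypothetical shape of the `debug` return value; all numbers are invented.
results = [
    {"id": 0, "label": "dog", "score": 0.45, "color": [0.20, 0.60, 0.90, 0.6]},
    {"id": 1, "label": "cat", "score": 0.81, "color": [0.85, 0.30, 0.10, 0.6]},
]
debug = {"results": results}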