Upload caption_coco.yaml
BLIP/configs/caption_coco.yaml
ADDED
@@ -0,0 +1,33 @@
+image_root: '/export/share/datasets/vision/coco/images/'
+ann_root: 'annotation'
+coco_gt_root: 'annotation/coco_gt'
+
+# set pretrained as a file path or an url
+pretrained: 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/models/model_base_caption_capfilt_large.pth'
+
+# size of vit model; base or large
+vit: 'base'
+vit_grad_ckpt: False
+vit_ckpt_layer: 0
+batch_size: 32
+init_lr: 1e-5
+
+# vit: 'large'
+# vit_grad_ckpt: True
+# vit_ckpt_layer: 5
+# batch_size: 16
+# init_lr: 2e-6
+
+image_size: 384
+
+# generation configs
+max_length: 20
+min_length: 5
+num_beams: 3
+prompt: 'a picture of '
+
+# optimizer
+weight_decay: 0.05
+min_lr: 0
+max_epoch: 5
+
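For reference, a config file like this is typically read into a plain dictionary by the training and evaluation scripts. The snippet below is a minimal sketch, not code from this repository: it assumes the file is saved as configs/caption_coco.yaml and uses PyYAML's safe_load; the load_caption_config helper name is invented for illustration.

# Minimal sketch of loading the config above (assumed path: configs/caption_coco.yaml).
import yaml

def load_caption_config(path="configs/caption_coco.yaml"):
    # safe_load parses the YAML document into a Python dict
    with open(path, "r") as f:
        return yaml.safe_load(f)

if __name__ == "__main__":
    config = load_caption_config()
    print(config["pretrained"])   # checkpoint URL or local file path
    print(config["vit"])          # 'base' (or 'large' with the commented-out block)
    print(config["batch_size"])   # 32 for the base ViT settings
    print(config["prompt"])       # 'a picture of ' used as the caption prefix

One caveat worth noting: depending on the YAML loader, a value written as 1e-5 may be parsed as a string rather than a float, so scripts sometimes cast the learning-rate fields explicitly before use.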