Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See raw diff.
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_0_shard_0.log +80 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_1_shard_1.log +38 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_2_shard_2.log +80 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_3_shard_3.log +38 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_4_shard_4.log +80 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_5_shard_5.log +38 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_6_shard_6.log +80 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_7_shard_7.log +38 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_0_shard_0.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_1_shard_1.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_2_shard_2.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_3_shard_3.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_4_shard_4.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_5_shard_5.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_6_shard_6.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_7_shard_7.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_0_shard_0.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_1_shard_1.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_2_shard_2.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_3_shard_3.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_4_shard_4.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_5_shard_5.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_6_shard_6.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_7_shard_7.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_0_shard_0.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_1_shard_1.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_2_shard_2.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_3_shard_3.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_4_shard_4.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_5_shard_5.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_6_shard_6.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_7_shard_7.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_0_shard_0.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_1_shard_1.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_2_shard_2.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_3_shard_3.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_4_shard_4.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_5_shard_5.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_6_shard_6.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_7_shard_7.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_0_shard_0.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_1_shard_1.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_2_shard_2.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_3_shard_3.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_4_shard_4.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_5_shard_5.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_6_shard_6.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_7_shard_7.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_204220/gpu_0_shard_0.log +1 -0
- data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_204220/gpu_1_shard_1.log +1 -0
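These are per-GPU shard logs from the merge-tags stage of the data synthesis pipeline, one `logs/merge_tags_<YYYYMMDD_HHMMSS>/gpu_<i>_shard_<i>.log` file per run per GPU. As a minimal sketch, a launcher that would produce exactly this log layout could look like the following; the worker script name and flags are assumptions for illustration, not the repo's actual launcher:

```python
# Hypothetical launcher; reconstructs the logs/merge_tags_<ts>/gpu_<i>_shard_<i>.log
# layout seen in this commit. Script name and flags are illustrative assumptions.
import os
import subprocess
import time


def launch(num_gpus: int = 8) -> None:
    ts = time.strftime("%Y%m%d_%H%M%S")
    log_dir = os.path.join("logs", f"merge_tags_{ts}")
    os.makedirs(log_dir, exist_ok=True)

    procs = []
    for i in range(num_gpus):
        # Pin one GPU per shard; shard id and GPU id coincide, as in the log names.
        env = dict(os.environ, CUDA_VISIBLE_DEVICES=str(i))
        log_f = open(os.path.join(log_dir, f"gpu_{i}_shard_{i}.log"), "w")
        proc = subprocess.Popen(
            ["python", "merge_tags_worker.py",  # hypothetical script name
             "--shard-id", str(i), "--num-shards", str(num_gpus)],
            env=env, stdout=log_f, stderr=subprocess.STDOUT)
        procs.append((proc, log_f))
    for proc, log_f in procs:
        proc.wait()
        log_f.close()


if __name__ == "__main__":
    launch()
```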
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_0_shard_0.log
ADDED
@@ -0,0 +1,80 @@
+Total files: 16. Assigned to Shard 0: 2 files.
+Using GPU: 0, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:28,972 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:35,595 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:38,418 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_4_of_8_chunk_0.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_4_of_8_chunk_0.jsonl
+All tasks for this shard completed.
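This log (and the seven sibling shard logs that follow) records a fixed sequence: round-robin file sharding, a per-GPU model load through lmdeploy with a fallback from TurboMind to its PyTorch engine, batched inference over each JSONL chunk, and a completion marker. A minimal sketch of such a shard worker follows; the script flags and JSON field names are hypothetical, and only the lmdeploy `pipeline`/`PytorchEngineConfig` calls are real API:

```python
# Minimal shard-worker sketch reconstructed from the log lines above.
# Flag names, file handling, and JSON fields are illustrative assumptions.
import argparse
import glob
import json
import os

from lmdeploy import pipeline, PytorchEngineConfig


def main() -> None:
    ap = argparse.ArgumentParser()
    ap.add_argument("--input-dir", required=True)
    ap.add_argument("--output-dir", required=True)
    ap.add_argument("--model-path", required=True)
    ap.add_argument("--shard-id", type=int, required=True)
    ap.add_argument("--num-shards", type=int, default=8)
    ap.add_argument("--batch-size", type=int, default=32)
    args = ap.parse_args()

    files = sorted(glob.glob(os.path.join(args.input_dir, "*.jsonl")))
    if not files:
        # Matches the single-line logs later in this diff.
        print(f"No .jsonl files found in {args.input_dir}")
        return

    # Round-robin sharding: 16 files over 8 shards -> 2 files per shard.
    shard_files = files[args.shard_id :: args.num_shards]
    print(f"Total files: {len(files)}. Assigned to Shard {args.shard_id}: "
          f"{len(shard_files)} files.")
    print(f"Using GPU: {os.environ.get('CUDA_VISIBLE_DEVICES')}, "
          f"Batch Size: {args.batch_size}")

    print(f"Loading Qwen3-VL from: {args.model_path} ...")
    # lmdeploy falls back to its PyTorch engine when TurboMind does not
    # support the model, which is the WARNING seen in the logs.
    pipe = pipeline(args.model_path, backend_config=PytorchEngineConfig())
    print("Model loaded successfully!")

    os.makedirs(args.output_dir, exist_ok=True)
    for path in shard_files:
        with open(path) as f:
            rows = [json.loads(line) for line in f]
        for start in range(0, len(rows), args.batch_size):
            batch = rows[start : start + args.batch_size]
            prompts = [r["prompt"] for r in batch]  # hypothetical field
            responses = pipe(prompts)               # batched inference
            for r, resp in zip(batch, responses):
                r["merged_tags"] = resp.text        # hypothetical field
        out = os.path.join(args.output_dir, os.path.basename(path))
        with open(out, "w") as f:
            f.writelines(json.dumps(r) + "\n" for r in rows)
        print(f"Finished processing {os.path.basename(path)} -> {out}")
    print("All tasks for this shard completed.")


if __name__ == "__main__":
    main()
```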
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_1_shard_1.log
ADDED
@@ -0,0 +1,38 @@
+Total files: 16. Assigned to Shard 1: 2 files.
+Using GPU: 1, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:30,220 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:37,966 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:39,863 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_4_of_8_chunk_1.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_4_of_8_chunk_1.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_2_shard_2.log
ADDED
@@ -0,0 +1,80 @@
+Total files: 16. Assigned to Shard 2: 2 files.
+Using GPU: 2, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:32,191 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:39,654 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:42,314 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_5_of_8_chunk_0.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_5_of_8_chunk_0.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_3_shard_3.log
ADDED
@@ -0,0 +1,38 @@
+Total files: 16. Assigned to Shard 3: 2 files.
+Using GPU: 3, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:34,211 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:41,933 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:43,780 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_5_of_8_chunk_1.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_5_of_8_chunk_1.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_4_shard_4.log
ADDED
@@ -0,0 +1,80 @@
+Total files: 16. Assigned to Shard 4: 2 files.
+Using GPU: 4, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:36,291 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:43,612 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:46,045 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_6_of_8_chunk_0.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_6_of_8_chunk_0.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_5_shard_5.log
ADDED
@@ -0,0 +1,38 @@
+Total files: 16. Assigned to Shard 5: 2 files.
+Using GPU: 5, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:38,398 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:45,959 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:47,710 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_6_of_8_chunk_1.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_6_of_8_chunk_1.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_6_shard_6.log
ADDED
@@ -0,0 +1,80 @@
+Total files: 16. Assigned to Shard 6: 2 files.
+Using GPU: 6, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:40,196 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:47,177 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:48,951 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_7_of_8_chunk_0.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_7_of_8_chunk_0.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_115423/gpu_7_shard_7.log
ADDED
@@ -0,0 +1,38 @@
+Total files: 16. Assigned to Shard 7: 2 files.
+Using GPU: 7, Batch Size: 32
+Loading Qwen3-VL from: /apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/ ...
+2026-01-09 11:54:42,331 - lmdeploy - WARNING - archs.py:45 - Fallback to pytorch engine because `/apdcephfs_nj3/share_300377003/tomtaozhang/hf_models/Qwen3-VL-8B-Instruct/` not supported by turbomind engine.
+2026-01-09 11:54:49,443 - lmdeploy - WARNING - transformers.py:22 - LMDeploy requires transformers version: [4.33.0 ~ 4.56.1], but found version: 4.57.3
+2026-01-09 11:54:51,270 - lmdeploy - WARNING - attention.py:233 - For higher performance, please install FlashAttention-3 https://github.com/Dao-AILab/flash-attention
+Model loaded successfully!
[progress-bar frames (carriage-return/ANSI cursor-control residue) omitted]
+Finished processing sam_sa_000136_part_7_of_8_chunk_1.jsonl -> /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000136/node_0_of_1/03_merged_tags_results/sam_sa_000136_part_7_of_8_chunk_1.jsonl
+All tasks for this shard completed.
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_0_shard_0.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_1_shard_1.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_2_shard_2.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_3_shard_3.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_4_shard_4.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_5_shard_5.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_6_shard_6.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_133242/gpu_7_shard_7.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000602/node_0_of_1/02_qwenvl_tag_results
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_0_shard_0.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_1_shard_1.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_2_shard_2.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_3_shard_3.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_4_shard_4.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_5_shard_5.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_6_shard_6.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_135423/gpu_7_shard_7.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000341/node_0_of_1/02_qwenvl_tag_results
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_0_shard_0.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_1_shard_1.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_2_shard_2.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_3_shard_3.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_4_shard_4.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_5_shard_5.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_6_shard_6.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_140056/gpu_7_shard_7.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part1/sa_000473/node_0_of_1/02_qwenvl_tag_results
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_0_shard_0.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_1_shard_1.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_2_shard_2.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_3_shard_3.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_4_shard_4.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_5_shard_5.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_6_shard_6.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_150956/gpu_7_shard_7.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000109/node_0_of_1/02_qwenvl_tag_results
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_0_shard_0.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_1_shard_1.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_2_shard_2.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_3_shard_3.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_4_shard_4.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_5_shard_5.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_6_shard_6.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_163841/gpu_7_shard_7.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000298/node_0_of_1/02_qwenvl_tag_results
data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_204220/gpu_0_shard_0.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000834/node_0_of_1/02_qwenvl_tag_results

data_load_scripts/data_syn_pipe/pipe_scripts/logs/merge_tags_20260109_204220/gpu_1_shard_1.log
ADDED
@@ -0,0 +1 @@
+No .jsonl files found in /apdcephfs_nj3/share_300377003/tomtaozhang/data/sync_datas/sam_part2/sa_000834/node_0_of_1/02_qwenvl_tag_results