ariG23498 HF Staff committed on
Commit
7c40db8
·
verified ·
1 Parent(s): 7c33f69

Upload zai-org_GLM-4.6V-Flash_1.txt with huggingface_hub

Browse files
Files changed (1) hide show
  1. zai-org_GLM-4.6V-Flash_1.txt +32 -0
zai-org_GLM-4.6V-Flash_1.txt ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ```CODE:
2
+ # Load model directly
3
+ from transformers import AutoProcessor, AutoModelForMultimodalLM
4
+
5
+ processor = AutoProcessor.from_pretrained("zai-org/GLM-4.6V-Flash")
6
+ model = AutoModelForMultimodalLM.from_pretrained("zai-org/GLM-4.6V-Flash")
7
+ messages = [
8
+ {
9
+ "role": "user",
10
+ "content": [
11
+ {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},
12
+ {"type": "text", "text": "What animal is on the candy?"}
13
+ ]
14
+ },
15
+ ]
16
+ inputs = processor.apply_chat_template(
17
+ messages,
18
+ add_generation_prompt=True,
19
+ tokenize=True,
20
+ return_dict=True,
21
+ return_tensors="pt",
22
+ ).to(model.device)
23
+
24
+ outputs = model.generate(**inputs, max_new_tokens=40)
25
+ print(processor.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
26
+ ```
27
+
28
+ ERROR:
29
+ Traceback (most recent call last):
30
+ File "/tmp/zai-org_GLM-4.6V-Flash_1wipBeU.py", line 24, in <module>
31
+ from transformers import AutoProcessor, AutoModelForMultimodalLM
32
+ ImportError: cannot import name 'AutoModelForMultimodalLM' from 'transformers' (/tmp/.cache/uv/environments-v2/bc85589e8ee2e8f2/lib/python3.13/site-packages/transformers/__init__.py)