ariG23498 (HF Staff) committed
Commit 9ba6adf · verified · Parent(s): 9aa7c7f

Upload p-e-w_gemma-3-12b-it-heretic_0.txt with huggingface_hub

Files changed (1):
  p-e-w_gemma-3-12b-it-heretic_0.txt  +59 -0
p-e-w_gemma-3-12b-it-heretic_0.txt ADDED
@@ -0,0 +1,59 @@
```CODE:
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("image-text-to-text", model="p-e-w/gemma-3-12b-it-heretic")
messages = [
    {
        "role": "user",
        "content": [
            {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},
            {"type": "text", "text": "What animal is on the candy?"}
        ]
    },
]
pipe(text=messages)
```

ERROR:
Traceback (most recent call last):
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/image_processing_base.py", line 354, in get_image_processor_dict
    resolved_image_processor_file = resolved_image_processor_files[0]
                                    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^
IndexError: list index out of range

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/tmp/p-e-w_gemma-3-12b-it-heretic_01lzhRU.py", line 26, in <module>
    pipe = pipeline("image-text-to-text", model="p-e-w/gemma-3-12b-it-heretic")
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/pipelines/__init__.py", line 1197, in pipeline
    raise e
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/pipelines/__init__.py", line 1188, in pipeline
    processor = AutoProcessor.from_pretrained(processor, _from_pipeline=task, **hub_kwargs, **model_kwargs)
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/models/auto/processing_auto.py", line 396, in from_pretrained
    return processor_class.from_pretrained(
           ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
        pretrained_model_name_or_path, trust_remote_code=trust_remote_code, **kwargs
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/processing_utils.py", line 1394, in from_pretrained
    args = cls._get_arguments_from_pretrained(pretrained_model_name_or_path, **kwargs)
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/processing_utils.py", line 1453, in _get_arguments_from_pretrained
    args.append(attribute_class.from_pretrained(pretrained_model_name_or_path, **kwargs))
                ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/models/auto/image_processing_auto.py", line 494, in from_pretrained
    raise initial_exception
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/models/auto/image_processing_auto.py", line 476, in from_pretrained
    config_dict, _ = ImageProcessingMixin.get_image_processor_dict(
                     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
        pretrained_model_name_or_path, image_processor_filename=image_processor_filename, **kwargs
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/tmp/.cache/uv/environments-v2/a2b1707762f698b7/lib/python3.13/site-packages/transformers/image_processing_base.py", line 361, in get_image_processor_dict
    raise OSError(
    ...<4 lines>...
    )
OSError: Can't load image processor for 'p-e-w/gemma-3-12b-it-heretic'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'p-e-w/gemma-3-12b-it-heretic' is the correct path to a directory containing a preprocessor_config.json file
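
The traceback indicates that the model repo does not expose a preprocessor_config.json, so AutoProcessor cannot resolve an image processor for it. A possible workaround, sketched below and not part of the original report: load the processor from the base google/gemma-3-12b-it checkpoint (this assumes the finetune keeps the base model's preprocessing) and pass it to the pipeline explicitly via the processor argument available in recent transformers releases.

```python
# Workaround sketch (untested, assumes the heretic finetune reuses the base
# Gemma 3 preprocessing and that the installed transformers version accepts
# a `processor` argument in pipeline()).
from transformers import AutoProcessor, pipeline

# The base repo ships a preprocessor_config.json; it is gated, so an
# authenticated Hugging Face token with Gemma access may be required.
processor = AutoProcessor.from_pretrained("google/gemma-3-12b-it")

pipe = pipeline(
    "image-text-to-text",
    model="p-e-w/gemma-3-12b-it-heretic",
    processor=processor,  # skip processor auto-resolution on the heretic repo
)

messages = [
    {
        "role": "user",
        "content": [
            {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},
            {"type": "text", "text": "What animal is on the candy?"}
        ]
    },
]
print(pipe(text=messages))
```

Alternatively, adding a preprocessor_config.json (and, if needed, the related processor files) copied from the base checkpoint to the heretic repo would presumably let the original snippet run unchanged.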