ariG23498 HF Staff committed on
Commit f4e6e4a · verified · 1 Parent(s): 74f7ab5

Upload stabilityai_stable-diffusion-3.5-large_1.txt with huggingface_hub

stabilityai_stable-diffusion-3.5-large_1.txt ADDED
@@ -0,0 +1,52 @@
+ ```CODE:
+ import torch
+ from diffusers import DiffusionPipeline
+
+ # switch to "mps" for apple devices
+ pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-3.5-large", dtype=torch.bfloat16, device_map="cuda")
+
+ prompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k"
+ image = pipe(prompt).images[0]
+ ```
+
+ ERROR:
+ Traceback (most recent call last):
+ File "/tmp/stabilityai_stable-diffusion-3.5-large_1YAeNM1.py", line 25, in <module>
+ pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-3.5-large", dtype=torch.bfloat16, device_map="cuda")
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
+ return fn(*args, **kwargs)
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/diffusers/pipelines/pipeline_utils.py", line 1025, in from_pretrained
+ loaded_sub_model = load_sub_model(
+ library_name=library_name,
+ ...<21 lines>...
+ quantization_config=quantization_config,
+ )
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/diffusers/pipelines/pipeline_loading_utils.py", line 860, in load_sub_model
+ loaded_sub_model = load_method(os.path.join(cached_folder, name), **loading_kwargs)
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2097, in from_pretrained
+ return cls._from_pretrained(
+ ~~~~~~~~~~~~~~~~~~~~^
+ resolved_vocab_files,
+ ^^^^^^^^^^^^^^^^^^^^^
+ ...<9 lines>...
+ **kwargs,
+ ^^^^^^^^^
+ )
+ ^
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2343, in _from_pretrained
+ tokenizer = cls(*init_inputs, **init_kwargs)
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/transformers/models/t5/tokenization_t5_fast.py", line 119, in __init__
+ super().__init__(
+ ~~~~~~~~~~~~~~~~^
+ vocab_file=vocab_file,
+ ^^^^^^^^^^^^^^^^^^^^^^
+ ...<7 lines>...
+ **kwargs,
+ ^^^^^^^^^
+ )
+ ^
+ File "/tmp/.cache/uv/environments-v2/b4c16b19a8353fbb/lib/python3.13/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
+ raise ValueError(
+ ...<2 lines>...
+ )
+ ValueError: Cannot instantiate this tokenizer from a slow version. If it's based on sentencepiece, make sure you have sentencepiece installed.
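The ValueError at the bottom of the traceback comes from the pipeline's T5 fast tokenizer, which is built by converting a slow, sentencepiece-based tokenizer; that conversion fails when `sentencepiece` is not importable in the environment. A minimal sketch of the likely remedy, assuming the failure is only this missing dependency, is to install `sentencepiece` (and `protobuf`, which the conversion also relies on) before re-running the snippet above:

```python
# Assumed remedy for the ValueError above: the T5 fast tokenizer is converted
# from a slow sentencepiece tokenizer, so `sentencepiece` must be installed.
import importlib.util
import subprocess
import sys

if importlib.util.find_spec("sentencepiece") is None:
    # Install the missing dependencies into the current interpreter's
    # environment, then re-run the original snippet.
    subprocess.check_call([sys.executable, "-m", "pip", "install", "sentencepiece", "protobuf"])
```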