ariG23498 (HF Staff) committed
Commit 14ea61d · verified · 1 Parent(s): f4e6e4a

Upload black-forest-labs_FLUX.1-Kontext-dev_1.txt with huggingface_hub

black-forest-labs_FLUX.1-Kontext-dev_1.txt CHANGED
@@ -1,8 +1,10 @@
```CODE:
+ import torch
from diffusers import DiffusionPipeline
from diffusers.utils import load_image

- pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-Kontext-dev")
+ # switch to "mps" for apple devices
+ pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-Kontext-dev", dtype=torch.bfloat16, device_map="cuda")

prompt = "Turn this cat into a dog"
input_image = load_image("https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/cat.png")
@@ -12,29 +14,8 @@ image = pipe(image=input_image, prompt=prompt).images[0]

ERROR:
Traceback (most recent call last):
- File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2343, in _from_pretrained
- tokenizer = cls(*init_inputs, **init_kwargs)
- File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/models/t5/tokenization_t5_fast.py", line 119, in __init__
- super().__init__(
- ~~~~~~~~~~~~~~~~^
- vocab_file=vocab_file,
- ^^^^^^^^^^^^^^^^^^^^^^
- ...<7 lines>...
- **kwargs,
- ^^^^^^^^^
- )
- ^
- File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
- raise ValueError(
- ...<2 lines>...
- )
- ValueError: Cannot instantiate this tokenizer from a slow version. If it's based on sentencepiece, make sure you have sentencepiece installed.
-
- During handling of the above exception, another exception occurred:
-
- Traceback (most recent call last):
- File "/tmp/black-forest-labs_FLUX.1-Kontext-dev_1JO1im2.py", line 19, in <module>
- pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-Kontext-dev")
+ File "/tmp/black-forest-labs_FLUX.1-Kontext-dev_1rIMizY.py", line 26, in <module>
+ pipe = DiffusionPipeline.from_pretrained("black-forest-labs/FLUX.1-Kontext-dev", dtype=torch.bfloat16, device_map="cuda")
File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/huggingface_hub/utils/_validators.py", line 114, in _inner_fn
return fn(*args, **kwargs)
File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/diffusers/pipelines/pipeline_utils.py", line 1025, in from_pretrained
@@ -55,13 +36,20 @@ Traceback (most recent call last):
^^^^^^^^^
)
^
- File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2344, in _from_pretrained
- except import_protobuf_decode_error():
- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^
- File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 87, in import_protobuf_decode_error
- raise ImportError(PROTOBUF_IMPORT_ERROR.format(error_message))
- ImportError:
- requires the protobuf library but it was not found in your environment. Check out the instructions on the
- installation page of its repo: https://github.com/protocolbuffers/protobuf/tree/master/python#installation and follow the ones
- that match your environment. Please note that you may need to restart your runtime after installation.
-
+ File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_base.py", line 2343, in _from_pretrained
+ tokenizer = cls(*init_inputs, **init_kwargs)
+ File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/models/t5/tokenization_t5_fast.py", line 119, in __init__
+ super().__init__(
+ ~~~~~~~~~~~~~~~~^
+ vocab_file=vocab_file,
+ ^^^^^^^^^^^^^^^^^^^^^^
+ ...<7 lines>...
+ **kwargs,
+ ^^^^^^^^^
+ )
+ ^
+ File "/tmp/.cache/uv/environments-v2/ca96a1cdc2ecd6c7/lib/python3.13/site-packages/transformers/tokenization_utils_fast.py", line 108, in __init__
+ raise ValueError(
+ ...<2 lines>...
+ )
+ ValueError: Cannot instantiate this tokenizer from a slow version. If it's based on sentencepiece, make sure you have sentencepiece installed.
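
Both logged runs fail while `DiffusionPipeline.from_pretrained` builds the T5 fast tokenizer: the removed traceback ends in an ImportError for `protobuf`, and the updated one ends in a ValueError asking for `sentencepiece`. A minimal sketch of a workaround, assuming the missing packages can simply be installed into the environment; the device-selection fallback and the `torch_dtype`/`.to()` spelling are assumptions on my part, not taken from the logged scripts (which pass `dtype=` and `device_map=`):

```python
# Sketch only: the package installs and device fallback are assumptions,
# not part of the logged scripts.
#   pip install sentencepiece protobuf
import torch
from diffusers import DiffusionPipeline
from diffusers.utils import load_image

# Prefer CUDA, then Apple MPS, otherwise stay on CPU.
if torch.cuda.is_available():
    device = "cuda"
elif torch.backends.mps.is_available():
    device = "mps"
else:
    device = "cpu"

pipe = DiffusionPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-Kontext-dev",
    torch_dtype=torch.bfloat16,  # the logged script uses dtype=torch.bfloat16, device_map="cuda"
)
pipe.to(device)

prompt = "Turn this cat into a dog"
input_image = load_image(
    "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/diffusers/cat.png"
)
image = pipe(image=input_image, prompt=prompt).images[0]
```

Installing `sentencepiece` gives transformers what it needs to build the T5 tokenizer from its slow version, which is exactly what the new ValueError asks for; `protobuf` covers the fallback path the earlier log hit.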