Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -12,15 +12,17 @@ from rudalle import get_rudalle_model, get_tokenizer, get_vae
|
|
| 12 |
tqdm.__init__ = partialmethod(tqdm.__init__, disable=True)
|
| 13 |
|
| 14 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 15 |
-
translation_model = FSMTForConditionalGeneration.from_pretrained("facebook/wmt19-en-ru", torch_dtype=torch.float16)
|
| 16 |
tokenizer = FSMTTokenizer.from_pretrained("facebook/wmt19-en-ru")
|
| 17 |
-
translation_pipe = pipeline("translation", model=translation_model, tokenizer=tokenizer, device=0)
|
| 18 |
dalle = get_rudalle_model("Malevich", pretrained=True, fp16=True, device=device)
|
| 19 |
tokenizer = get_tokenizer()
|
| 20 |
vae = get_vae().to(device)
|
| 21 |
|
| 22 |
def translation_wrapper(text: str):
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
| 24 |
|
| 25 |
def dalle_wrapper(prompt: str):
|
| 26 |
top_k, top_p = random.choice([
|
|
|
|
| 12 |
# Silence tqdm progress bars globally (model downloads / generation loops).
tqdm.__init__ = partialmethod(tqdm.__init__, disable=True)

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# NOTE(review): torch_dtype=torch.float16 makes .generate() fail on CPU;
# half precision is only safe when `device` is CUDA — confirm this Space
# actually has a GPU (its status is "Runtime error").
translation_model = FSMTForConditionalGeneration.from_pretrained(
    "facebook/wmt19-en-ru", torch_dtype=torch.float16
).to(device)

# Tokenizer for the EN->RU translation model. Bound under a distinct name as
# well, because the global `tokenizer` is rebound to the ruDALL-E tokenizer
# a few lines below and the FSMT one would otherwise become unreachable.
translation_tokenizer = FSMTTokenizer.from_pretrained("facebook/wmt19-en-ru")
tokenizer = translation_tokenizer

dalle = get_rudalle_model("Malevich", pretrained=True, fp16=True, device=device)

# BUG (pre-existing): this rebinding shadows the FSMT tokenizer created above,
# so any later code that encodes/decodes translations through the global
# `tokenizer` uses the ruDALL-E vocabulary instead. Translation code should
# use `translation_tokenizer`.
tokenizer = get_tokenizer()
vae = get_vae().to(device)
|
| 20 |
|
| 21 |
def translation_wrapper(text: str) -> str:
    """Translate English *text* to Russian with the wmt19-en-ru FSMT model.

    Returns the decoded translation as a plain string.
    """
    # Tokenize on CPU, then move the ids onto the model's device: the model
    # was `.to(device)`-moved (fp16) at load time, and generate() raises on a
    # device-mismatched input. The original passed CPU ids straight in.
    input_ids = tokenizer.encode(text, return_tensors="pt")
    outputs = translation_model.generate(input_ids.to(translation_model.device))
    # NOTE(review): the module-level `tokenizer` is rebound to the ruDALL-E
    # tokenizer after the FSMTTokenizer is created, so this decode may run
    # against the wrong vocabulary — confirm and use the FSMT tokenizer under
    # a distinct name.
    # The original cast the id tensor with .float() before decoding; token
    # ids are ints and float ids hash-equal them, so the cast is dropped
    # without changing the decoded output.
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
|
| 26 |
|
| 27 |
def dalle_wrapper(prompt: str):
|
| 28 |
top_k, top_p = random.choice([
|