Fix generate process
spanish_medica_llm.py  CHANGED  +3 -2

@@ -737,8 +737,9 @@ def generate_response(query):
     tokenizer = loadSpanishTokenizer()
     model = loadBaseModel(HUB_MODEL_ID)
 
-    system = f"[INST]\nYou are a helpful coding assistant.[/INST]\n"
-
+    #system = f"[INST]\nYou are a helpful coding assistant.[/INST]\n"
+    query = get_chat_format({'raw_text':query, 'topic':''})
+    prompt = tokenizer.apply_chat_template(query, tokenize=False)
     print(prompt)
     inputs = tokenizer(prompt, return_tensors="pt")
     input_ids = inputs["input_ids"].to("cuda")
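For context on the added lines: the prompt is now built through the tokenizer's chat template instead of the hard-coded [INST] string that was removed. Below is a minimal, self-contained sketch of that flow under assumptions not confirmed by this diff: the get_chat_format() stub only mirrors the call signature seen above (the repository's real helper may build the message list differently), and the model id is a placeholder because HUB_MODEL_ID is not shown here.

# Sketch of the updated prompt flow in generate_response().
# Assumptions (not from the diff): get_chat_format() is a hypothetical stand-in,
# and the model id is a placeholder for HUB_MODEL_ID.
import torch
from transformers import AutoTokenizer

def get_chat_format(sample):
    # Assumed behavior: wrap the raw text into the role/content message list
    # that tokenizer.apply_chat_template() expects.
    return [{"role": "user", "content": sample["raw_text"]}]

tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.2")  # placeholder id
query = get_chat_format({"raw_text": "¿Cuáles son los síntomas de la gripe?", "topic": ""})
prompt = tokenizer.apply_chat_template(query, tokenize=False)
print(prompt)

inputs = tokenizer(prompt, return_tensors="pt")
input_ids = inputs["input_ids"].to("cuda" if torch.cuda.is_available() else "cpu")

The design upside of apply_chat_template is that the special tokens and turn markers stay in sync with whatever chat template the base model ships, which the removed hand-written [INST] string had to reproduce manually.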