Update utils.py
utils.py CHANGED
@@ -379,9 +379,10 @@ def process_chatverlauf(prompt, model, oai_key):
         }
     return headers, payload
 
-def process_chatverlauf_hf(
-
-
+def process_chatverlauf_hf(history, llm):
+    input = generate_prompt_with_history("Gib folgendem Text eine Überschrift mit maximal 3 Worten", history)
+    llm_chain = LLMChain(llm = llm, prompt = "")
+    result = llm_chain.run({"question": input})
     return result
 
 
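
For context, a minimal runnable sketch of what the new process_chatverlauf_hf appears to do, assuming the legacy LangChain LLMChain/PromptTemplate API. The committed line passes prompt="" to LLMChain, which expects a prompt template rather than a plain string, so the sketch swaps in a pass-through PromptTemplate; generate_prompt_with_history is not shown in this diff, so a simplified stand-in is included here purely for illustration.

from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate


def generate_prompt_with_history(instruction, history):
    # Simplified stand-in: the real helper lives elsewhere in utils.py.
    # Here we just prepend the instruction to the flattened chat history.
    return instruction + "\n\n" + "\n".join(str(turn) for turn in history)


def process_chatverlauf_hf(history, llm):
    # Build the instruction plus the chat history into a single prompt string.
    # The German instruction asks: "Give the following text a heading of at most 3 words".
    full_prompt = generate_prompt_with_history(
        "Gib folgendem Text eine Überschrift mit maximal 3 Worten",
        history,
    )

    # LLMChain requires a prompt template, not a plain string; this template
    # simply forwards the already-assembled question to the model.
    template = PromptTemplate(
        template="{question}",
        input_variables=["question"],
    )
    llm_chain = LLMChain(llm=llm, prompt=template)

    # run() returns the model's text output, used here as the chat title.
    result = llm_chain.run({"question": full_prompt})
    return result

Called as process_chatverlauf_hf(chat_history, llm) with any LangChain-compatible LLM, this would return a short title (at most three words) for the conversation.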