Commit 1990ffe
Parent(s): b0e4b09
remove chat handler
app.py CHANGED
@@ -24,14 +24,8 @@ class MyModel:
         min_p,
     ):
         if model != self.current_model or self.current_model is None:
-            chat_handler = MoondreamChatHandler.from_pretrained(
-                repo_id="lab2-as/lora_model_gguf",
-                filename="*mmproj*",
-            )
             client = Llama.from_pretrained(
                 repo_id="lab2-as/lora_model_gguf",
-                filename="*text-model*",
-                chat_handler=chat_handler,
                 n_ctx=2048,  # n_ctx should be increased to accommodate the image embedding
             )
 
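For reference, after this commit the app loads the GGUF weights as a plain text-only model through llama-cpp-python, with no Moondream chat handler and therefore no image inputs. Below is a minimal sketch of that text-only flow, assuming the repo id from the diff; the filename glob and the example prompt are illustrative and are not part of the committed code (the committed call passes only repo_id and n_ctx).

from llama_cpp import Llama

# Text-only load: no chat_handler, so no mmproj/vision projector is involved.
# The filename glob is an assumption for illustration; the committed call omits it.
client = Llama.from_pretrained(
    repo_id="lab2-as/lora_model_gguf",
    filename="*text-model*",
    n_ctx=2048,  # context window for the text prompt and completion
)

# Plain chat completion against the loaded model (illustrative prompt).
response = client.create_chat_completion(
    messages=[{"role": "user", "content": "Describe what a LoRA adapter does."}]
)
print(response["choices"][0]["message"]["content"])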