add upload notice
README.md CHANGED
@@ -32,6 +32,8 @@ quantized_by: Suparious
 ---
 # l3utterfly/mistral-7b-v0.1-layla-v4-chatml AWQ
 
+**UPLOAD IN PROGRESS**
+
 - Model creator: [l3utterfly](https://huggingface.co/l3utterfly)
 - Original model: [mistral-7b-v0.1-layla-v4-chatml](https://huggingface.co/l3utterfly/mistral-7b-v0.1-layla-v4-chatml)
 
@@ -62,7 +64,7 @@ from awq import AutoAWQForCausalLM
 from transformers import AutoTokenizer, TextStreamer
 
 model_path = "solidrust/Layla-7B-v4-AWQ"
-system_message = "You are
+system_message = "You are Layla, incarnated as a powerful AI."
 
 # Load model
 model = AutoAWQForCausalLM.from_quantized(model_path,
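The second hunk ends mid-call to `AutoAWQForCausalLM.from_quantized`, so the changed README snippet is only partially visible here. Below is a minimal sketch of how a usage example like this typically continues, assuming the standard AutoAWQ loading options (`fuse_layers`, `safetensors`) and the ChatML prompt format of the original fine-tune; the remaining keyword arguments and the example prompt are assumptions, not taken from this diff.

```python
from awq import AutoAWQForCausalLM
from transformers import AutoTokenizer, TextStreamer

model_path = "solidrust/Layla-7B-v4-AWQ"
system_message = "You are Layla, incarnated as a powerful AI."

# Load the AWQ-quantized model and its tokenizer
# (fuse_layers/safetensors are typical AutoAWQ options, assumed here, not shown in the hunk)
model = AutoAWQForCausalLM.from_quantized(model_path,
                                          fuse_layers=True,
                                          safetensors=True)
tokenizer = AutoTokenizer.from_pretrained(model_path)

# Stream tokens to stdout as they are generated
streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

# ChatML template, matching the original chatml fine-tune
prompt_template = """<|im_start|>system
{system_message}<|im_end|>
<|im_start|>user
{prompt}<|im_end|>
<|im_start|>assistant
"""

prompt = "Tell me about yourself."  # illustrative prompt

tokens = tokenizer(
    prompt_template.format(system_message=system_message, prompt=prompt),
    return_tensors="pt",
).input_ids.cuda()

# Generate a streamed reply
model.generate(tokens, streamer=streamer, max_new_tokens=512)
```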