Update README.md
Browse files
README.md
CHANGED
|
@@ -32,16 +32,29 @@ For CUDA:
|
|
| 32 |
|
| 33 |
```bash
|
| 34 |
# Download the model directly using the Hugging Face CLI
|
| 35 |
-
huggingface-cli download onnxruntime/DeepSeek-R1-Distill-ONNX --include 'deepseek-r1-distill-qwen-1.5B/
|
| 36 |
|
| 37 |
# Install the CUDA package of ONNX Runtime GenAI
|
| 38 |
pip install onnxruntime-genai-cuda
|
| 39 |
|
| 40 |
# Please adjust the model directory (-m) accordingly
|
| 41 |
curl -o https://raw.githubusercontent.com/microsoft/onnxruntime-genai/refs/heads/main/examples/python/model-chat.py
|
| 42 |
-
python model-chat.py -m /path/to/
|
| 43 |
```
|
| 44 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 45 |
|
| 46 |
|
| 47 |
## ONNX Models
|
|
|
|
| 32 |
|
| 33 |
```bash
|
| 34 |
# Download the model directly using the Hugging Face CLI
|
| 35 |
+
huggingface-cli download onnxruntime/DeepSeek-R1-Distill-ONNX --include 'deepseek-r1-distill-qwen-1.5B/gpu/*' --local-dir .
|
| 36 |
|
| 37 |
# Install the CUDA package of ONNX Runtime GenAI
|
| 38 |
pip install onnxruntime-genai-cuda
|
| 39 |
|
| 40 |
# Please adjust the model directory (-m) accordingly
|
| 41 |
curl -o model-chat.py https://raw.githubusercontent.com/microsoft/onnxruntime-genai/refs/heads/main/examples/python/model-chat.py
|
| 42 |
+
python model-chat.py -m /path/to/gpu-int4-rtn-block-32/ -e cuda --chat_template "<|begin▁of▁sentence|><|User|>{input}<|Assistant|>"
|
| 43 |
```
|
| 44 |
|
| 45 |
+
For DirectML:
|
| 46 |
+
|
| 47 |
+
```bash
|
| 48 |
+
# Download the model directly using the Hugging Face CLI
|
| 49 |
+
huggingface-cli download onnxruntime/DeepSeek-R1-Distill-ONNX --include 'deepseek-r1-distill-qwen-1.5B/gpu/*' --local-dir .
|
| 50 |
+
|
| 51 |
+
# Install the DirectML package of ONNX Runtime GenAI
|
| 52 |
+
pip install onnxruntime-genai-directml
|
| 53 |
+
|
| 54 |
+
# Please adjust the model directory (-m) accordingly
|
| 55 |
+
curl -o model-chat.py https://raw.githubusercontent.com/microsoft/onnxruntime-genai/refs/heads/main/examples/python/model-chat.py
|
| 56 |
+
python model-chat.py -m /path/to/gpu-int4-rtn-block-32/ -e dml --chat_template "<|begin▁of▁sentence|><|User|>{input}<|Assistant|>"
|
| 57 |
+
```
|
| 58 |
|
| 59 |
|
| 60 |
## ONNX Models
|