Upload meta-llama_Llama-3.2-1B_2.py with huggingface_hub
meta-llama_Llama-3.2-1B_2.py
CHANGED
@@ -1,9 +1,16 @@
 # /// script
 # requires-python = ">=3.12"
 # dependencies = [
+#     "numpy",
+#     "einops",
+#     "pandas",
+#     "matplotlib",
+#     "protobuf",
 #     "torch",
+#     "sentencepiece",
 #     "torchvision",
 #     "transformers",
+#     "timm",
 #     "diffusers",
 #     "sentence-transformers",
 #     "accelerate",
@@ -25,7 +32,7 @@ except Exception as e:
     from slack_sdk import WebClient
     client = WebClient(token=os.environ['SLACK_TOKEN'])
     client.chat_postMessage(
-        channel='#
+        channel='#hub-model-metadata-snippets-sprint',
         text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/meta-llama_Llama-3.2-1B_2.txt|meta-llama_Llama-3.2-1B_2.txt>',
     )
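The commented header is PEP 723 inline script metadata, so the added entries (numpy, einops, pandas, matplotlib, protobuf, sentencepiece, timm) are extra packages that a runner such as uv resolves before executing the snippet. Taken together with the second hunk, the file follows a simple error-reporting pattern: run the generated model snippet inside a try block and, on any exception, post a Slack message linking to the captured execution log. Below is a minimal sketch of that pattern; the try body (a hypothetical transformers pipeline call against meta-llama/Llama-3.2-1B) is an assumption for illustration, while the except branch mirrors the diff.

import os

try:
    # Assumed for illustration: the generated snippet presumably exercises the model,
    # e.g. via a transformers text-generation pipeline. Not part of the diff.
    from transformers import pipeline
    pipe = pipeline("text-generation", model="meta-llama/Llama-3.2-1B")
    pipe("Hello")
except Exception as e:
    # On failure, report to Slack; the channel, token lookup, and message text
    # mirror the lines shown in the diff above.
    from slack_sdk import WebClient
    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#hub-model-metadata-snippets-sprint',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/meta-llama_Llama-3.2-1B_2.txt|meta-llama_Llama-3.2-1B_2.txt>',
    )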