ariG23498 (HF Staff) committed
Commit 7dc0d56 · verified · 1 Parent(s): cc57b45

Upload meta-llama_Llama-3.1-8B-Instruct_1.py with huggingface_hub
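
The commit message indicates the file was uploaded programmatically with huggingface_hub. As a point of reference, a minimal sketch of such an upload is below; the repo_id and local path are illustrative assumptions, not taken from the commit. Notably, "Upload <file> with huggingface_hub" is the default commit message that upload_file generates when none is given.

```
# Sketch only: upload a local script to a Hub repo with huggingface_hub.
# The repo_id below is a hypothetical placeholder.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="meta-llama_Llama-3.1-8B-Instruct_1.py",  # local file to upload
    path_in_repo="meta-llama_Llama-3.1-8B-Instruct_1.py",     # destination path in the repo
    repo_id="<user>/<repo>",                                  # assumed target repo
)
```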

Files changed (1)
  1. meta-llama_Llama-3.1-8B-Instruct_1.py +10 -30
meta-llama_Llama-3.1-8B-Instruct_1.py CHANGED
@@ -11,24 +11,14 @@
 # ///
 
 try:
-    # Load model directly
-    from transformers import AutoTokenizer, AutoModelForCausalLM
+    # Use a pipeline as a high-level helper
+    from transformers import pipeline
 
-    tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.1-8B-Instruct")
-    model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.1-8B-Instruct")
+    pipe = pipeline("text-generation", model="meta-llama/Llama-3.1-8B-Instruct")
     messages = [
         {"role": "user", "content": "Who are you?"},
     ]
-    inputs = tokenizer.apply_chat_template(
-        messages,
-        add_generation_prompt=True,
-        tokenize=True,
-        return_dict=True,
-        return_tensors="pt",
-    ).to(model.device)
-
-    outputs = model.generate(**inputs, max_new_tokens=40)
-    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+    pipe(messages)
     with open('meta-llama_Llama-3.1-8B-Instruct_1.txt', 'w', encoding='utf-8') as f:
         f.write('Everything was good in meta-llama_Llama-3.1-8B-Instruct_1.txt')
 except Exception as e:
@@ -42,27 +32,17 @@ except Exception as e:
 
     with open('meta-llama_Llama-3.1-8B-Instruct_1.txt', 'a', encoding='utf-8') as f:
         import traceback
-        f.write('''
-```CODE:
-# Load model directly
-from transformers import AutoTokenizer, AutoModelForCausalLM
+        f.write('''```CODE:
+# Use a pipeline as a high-level helper
+from transformers import pipeline
 
-tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.1-8B-Instruct")
-model = AutoModelForCausalLM.from_pretrained("meta-llama/Llama-3.1-8B-Instruct")
+pipe = pipeline("text-generation", model="meta-llama/Llama-3.1-8B-Instruct")
 messages = [
     {"role": "user", "content": "Who are you?"},
 ]
-inputs = tokenizer.apply_chat_template(
-    messages,
-    add_generation_prompt=True,
-    tokenize=True,
-    return_dict=True,
-    return_tensors="pt",
-).to(model.device)
-
-outputs = model.generate(**inputs, max_new_tokens=40)
-print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+pipe(messages)
 ```
+
 ERROR:
 ''')
         traceback.print_exc(file=f)
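
For reference, the rewritten try-block boils down to the runnable snippet below. Note that the committed script discards the return value of pipe(messages); the max_new_tokens argument and the result handling here are additions for illustration, assuming a transformers version whose text-generation pipeline accepts chat-style message lists.

```
# Minimal sketch of the pipeline-based variant introduced by this commit.
from transformers import pipeline

pipe = pipeline("text-generation", model="meta-llama/Llama-3.1-8B-Instruct")

messages = [
    {"role": "user", "content": "Who are you?"},
]
result = pipe(messages, max_new_tokens=40)

# With chat-style input, "generated_text" holds the full conversation,
# so the last message is the assistant's reply.
print(result[0]["generated_text"][-1]["content"])
```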