Update modeling_phi.py
Browse files — modeling_phi.py (+3 lines added, −5 lines removed)
modeling_phi.py
CHANGED
|
@@ -59,8 +59,6 @@ _CONFIG_FOR_DOC = "PhiConfig"
|
|
| 59 |
|
| 60 |
PHI_PRETRAINED_MODEL_ARCHIVE_LIST = [
|
| 61 |
"microsoft/phi-1",
|
| 62 |
-
"microsoft/phi-1_5",
|
| 63 |
-
"microsoft/phi-2",
|
| 64 |
# See all Phi models at https://huggingface.co/models?filter=phi
|
| 65 |
]
|
| 66 |
|
|
@@ -1021,8 +1019,8 @@ class PhiForCausalLM(PhiPreTrainedModel):
|
|
| 1021 |
```python
|
| 1022 |
>>> from transformers import AutoTokenizer, PhiForCausalLM
|
| 1023 |
|
| 1024 |
-
>>> model = PhiForCausalLM.from_pretrained("microsoft/phi-
|
| 1025 |
-
>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-
|
| 1026 |
|
| 1027 |
>>> prompt = "This is an example script ."
|
| 1028 |
>>> inputs = tokenizer(prompt, return_tensors="pt")
|
|
@@ -1030,7 +1028,7 @@ class PhiForCausalLM(PhiPreTrainedModel):
|
|
| 1030 |
>>> # Generate
|
| 1031 |
>>> generate_ids = model.generate(inputs.input_ids, max_length=30)
|
| 1032 |
>>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
|
| 1033 |
-
'This is an example script
|
| 1034 |
```"""
|
| 1035 |
|
| 1036 |
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
|
|
|
|
| 59 |
|
| 60 |
PHI_PRETRAINED_MODEL_ARCHIVE_LIST = [
|
| 61 |
"microsoft/phi-1",
|
|
|
|
|
|
|
| 62 |
# See all Phi models at https://huggingface.co/models?filter=phi
|
| 63 |
]
|
| 64 |
|
|
|
|
| 1019 |
```python
|
| 1020 |
>>> from transformers import AutoTokenizer, PhiForCausalLM
|
| 1021 |
|
| 1022 |
+
>>> model = PhiForCausalLM.from_pretrained("microsoft/phi-1")
|
| 1023 |
+
>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1")
|
| 1024 |
|
| 1025 |
>>> prompt = "This is an example script ."
|
| 1026 |
>>> inputs = tokenizer(prompt, return_tensors="pt")
|
|
|
|
| 1028 |
>>> # Generate
|
| 1029 |
>>> generate_ids = model.generate(inputs.input_ids, max_length=30)
|
| 1030 |
>>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
|
| 1031 |
+
'This is an example script .\n\n\n\nfrom typing import List\n\ndef find_most_common_letter(words: List[str'
|
| 1032 |
```"""
|
| 1033 |
|
| 1034 |
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
|