Update README.md
README.md CHANGED
@@ -34,7 +34,7 @@ language:
 
 
 
-# T5-
+# Finetuned T5-Base Question Generator Model
 
 This model is a fine-tuned T5 model designed specifically for **automatic question generation** from any given context or passage. It supports different question types, including **short answer**, **multiple choice question**, and **true or false question**, and allows customization by **difficulty level**: easy, medium, or hard.
 
@@ -123,8 +123,9 @@ def format_prompt(qtype, difficulty, context, answer=""):
 from transformers import T5Tokenizer, T5ForConditionalGeneration
 
 # Load model from Hugging Face Hub
-
-
+model_name = "Avinash250325/T5BaseQuestionGeneration"
+tokenizer = T5Tokenizer.from_pretrained(model_name)
+model = T5ForConditionalGeneration.from_pretrained(model_name)
 
 # Format input prompt
 def format_prompt(qtype, difficulty, context, answer=""):
@@ -132,6 +133,8 @@ def format_prompt(qtype, difficulty, context, answer=""):
     return f"<extra_id_97>{qtype} <extra_id_98>{difficulty} <extra_id_99>{answer_part} {context}"
 
 context = "The sun is the center of our solar system."
+qtype = "short answer"  # qtype: ("short answer", "multiple choice question", "true or false question")
+difficulty = "easy"     # difficulty: ("easy", "medium", "hard")
 prompt = format_prompt("short answer", "easy", context)
 
 # Tokenize and generate
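The excerpt in the diff stops at the `# Tokenize and generate` comment. Below is a minimal sketch of that final step, assuming the standard `transformers` generation API already loaded above; the decoding settings (`max_length`, `num_beams`) are illustrative assumptions, not values taken from the README.

```python
# Sketch of the generation step that follows the "# Tokenize and generate" comment.
# Decoding parameters here are assumptions for illustration, not from the README.
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_length=64,        # assumed upper bound on generated question length
    num_beams=4,          # assumed beam-search width
    early_stopping=True,
)
question = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(question)
```

For a multiple choice or true/false question, only the `qtype` (and optionally `difficulty`) argument passed to `format_prompt` changes; the loading and generation steps stay the same.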