Try out caching some functions to solve spaces issue
Browse files
app.py
CHANGED
|
@@ -28,8 +28,24 @@ from transformers import pipeline
|
|
| 28 |
import os
|
| 29 |
from transformers_interpret import SequenceClassificationExplainer
|
| 30 |
|
|
|
|
| 31 |
# USE_model = hub.load("https://tfhub.dev/google/universal-sentence-encoder/4")
|
| 32 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
# Page setup
|
| 35 |
st.set_page_config(
|
|
@@ -85,23 +101,16 @@ def display_summary(article_name: str):
|
|
| 85 |
return HTML_WRAPPER.format(soup)
|
| 86 |
|
| 87 |
|
| 88 |
-
##@st.cache(hash_funcs={preshed.maps.PreshMap: my_hash_func})
|
| 89 |
-
def get_spacy():
|
| 90 |
-
nlp = spacy.load('en_core_web_lg')
|
| 91 |
-
return nlp
|
| 92 |
|
| 93 |
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
def get_flair_tagger():
|
| 97 |
-
tagger = SequenceTagger.load("flair/ner-english-ontonotes-fast")
|
| 98 |
-
return tagger
|
| 99 |
|
| 100 |
|
| 101 |
def get_all_entities_per_sentence(text):
|
| 102 |
# load all NER models
|
| 103 |
-
nlp = get_spacy()
|
| 104 |
-
tagger = get_flair_tagger()
|
| 105 |
doc = nlp(text)
|
| 106 |
|
| 107 |
sentences = list(doc.sents)
|
|
@@ -186,7 +195,7 @@ def render_dependency_parsing(text: str):
|
|
| 186 |
|
| 187 |
# If deps for article: True, otherwise deps for summary calc
|
| 188 |
def check_dependency(article: bool):
|
| 189 |
-
nlp = spacy.load('en_core_web_lg')
|
| 190 |
if article:
|
| 191 |
text = st.session_state.article_text
|
| 192 |
all_entities = get_all_entities_per_sentence(text)
|
|
@@ -266,6 +275,10 @@ metric, indicating the trustworthiness of the generated summary. Throughout this
|
|
| 266 |
results for some methods on specific examples. These text blocks will be indicated and they change according to the
|
| 267 |
currently selected article.""")
|
| 268 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 269 |
# GENERATING SUMMARIES PART
|
| 270 |
st.header("Generating summaries")
|
| 271 |
st.markdown("Let’s start by selecting an article text for which we want to generate a summary, or you can provide "
|
|
|
|
| 28 |
import os
|
| 29 |
from transformers_interpret import SequenceClassificationExplainer
|
| 30 |
|
| 31 |
+
|
| 32 |
# USE_model = hub.load("https://tfhub.dev/google/universal-sentence-encoder/4")
|
| 33 |
+
|
| 34 |
+
@st.experimental_singleton
def get_sentence_embedding_model():
    """Build (once per Streamlit server process) the sentence-embedding model.

    Cached via ``st.experimental_singleton`` so the MiniLM weights are only
    downloaded/loaded a single time, which is the point of this commit
    ("solve spaces issue" — avoid reloading heavy models on every rerun).

    Returns:
        A ``SentenceTransformer`` wrapping 'sentence-transformers/all-MiniLM-L6-v2'.
    """
    model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
    return model
|
| 37 |
+
|
| 38 |
+
@st.experimental_singleton
def get_spacy():
    """Load and cache the large English spaCy pipeline.

    ``st.experimental_singleton`` ensures 'en_core_web_lg' is loaded exactly
    once per server process instead of on every Streamlit script rerun.

    Returns:
        The loaded spaCy ``Language`` pipeline.
    """
    return spacy.load('en_core_web_lg')
|
| 42 |
+
|
| 43 |
+
@st.experimental_singleton
def get_flair_tagger():
    """Load and cache the Flair OntoNotes NER tagger.

    Wrapped in ``st.experimental_singleton`` so the (large) tagger model is
    fetched and initialized only once per server process.

    Returns:
        A Flair ``SequenceTagger`` loaded from "flair/ner-english-ontonotes-fast".
    """
    return SequenceTagger.load("flair/ner-english-ontonotes-fast")
|
| 47 |
+
|
| 48 |
+
|
| 49 |
|
| 50 |
# Page setup
|
| 51 |
st.set_page_config(
|
|
|
|
| 101 |
return HTML_WRAPPER.format(soup)
|
| 102 |
|
| 103 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 104 |
|
| 105 |
|
| 106 |
+
|
| 107 |
+
|
|
|
|
|
|
|
|
|
|
| 108 |
|
| 109 |
|
| 110 |
def get_all_entities_per_sentence(text):
|
| 111 |
# load all NER models
|
| 112 |
+
#nlp = get_spacy()
|
| 113 |
+
#tagger = get_flair_tagger()
|
| 114 |
doc = nlp(text)
|
| 115 |
|
| 116 |
sentences = list(doc.sents)
|
|
|
|
| 195 |
|
| 196 |
# If deps for article: True, otherwise deps for summary calc
|
| 197 |
def check_dependency(article: bool):
|
| 198 |
+
#nlp = spacy.load('en_core_web_lg')
|
| 199 |
if article:
|
| 200 |
text = st.session_state.article_text
|
| 201 |
all_entities = get_all_entities_per_sentence(text)
|
|
|
|
| 275 |
results for some methods on specific examples. These text blocks will be indicated and they change according to the
|
| 276 |
currently selected article.""")
|
| 277 |
|
| 278 |
+
tagger = get_flair_tagger()
|
| 279 |
+
sentence_embedding_model = get_sentence_embedding_model()
|
| 280 |
+
nlp = get_spacy()
|
| 281 |
+
|
| 282 |
# GENERATING SUMMARIES PART
|
| 283 |
st.header("Generating summaries")
|
| 284 |
st.markdown("Let’s start by selecting an article text for which we want to generate a summary, or you can provide "
|