Change caching type for flair tagger
app.py CHANGED

@@ -43,8 +43,10 @@ def get_spacy():
     return nlp
 
 
-#
-
+#TODO: might look into which one is the best here
+#TODO: might be useful to make an ml6 preloaded model for flair as this takes ridiculously long to load the first time
+@st.experimental_singleton
+#@st.cache(suppress_st_warning=True, allow_output_mutation=True)
 def get_flair_tagger():
     return SequenceTagger.load("flair/ner-english-ontonotes-fast")
 
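For context, here is a minimal sketch of how the cached loaders fit together in app.py after this change. Only the get_flair_tagger body and decorator appear verbatim in the diff; the imports, the spaCy model name, and the decorator on get_spacy are assumptions added to make the snippet self-contained.

import spacy
import streamlit as st
from flair.models import SequenceTagger


# Assumed decorator and model name: the hunk header only shows that
# get_spacy() exists and returns nlp.
@st.experimental_singleton
def get_spacy():
    nlp = spacy.load("en_core_web_sm")
    return nlp


# st.experimental_singleton caches the loaded tagger once per process,
# so the slow SequenceTagger.load call only runs on the first request.
@st.experimental_singleton
def get_flair_tagger():
    return SequenceTagger.load("flair/ner-english-ontonotes-fast")

The switch from st.cache(allow_output_mutation=True) to st.experimental_singleton fits this use case: the singleton decorator keeps one shared instance of the non-serializable model object across sessions instead of hashing and validating the cached return value on each rerun.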