Update app.py
app.py CHANGED
@@ -22,7 +22,7 @@ def loading_models(model='roberta-base'):
           suppress_st_warning=True,
           hash_funcs={'tokenizers.Tokenizer': hash_func, 'tokenizers.AddedToken': hash_func})
 def infer(text):
-    # global nlp
+    # global nlp
     return nlp(text+' '+nlp.tokenizer.mask_token)
 
 
@@ -33,8 +33,6 @@ def sim(predicted_seq, sem_list):
     return semantic_model.encode(predicted_seq, convert_to_tensor=True), \
         semantic_model.encode(sem_list, convert_to_tensor=True)
 
-
-
 @st.cache(allow_output_mutation=True,
           suppress_st_warning=True,
           hash_funcs={'tokenizers.Tokenizer': hash_func, 'tokenizers.AddedToken': hash_func})
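Judging by the hunk headers, the commit is cosmetic: the "# global nlp" comment line is rewritten in place and two blank lines before the second @st.cache decorator are removed. For context, below is a minimal sketch of how the cached pieces touched by this diff might fit together in app.py. The fill-mask pipeline nlp, the SentenceTransformer semantic_model, the model checkpoints, and the hash_func implementation are assumptions, not shown in the diff; hash_func here simply tells the legacy st.cache to skip hashing tokenizer objects it cannot hash natively.

import streamlit as st
from transformers import pipeline
from sentence_transformers import SentenceTransformer


def hash_func(obj):
    # Hypothetical stand-in: return a constant so Streamlit's cache skips
    # hashing tokenizers.Tokenizer / tokenizers.AddedToken objects.
    return None


# Assumed checkpoints; the actual models are loaded elsewhere in app.py.
nlp = pipeline('fill-mask', model='roberta-base')
semantic_model = SentenceTransformer('paraphrase-MiniLM-L6-v2')


@st.cache(allow_output_mutation=True,
          suppress_st_warning=True,
          hash_funcs={'tokenizers.Tokenizer': hash_func, 'tokenizers.AddedToken': hash_func})
def infer(text):
    # Append the mask token and let the fill-mask pipeline propose completions.
    return nlp(text + ' ' + nlp.tokenizer.mask_token)


def sim(predicted_seq, sem_list):
    # Encode the predicted sequence and the candidate list as tensors
    # for downstream semantic-similarity scoring.
    return semantic_model.encode(predicted_seq, convert_to_tensor=True), \
        semantic_model.encode(sem_list, convert_to_tensor=True)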