Update app.py
raj-tomar001 committed
Commit f842376 • 1 Parent(s): 6a09a4b
app.py CHANGED
@@ -2,13 +2,13 @@ import gradio as gr
 from transformers import DebertaTokenizer, DebertaForSequenceClassification
 from transformers import pipeline
 
-save_path_abstract = '
+save_path_abstract = './fine-tuned-deberta'
 model_abstract = DebertaForSequenceClassification.from_pretrained(save_path_abstract)
 tokenizer_abstract = DebertaTokenizer.from_pretrained(save_path_abstract)
 
 classifier_abstract = pipeline('text-classification', model=model_abstract, tokenizer=tokenizer_abstract)
 
-save_path_essay = '
+save_path_essay = './fine-tuned-deberta'
 model_essay = DebertaForSequenceClassification.from_pretrained(save_path_essay)
 tokenizer_essay = DebertaTokenizer.from_pretrained(save_path_essay)
 
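For context, a minimal sketch of how the classifiers loaded in this version of app.py might be exposed through a Gradio interface. The model-loading lines follow the committed code (both paths now point at the same './fine-tuned-deberta' checkpoint); the classify_abstract and classify_essay helpers, the label/score formatting, and the single-Interface layout are assumptions for illustration, not part of the commit.

# Sketch only: loading follows the diff above; the helpers and UI wiring are assumed.
import gradio as gr
from transformers import DebertaTokenizer, DebertaForSequenceClassification, pipeline

save_path = './fine-tuned-deberta'  # same checkpoint used for both pipelines in the commit
model = DebertaForSequenceClassification.from_pretrained(save_path)
tokenizer = DebertaTokenizer.from_pretrained(save_path)

classifier_abstract = pipeline('text-classification', model=model, tokenizer=tokenizer)
classifier_essay = pipeline('text-classification', model=model, tokenizer=tokenizer)

def classify_abstract(text):
    # Hypothetical helper: return the top predicted label and its confidence for an abstract.
    result = classifier_abstract(text)[0]
    return f"{result['label']} ({result['score']:.2f})"

def classify_essay(text):
    # Hypothetical helper: same idea for essay-length input.
    result = classifier_essay(text)[0]
    return f"{result['label']} ({result['score']:.2f})"

demo = gr.Interface(
    fn=classify_abstract,
    inputs=gr.Textbox(lines=8, label="Abstract"),
    outputs=gr.Textbox(label="Prediction"),
    title="Fine-tuned DeBERTa classifier",
)

if __name__ == "__main__":
    demo.launch()

Since both save paths resolve to the same directory, the two pipelines load identical weights; keeping them separate only matters if the Space later swaps in distinct abstract and essay checkpoints.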