KoichiYasuoka
committed on
Commit
•
6cf32b2
1
Parent(s):
e829ec5
bug fix
Browse files
maker.sh
CHANGED
@@ -83,7 +83,7 @@ class MistralForTokenClassification(MistralPreTrainedModel):
|
|
83 |
loss_fct=nn.CrossEntropyLoss()
|
84 |
loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))
|
85 |
if not return_dict:
|
86 |
-
output=(logits,)+transformer_outputs[
|
87 |
return ((loss,)+output) if loss is not None else output
|
88 |
return TokenClassifierOutput(loss=loss,logits=logits,hidden_states=transformer_outputs.hidden_states,attentions=transformer_outputs.attentions)
|
89 |
|
|
|
83 |
loss_fct=nn.CrossEntropyLoss()
|
84 |
loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))
|
85 |
if not return_dict:
|
86 |
+
output=(logits,)+transformer_outputs[2:]
|
87 |
return ((loss,)+output) if loss is not None else output
|
88 |
return TokenClassifierOutput(loss=loss,logits=logits,hidden_states=transformer_outputs.hidden_states,attentions=transformer_outputs.attentions)
|
89 |
|
upos.py
CHANGED
@@ -71,6 +71,6 @@ class MistralForTokenClassification(MistralPreTrainedModel):
|
|
71 |
loss_fct=nn.CrossEntropyLoss()
|
72 |
loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))
|
73 |
if not return_dict:
|
74 |
-
output=(logits,)+transformer_outputs[
|
75 |
return ((loss,)+output) if loss is not None else output
|
76 |
return TokenClassifierOutput(loss=loss,logits=logits,hidden_states=transformer_outputs.hidden_states,attentions=transformer_outputs.attentions)
|
|
|
71 |
loss_fct=nn.CrossEntropyLoss()
|
72 |
loss=loss_fct(logits.view(-1,self.num_labels),labels.view(-1))
|
73 |
if not return_dict:
|
74 |
+
output=(logits,)+transformer_outputs[2:]
|
75 |
return ((loss,)+output) if loss is not None else output
|
76 |
return TokenClassifierOutput(loss=loss,logits=logits,hidden_states=transformer_outputs.hidden_states,attentions=transformer_outputs.attentions)
|