ojasaar committed
Commit 420dbf6
1 Parent(s): fd6366e

Update readme

Files changed (2)
  1. .gitattributes +1 -0
  2. README.md +50 -0
.gitattributes CHANGED
@@ -6,3 +6,4 @@
  *.tar.gz filter=lfs diff=lfs merge=lfs -text
  *.ot filter=lfs diff=lfs merge=lfs -text
  *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -13,6 +13,9 @@ datasets:
  - cnn_dailymail
  metrics:
  - f1
+ widget:
+ - text: "q: Who is Elon Musk? a: an entrepreneur q: When was he born? c: Elon Musk is an entrepreneur born in 1971."
+ - text: "emotion: I hope this works!"
  ---
  # T5 Base with QA + Summary + Emotion
 
@@ -32,6 +35,8 @@ Summarisation and emotion detection has not been evaluated yet.
 
  ### Question answering
 
+ #### With Transformers
+
  ```python
  from transformers import T5ForConditionalGeneration, T5Tokenizer
  model = T5ForConditionalGeneration.from_pretrained("kiri-ai/t5-base-qa-summary-emotion")
@@ -54,8 +59,24 @@ context = "Elon Musk left OpenAI to avoid possible future conflicts with his rol
  print(get_answer("Why not?", [("Does Elon Musk still work with OpenAI", "No")], context)) # to avoid possible future conflicts with his role as CEO of Tesla
  ```
 
+ #### With Kiri
+
+ ```python
+ from kiri.models import T5QASummaryEmotion
+
+ context = "Elon Musk left OpenAI to avoid possible future conflicts with his role as CEO of Tesla."
+ prev_qa = [("Does Elon Musk still work with OpenAI", "No")]
+ model = T5QASummaryEmotion()
+
+ # Leave prev_qa blank for non conversational question-answering
+ model.qa("Why not?", context, prev_qa=prev_qa)
+ > "to avoid possible future conflicts with his role as CEO of Tesla"
+ ```
+
  ### Summarisation
 
+ #### With Transformers
+
  ```python
  from transformers import T5ForConditionalGeneration, T5Tokenizer
  model = T5ForConditionalGeneration.from_pretrained("kiri-ai/t5-base-qa-summary-emotion")
@@ -69,8 +90,21 @@ def summary(context):
  return tokenizer.decode(tokens[0], skip_special_tokens=True)
  ```
 
+ #### With Kiri
+
+ ```python
+ from kiri.models import T5QASummaryEmotion
+
+ model = T5QASummaryEmotion()
+
+ model.summarise("Long text to summarise")
+ > "Short summary of long text"
+ ```
+
  ### Emotion detection
 
+ #### With Transformers
+
  ```python
  from transformers import T5ForConditionalGeneration, T5Tokenizer
  model = T5ForConditionalGeneration.from_pretrained("kiri-ai/t5-base-qa-summary-emotion")
@@ -83,3 +117,19 @@ def emotion(context):
  attention_mask=features['attention_mask'], max_length=64)
  return tokenizer.decode(tokens[0], skip_special_tokens=True)
  ```
+
+ #### With Kiri
+
+ ```python
+ from kiri.models import T5QASummaryEmotion
+
+ model = T5QASummaryEmotion()
+
+ model.emotion("I hope this works!")
+ > "optimism"
+ ```
+
+ ## About us
+
+ Kiri makes using state-of-the-art models easy, accessible and scalable.
+ [Website](https://kiri.ai) | [Natural Language Engine](https://github.com/kiri-ai/kiri)
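
The widget entries added in this commit show the model's raw text-to-text prompt format: earlier question/answer pairs are prefixed with `q:` and `a:`, the context with `c:`, and emotion inputs with `emotion:`. Below is a minimal sketch, assuming that prompt format, of running those two widget prompts through the plain transformers API; the `generate` helper name is illustrative, and the generation settings mirror the README snippets above.

```python
from transformers import T5ForConditionalGeneration, T5Tokenizer

model = T5ForConditionalGeneration.from_pretrained("kiri-ai/t5-base-qa-summary-emotion")
tokenizer = T5Tokenizer.from_pretrained("kiri-ai/t5-base-qa-summary-emotion")

def generate(prompt):
    # Tokenise the task-prefixed prompt and decode the generated text.
    features = tokenizer([prompt], return_tensors='pt')
    tokens = model.generate(input_ids=features['input_ids'],
                            attention_mask=features['attention_mask'], max_length=64)
    return tokenizer.decode(tokens[0], skip_special_tokens=True)

# Conversational QA: earlier q:/a: pairs, the new question, then the c: context.
print(generate("q: Who is Elon Musk? a: an entrepreneur q: When was he born? "
               "c: Elon Musk is an entrepreneur born in 1971."))

# Emotion detection uses the "emotion:" prefix.
print(generate("emotion: I hope this works!"))
```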