Ashishkr committed on
Commit
d5454c2
1 Parent(s): 0abea0e

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +18 -18
README.md CHANGED
@@ -17,24 +17,24 @@ Training data :
17
  http://dl.fbaipublicfiles.com/rephrasing/rephrasing_dataset.tar.gz
18
 
19
  ``` .py
20
- from transformers import AutoTokenizer, AutoModelWithLMHead
21
- tokenizer = AutoTokenizer.from_pretrained("salesken/natural_rephrase")
22
- model = AutoModelWithLMHead.from_pretrained("salesken/natural_rephrase")
23
-
24
-
25
- Input_query="Hey Siri, Send message to mom to say thank you for the delicious dinner yesterday"
26
- query= Input_query + " ~~ "
27
- input_ids = tokenizer.encode(query.lower(), return_tensors='pt')
28
- sample_outputs = model.generate(input_ids,
29
- do_sample=True,
30
- num_beams=1,
31
- max_length=len(Input_query),
32
- temperature=0.2,
33
- top_k = 10,
34
- num_return_sequences=1)
35
- for i in range(len(sample_outputs)):
36
- result = tokenizer.decode(sample_outputs[i], skip_special_tokens=True).split('||')[0].split('~~')[1]
37
- print(result)
38
 
39
  ```
40
 
 
17
  http://dl.fbaipublicfiles.com/rephrasing/rephrasing_dataset.tar.gz
18
 
19
  ``` .py
20
+ from transformers import AutoTokenizer, AutoModelWithLMHead
21
+ tokenizer = AutoTokenizer.from_pretrained("salesken/natural_rephrase")
22
+ model = AutoModelWithLMHead.from_pretrained("salesken/natural_rephrase")
23
+
24
+
25
+ Input_query="Hey Siri, Send message to mom to say thank you for the delicious dinner yesterday"
26
+ query= Input_query + " ~~ "
27
+ input_ids = tokenizer.encode(query.lower(), return_tensors='pt')
28
+ sample_outputs = model.generate(input_ids,
29
+ do_sample=True,
30
+ num_beams=1,
31
+ max_length=len(Input_query),
32
+ temperature=0.2,
33
+ top_k = 10,
34
+ num_return_sequences=1)
35
+ for i in range(len(sample_outputs)):
36
+ result = tokenizer.decode(sample_outputs[i], skip_special_tokens=True).split('||')[0].split('~~')[1]
37
+ print(result)
38
 
39
  ```
40