DaryaTereshchenko committed on
Commit
6b233a1
1 Parent(s): 00ed639

fix readme

Browse files
Files changed (1) hide show
  1. README.md +4 -4
README.md CHANGED
@@ -73,22 +73,22 @@ from transformers import PrismForConditionalGeneration, PrismTokenizer
73
  uk_text = "Життя як коробка шоколаду"
74
  ja_text = "人生はチョコレートの箱のようなもの。"
75
 
76
- model = PrismForConditionalGeneration.from_pretrained("facebook/prism")
77
- tokenizer = PrismTokenizer.from_pretrained("facebook/prism")
78
 
79
  # Translate Ukrainian to French
80
  tokenizer.src_lang = "uk"
81
  encoded_uk = tokenizer(uk_text, return_tensors="pt")
82
  generated_tokens = model.generate(**encoded_uk, forced_bos_token_id=tokenizer.get_lang_id("fr"), max_new_tokens=20)
83
  print(tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
84
- # => '<fr> La vie comme une boîte de chocolat.'
85
 
86
  # Translate Japanese to English
87
  tokenizer.src_lang = "ja"
88
  encoded_ja = tokenizer(ja_text, return_tensors="pt")
89
  generated_tokens = model.generate(**encoded_ja, forced_bos_token_id=tokenizer.get_lang_id("en"), max_new_tokens=20)
90
  print(tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
91
- # => '<en> Life is like a box of chocolate.'
92
  ```
93
 
94
  ## Languages Covered
 
73
  uk_text = "Життя як коробка шоколаду"
74
  ja_text = "人生はチョコレートの箱のようなもの。"
75
 
76
+ model = PrismForConditionalGeneration.from_pretrained("dariast/prism")
77
+ tokenizer = PrismTokenizer.from_pretrained("dariast/prism")
78
 
79
  # Translate Ukrainian to French
80
  tokenizer.src_lang = "uk"
81
  encoded_uk = tokenizer(uk_text, return_tensors="pt")
82
  generated_tokens = model.generate(**encoded_uk, forced_bos_token_id=tokenizer.get_lang_id("fr"), max_new_tokens=20)
83
  print(tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
84
+ # => 'La vie comme une boîte de chocolat.'
85
 
86
  # Translate Japanese to English
87
  tokenizer.src_lang = "ja"
88
  encoded_ja = tokenizer(ja_text, return_tensors="pt")
89
  generated_tokens = model.generate(**encoded_ja, forced_bos_token_id=tokenizer.get_lang_id("en"), max_new_tokens=20)
90
  print(tokenizer.batch_decode(generated_tokens, skip_special_tokens=True))
91
+ # => 'Life is like a box of chocolate.'
92
  ```
93
 
94
  ## Languages Covered