# @package _global_
# Used only for debugging, or for populating the cache
# when we do not care about training.

# Deliberately tiny transformer LM — sized for fast debug runs, not real training.
transformer_lm:
  dim: 64         # model / embedding dimension
  num_heads: 2    # attention heads per layer
  num_layers: 2   # number of transformer layers