Rotary Positional Embeddings (RoPE) Experiment

This is an annotated PyTorch experiment that trains a transformer model with Rotary Positional Embeddings (RoPE) on the Tiny Shakespeare dataset, using the rotary value PE attention variant configured below.
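
For background, RoPE encodes position by rotating pairs of query and key features by an angle proportional to the token position, so attention scores depend only on the relative offset between tokens. The following is a minimal, self-contained sketch of that rotation in plain PyTorch; it is illustrative only and independent of the labml_nn implementation used by this experiment (the reverse flag, which undoes a rotation, is only needed for the rotary value sketch further down).

import torch


def rope_rotate(x: torch.Tensor, base: float = 10_000., reverse: bool = False) -> torch.Tensor:
    # x has shape [seq_len, d] with d even; feature pair (2i, 2i + 1) at
    # position m is rotated by angle m * theta_i, where theta_i = base^(-2i/d)
    seq_len, d = x.shape
    theta = base ** (-torch.arange(0, d, 2, dtype=torch.float32) / d)
    angles = torch.arange(seq_len, dtype=torch.float32)[:, None] * theta[None, :]
    cos, sin = angles.cos(), angles.sin()
    if reverse:
        # Rotating by the negative angle undoes a previous rotation
        sin = -sin
    x1, x2 = x[:, 0::2], x[:, 1::2]
    out = torch.empty_like(x)
    out[:, 0::2] = x1 * cos - x2 * sin
    out[:, 1::2] = x1 * sin + x2 * cos
    return out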

from labml import experiment
from labml.configs import calculate
from labml_nn.transformers import TransformerConfigs
from labml_nn.transformers.rope.experiment import Configs as RoPEConfigs

Rotary PE attention

class Configs(RoPEConfigs):  # , ArithmeticAutoregression):
    pass


def _rotary_value_pe_mha(c: TransformerConfigs):
    from labml_nn.transformers.rope.value_pe import RotaryValuePEMultiHeadAttention
    return RotaryValuePEMultiHeadAttention(c.n_heads, c.d_model, 1., 1.)
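
RotaryValuePEMultiHeadAttention, imported above, applies rotary embeddings to the value vectors as well as to the queries and keys. The two trailing 1. arguments appear to be the fractions of features to rotate for queries/keys and for values; that reading is inferred from this call site, not from a checked signature. The rough idea: rotate each value by its key position before the attention-weighted sum, then undo the rotation at the query position, so every aggregated value ends up rotated by the relative distance between key and query. A single-head sketch of that idea, reusing rope_rotate from the sketch above (illustrative, not the labml_nn implementation):

def rotary_value_attention(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor) -> torch.Tensor:
    # q, k, v: [seq_len, d] for one head; masking and other details omitted
    d = q.shape[-1]
    # Standard RoPE on queries and keys
    scores = rope_rotate(q) @ rope_rotate(k).T / d ** 0.5
    attn = torch.softmax(scores, dim=-1)
    # Rotate values by their positions, aggregate, then undo the rotation at
    # the query position; each contribution is rotated by the relative distance
    out = attn @ rope_rotate(v)
    return rope_rotate(out, reverse=True)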

Configuration options

calculate(TransformerConfigs.encoder_attn, 'rotary_value', _rotary_value_pe_mha)
calculate(TransformerConfigs.decoder_attn, 'rotary_value', _rotary_value_pe_mha)
calculate(TransformerConfigs.decoder_mem_attn, 'rotary_value', _rotary_value_pe_mha)
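
These calculate calls register 'rotary_value' as a named option for the encoder, decoder, and decoder-memory attention of TransformerConfigs, so the configuration dictionary in main below can select the attention module by string. The same pattern can register any other attention constructor. A hedged sketch under a hypothetical option name (assuming labml_nn.transformers.mha.MultiHeadAttention takes heads, d_model, and dropout_prob; TransformerConfigs most likely already ships a plain multi-head attention option, so this only illustrates the mechanism):

def _plain_mha(c: TransformerConfigs):
    # Hypothetical example: plain multi-head attention registered under 'plain_mha'
    from labml_nn.transformers.mha import MultiHeadAttention
    return MultiHeadAttention(c.n_heads, c.d_model, dropout_prob=c.dropout)


calculate(TransformerConfigs.encoder_attn, 'plain_mha', _plain_mha)
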
def main():

Create experiment

    experiment.create(name="rotary_shakespeare", comment="rotary value", writers={'screen', 'labml'})

Create configs

    conf = Configs()

Override configurations

    experiment.configs(conf, {

No fixed positional embeddings

        'transformer.src_embed': 'no_pos',
        'transformer.tgt_embed': 'no_pos',

Encoder attention with rotary value PE (the commented-out line below selects plain RoPE instead)

        'transformer.encoder_attn': 'rotary_value',
        # 'transformer.encoder_attn': 'rotary',

Use the rotary PE transformer model

        'model': 'rotary_pe_transformer',

Use a character-level tokenizer

        'tokenizer': 'character',

Prompt separator is blank

        'prompt_separator': '',

Starting prompt for sampling

        'prompt': 'It is ',

Use Tiny Shakespeare dataset

        'text': 'tiny_shakespeare',

Use a context size of 512

        'seq_len': 512,

Train for 24 epochs

        'epochs': 24,

Batch size

        'batch_size': 16,

Switch between training and validation 4 times per epoch

        'inner_iterations': 4,

Model size

        'd_model': 128,
        'transformer.ffn.d_ff': 512,
        'transformer.n_heads': 4,
        'transformer.dropout': 0.0,

Use Adam optimizer with a learning rate of 2.5e-4

        'optimizer.optimizer': 'Adam',
        'optimizer.learning_rate': 2.5e-4,

        'dataloader_shuffle_with_replacement': True
    })

Set models for saving and loading

    experiment.add_pytorch_models({'model': conf.model})

Start the experiment

    with experiment.start():

Run training

        conf.run()

if __name__ == '__main__':
    main()