labml_nn/normalization/deep_norm/experiment.ipynb
This is an experiment that trains a deep transformer with DeepNorm on the Tiny Shakespeare dataset.
# Install the labml-nn package (IPython shell magic; only valid inside a notebook)
!pip install labml-nn --quiet
from labml import experiment
from labml_nn.normalization.deep_norm.experiment import Configs
# Create a new experiment named "deep_norm"; log only to the screen (no file/web writers)
experiment.create(name="deep_norm", writers={'screen'})
# Instantiate the default DeepNorm experiment configurations
conf = Configs()
Set the experiment configurations, passing a dictionary of values that override the defaults.
# Override the default configurations for this notebook run
experiment.configs(conf, {
# Use a character-level tokenizer
'tokenizer': 'character',
# No separator between the prompt and generated text when sampling
'prompt_separator': '',
# Starting prompt for sampling
'prompt': 'It is ',
# Train on the Tiny Shakespeare dataset
'text': 'tiny_shakespeare',
# Use a context size of $256$
'seq_len': 256,
# Train for 32 epochs
'epochs': 32,
# Batch size $16$
'batch_size': 16,
# Switch between training and validation $10$ times per epoch
'inner_iterations': 10,
# Adam optimizer with a fixed learning rate (no warmup schedule)
'optimizer.optimizer': 'Adam',
'optimizer.learning_rate': 3e-4,
})
Register the PyTorch models so the experiment can save and load their checkpoints.
# Register the model with the experiment so its weights are checkpointed
experiment.add_pytorch_models({'model': conf.model})
# Start the experiment and run the training loop
# (fix: `conf.run()` lost its indentation in the notebook flattening,
# leaving the `with` block empty/invalid Python)
with experiment.start():
    conf.run()