def xavier_normal_initialization(module):
    r"""Initialize ``module`` in place with `xavier_normal_`_.

    Weights of ``nn.Embedding`` and ``nn.Linear`` layers are drawn from a
    Xavier (Glorot) normal distribution; the bias of an ``nn.Linear`` layer,
    when present, is set to the constant 0. Any other module type is left
    untouched. Intended to be passed to ``Module.apply``.

    .. _`xavier_normal_`: https://pytorch.org/docs/stable/nn.init.html?highlight=xavier_normal_#torch.nn.init.xavier_normal_

    Examples:
        >>> self.apply(xavier_normal_initialization)
    """
    # Embedding layers carry only a weight matrix — initialize and stop.
    if isinstance(module, nn.Embedding):
        xavier_normal_(module.weight.data)
        return
    # Everything except Linear is deliberately ignored.
    if not isinstance(module, nn.Linear):
        return
    xavier_normal_(module.weight.data)
    bias = module.bias
    if bias is not None:
        constant_(bias.data, 0)
def xavier_uniform_initialization(module):
    r"""Initialize ``module`` in place with `xavier_uniform_`_.

    Weights of ``nn.Embedding`` and ``nn.Linear`` layers are drawn from a
    Xavier (Glorot) uniform distribution; the bias of an ``nn.Linear`` layer,
    when present, is set to the constant 0. Any other module type is left
    untouched. Intended to be passed to ``Module.apply``.

    .. _`xavier_uniform_`: https://pytorch.org/docs/stable/nn.init.html?highlight=xavier_uniform_#torch.nn.init.xavier_uniform_

    Examples:
        >>> self.apply(xavier_uniform_initialization)
    """
    # Embedding layers carry only a weight matrix — initialize and stop.
    if isinstance(module, nn.Embedding):
        xavier_uniform_(module.weight.data)
        return
    # Everything except Linear is deliberately ignored.
    if not isinstance(module, nn.Linear):
        return
    xavier_uniform_(module.weight.data)
    bias = module.bias
    if bias is not None:
        constant_(bias.data, 0)