對於初始化基本概念不清楚的可以點擊
權值初始化的十種方法
Talking is cheap, please give me the code!
前題當然是導入pytorch中相應的用於參數初始化的包
import torch
import torch.nn as nn
from torch.nn import init
方法一
#define the initial function to init the layer's parameters for the network
# define the initial function to init the layer's parameters for the network
def weigth_init(m):
    """Initialize one sub-module's parameters; meant for ``model.apply``.

    Conv2d      -> Xavier-uniform weight, constant 0.1 bias
    BatchNorm2d -> weight 1, bias 0
    Linear      -> N(0, 0.01) weight, zero bias
    Any other module type is left untouched.
    """
    if isinstance(m, nn.Conv2d):
        # Xavier (Glorot) uniform initialization for convolution weights.
        init.xavier_uniform_(m.weight)
        # Conv2d may be constructed with bias=False — guard before touching it.
        if m.bias is not None:
            init.constant_(m.bias, 0.1)
    elif isinstance(m, nn.BatchNorm2d):
        # Identity affine transform: scale 1, shift 0.
        init.constant_(m.weight, 1)
        init.constant_(m.bias, 0)
    elif isinstance(m, nn.Linear):
        # Small gaussian weights for fully-connected layers.
        init.normal_(m.weight, 0, 0.01)
        if m.bias is not None:
            init.constant_(m.bias, 0)
首先定義了一個初始化函數,接着進行調用就ok了,不過要先把網絡模型實例化
#Define Network
# NOTE(review): `Net` and `args` are assumed to be defined elsewhere in the
# full script — they are not visible in this snippet. Confirm before running.
model = Net(args.input_channel,args.output_channel)
# `apply` walks the module tree recursively and calls weigth_init on every
# sub-module, so one function handles the whole network.
model.apply(weigth_init)
方法二
def initNetParams(net):
    """Initialize `net`'s parameters in place.

    Conv2d      -> Xavier-uniform weight, zero bias (if present)
    BatchNorm2d -> weight 1, bias 0
    Linear      -> N(0, 1e-3) weight, zero bias (if present)
    """
    for m in net.modules():
        if isinstance(m, nn.Conv2d):
            # Use the in-place variants (trailing underscore); the
            # un-suffixed forms were deprecated and later removed.
            init.xavier_uniform_(m.weight)
            # `if m.bias:` would evaluate the tensor's truth value, which
            # raises for multi-element tensors — compare against None instead.
            if m.bias is not None:
                init.constant_(m.bias, 0)
        elif isinstance(m, nn.BatchNorm2d):
            # Identity affine transform: scale 1, shift 0.
            init.constant_(m.weight, 1)
            init.constant_(m.bias, 0)
        elif isinstance(m, nn.Linear):
            init.normal_(m.weight, std=1e-3)
            if m.bias is not None:
                init.constant_(m.bias, 0)
# NOTE(review): `net` must be an already-instantiated model defined elsewhere
# in the full script; it is not visible in this snippet.
initNetParams(net)
方法三
# Common practise for initialization.
# Common practise for initialization.
# NOTE(review): assumes `model` is an already-built nn.Module defined
# elsewhere — confirm before running this fragment.
for layer in model.modules():
    if isinstance(layer, torch.nn.Conv2d):
        # Kaiming (He) normal initialization — the recommended scheme
        # for layers followed by ReLU activations.
        torch.nn.init.kaiming_normal_(layer.weight, mode='fan_out',
                                      nonlinearity='relu')
        if layer.bias is not None:
            torch.nn.init.constant_(layer.bias, val=0.0)
    elif isinstance(layer, torch.nn.BatchNorm2d):
        # Identity affine transform: scale 1, shift 0.
        torch.nn.init.constant_(layer.weight, val=1.0)
        torch.nn.init.constant_(layer.bias, val=0.0)
    elif isinstance(layer, torch.nn.Linear):
        torch.nn.init.xavier_normal_(layer.weight)
        if layer.bias is not None:
            torch.nn.init.constant_(layer.bias, val=0.0)

# Initialization with given tensor.
# NOTE(review): illustrative only — `tensor` must be defined by the caller,
# and `layer` here is whatever module the loop above ended on.
layer.weight = torch.nn.Parameter(tensor)