"""嵌套块访问参数"""
def block1():
    """Build one 4 -> 8 -> 4 MLP sub-block: Linear, ReLU, Linear, ReLU."""
    layers = [
        nn.Linear(4, 8),
        nn.ReLU(),
        nn.Linear(8, 4),
        nn.ReLU(),
    ]
    return nn.Sequential(*layers)
def block2():
    """Nest four copies of block1 inside one Sequential container.

    The children are registered under the names ``block0`` .. ``block3``,
    which is how they show up when the model is printed.
    """
    container = nn.Sequential()
    for idx in range(4):
        container.add_module(f"block{idx}", block1())
    return container
# Nest block2 (itself four block1 sub-blocks) under a top-level Sequential,
# then map the final 4 features down to a single output.
rgnet = nn.Sequential(block2(),nn.Linear(4,1))
"""Sequential(
(0): Sequential(
(block0): Sequential(
(0): Linear(in_features=4, out_features=8, bias=True)
(1): ReLU()
(2): Linear(in_features=8, out_features=4, bias=True)
(3): ReLU()
)
(block1): Sequential(
(0): Linear(in_features=4, out_features=8, bias=True)
(1): ReLU()
(2): Linear(in_features=8, out_features=4, bias=True)
(3): ReLU()
)
(block2): Sequential(
(0): Linear(in_features=4, out_features=8, bias=True)
(1): ReLU()
(2): Linear(in_features=8, out_features=4, bias=True)
(3): ReLU()
)
(block3): Sequential(
(0): Linear(in_features=4, out_features=8, bias=True)
(1): ReLU()
(2): Linear(in_features=8, out_features=4, bias=True)
(3): ReLU()
)
)
(1): Linear(in_features=4, out_features=1, bias=True)
)
Process finished with exit code 0
"""
"""内置初始化"""
def init_normal(m):
    """Initialize a Linear layer in place: weights ~ N(0, 0.01**2), biases zero.

    Intended to be passed to ``nn.Module.apply``, which calls it on every
    submodule; modules that are not Linear layers are left untouched.

    Note: using ``nn.init.constant_(m.weight, 1)`` instead would give every
    unit identical weights, making the layer equivalent to a single neuron.
    """
    # isinstance (not type(m) == nn.Linear) is idiomatic and also covers
    # subclasses of nn.Linear.
    if isinstance(m, nn.Linear):
        nn.init.normal_(m.weight, mean=0, std=0.01)
        nn.init.zeros_(m.bias)
# Recursively apply init_normal to every submodule of `net`
# (`net` is defined earlier in the file, outside this chunk).
net.apply(init_normal)
# Inspect the first row of the first layer's freshly initialized weights.
print(net[0].weight.data[0])
# tensor([ 0.0123, 0.0025, -0.0029, 0.0007])