"""Accessing parameters of nested blocks."""
def block1():
    """Return a small 2-layer MLP block: Linear(4->8) -> ReLU -> Linear(8->4) -> ReLU."""
    layers = [
        nn.Linear(4, 8),
        nn.ReLU(),
        nn.Linear(8, 4),
        nn.ReLU(),
    ]
    return nn.Sequential(*layers)
    4. def block2():
    5. net =nn.Sequential()
    6. for i in range(4):
    7. net.add_module(f"block{i}",block1())
    8. return net
    9. rgnet = nn.Sequential(block2(),nn.Linear(4,1))
    10. """Sequential(
    11. (0): Sequential(
    12. (block0): Sequential(
    13. (0): Linear(in_features=4, out_features=8, bias=True)
    14. (1): ReLU()
    15. (2): Linear(in_features=8, out_features=4, bias=True)
    16. (3): ReLU()
    17. )
    18. (block1): Sequential(
    19. (0): Linear(in_features=4, out_features=8, bias=True)
    20. (1): ReLU()
    21. (2): Linear(in_features=8, out_features=4, bias=True)
    22. (3): ReLU()
    23. )
    24. (block2): Sequential(
    25. (0): Linear(in_features=4, out_features=8, bias=True)
    26. (1): ReLU()
    27. (2): Linear(in_features=8, out_features=4, bias=True)
    28. (3): ReLU()
    29. )
    30. (block3): Sequential(
    31. (0): Linear(in_features=4, out_features=8, bias=True)
    32. (1): ReLU()
    33. (2): Linear(in_features=8, out_features=4, bias=True)
    34. (3): ReLU()
    35. )
    36. )
    37. (1): Linear(in_features=4, out_features=1, bias=True)
    38. )
    39. Process finished with exit code 0
    40. """
    1. """内置初始化"""
    2. def init_normal(m):
    3. if type(m)==nn.Linear:
    4. nn.init.normal_(m.weight,mean=0,std=0.01) # nn.init.constant(m.weight,1) 如果全初始化为1 等价于一个神经元
    5. nn.init.zeros_(m.bias)
    6. net.apply(init_normal)
    7. print(net[0].weight.data[0])
    8. # tensor([ 0.0123, 0.0025, -0.0029, 0.0007])