11_nn.Non-linear Activations

Take ReLU as an example. ReLU is one of the most commonly used activation functions; it computes ReLU(x) = max(0, x), clamping negative values to zero and passing positive values through unchanged.

https://pytorch.org/docs/stable/generated/torch.nn.ReLU.html#torch.nn.ReLU

    torch.nn.ReLU(inplace=False)

    Parameters:
        inplace - can optionally do the operation in-place. Default: False
                  (i.e., whether the input tensor itself is overwritten with the result)

    Shape:
        Input: (*), where * means any number of dimensions.
        Output: (*), same shape as the input.
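
A minimal sketch (my own example, not from the docs page) confirming the Shape note: ReLU accepts a tensor of any number of dimensions and returns one of exactly the same shape.

    import torch
    from torch import nn

    relu = nn.ReLU()
    x = torch.randn(2, 3, 32, 32)   # arbitrary shape
    print(relu(x).shape)            # torch.Size([2, 3, 32, 32]), same as the input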

Understanding inplace

With inplace=True the result overwrites the input tensor itself (saving a little memory, but the original values are lost); with the default inplace=False the input is left unchanged and a new tensor is returned. A side-by-side sketch follows the example below.

    import torch
    from torch import nn
    from torch.nn import ReLU

    input = torch.tensor([[1, -0.5],
                          [-2, 3]])

    class DEMO(nn.Module):
        def __init__(self):
            super(DEMO, self).__init__()
            self.relu1 = ReLU()      # inplace defaults to False

        def forward(self, x):
            x = self.relu1(x)
            return x

    demo = DEMO()
    output = demo(input)
    print(output)                    # tensor([[1., 0.], [0., 3.]]): negative entries become 0
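
To see what inplace actually changes, here is a small comparison sketch (my own example, not part of the original notes): with inplace=True the same tensor object is modified, so the original negative values are lost.

    import torch
    from torch.nn import ReLU

    x = torch.tensor([[1., -0.5],
                      [-2., 3.]])
    out = ReLU(inplace=False)(x)     # default: a new tensor is returned
    print(x)                         # x still contains -0.5 and -2

    y = torch.tensor([[1., -0.5],
                      [-2., 3.]])
    ReLU(inplace=True)(y)            # y itself is overwritten
    print(y)                         # tensor([[1., 0.], [0., 3.]])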
Using the CIFAR10 dataset as an example:

    import torch
    import torchvision
    from torch import nn
    from torch.nn import ReLU, Sigmoid
    from torch.utils.data import DataLoader
    from torch.utils.tensorboard import SummaryWriter

    dataset = torchvision.datasets.CIFAR10('./dataset', train=False,
                                           transform=torchvision.transforms.ToTensor(),
                                           download=True)
    dataloader = DataLoader(dataset, batch_size=64)

    class DEMO(nn.Module):
        def __init__(self):
            super(DEMO, self).__init__()
            self.relu1 = ReLU()          # defined but not used in forward below
            self.sigmoid = Sigmoid()

        def forward(self, x):
            x = self.sigmoid(x)          # apply Sigmoid to every pixel value
            return x

    demo = DEMO()
    step = 0
    writer = SummaryWriter('./logs')
    for data in dataloader:
        imgs, targets = data
        output = demo(imgs)
        writer.add_images('input_sigmoid', imgs, step)
        writer.add_images('output_sigmoid', output, step)
        step += 1                        # advance the TensorBoard step counter each batch
    writer.close()                       # close the writer once, after the loop
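
After running the script, the input and output images can be compared in TensorBoard with `tensorboard --logdir=logs` (matching the SummaryWriter directory above). Because ToTensor scales pixel values into [0, 1], Sigmoid maps them into roughly [0.5, 0.73], so the logged output images look brighter and lower in contrast than the inputs. A quick sketch of the value range (my own example):

    import torch
    from torch.nn import Sigmoid

    pixels = torch.tensor([0.0, 0.5, 1.0])   # possible pixel values after ToTensor
    print(Sigmoid()(pixels))                 # tensor([0.5000, 0.6225, 0.7311])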