11_nn.Non-linear Activations
以 ReLU 为例。ReLU 是深度学习中最常用的激活函数之一(广泛用于 CNN、NLP 等各类模型)
https://pytorch.org/docs/stable/generated/torch.nn.ReLU.html#torch.nn.ReLU
torch.nn.ReLU(inplace=False)
Parameters: inplace – can optionally do the operation in-place. Default: False  # 即是否原地替换输入张量
Shape: Input: (*), where * means any number of dimensions. Output: (*), same shape as the input.
inplace理解
import torch
from torch import nn
from torch.nn import ReLU


class DEMO(nn.Module):
    """Minimal module wrapping nn.ReLU to demonstrate the activation.

    forward(x) returns ReLU applied element-wise: max(0, x). The output
    has the same shape as the input (ReLU accepts any shape).
    """

    def __init__(self):
        super().__init__()  # modern zero-argument super() (Python 3)
        self.relu1 = ReLU()

    def forward(self, x):
        # Element-wise max(0, x); negative entries become 0.
        return self.relu1(x)


# Renamed from `input` to avoid shadowing the builtin input().
input_tensor = torch.tensor([[1.0, -0.5],
                             [-2.0, 3.0]])

demo = DEMO()
output = demo(input_tensor)
print(output)
# Example on the CIFAR10 dataset: log Sigmoid input/output images to TensorBoard.
import torch
import torchvision
from torch import nn
from torch.nn import ReLU, Sigmoid
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# CIFAR10 test split as tensors; downloads to ./dataset on first run.
dataset = torchvision.datasets.CIFAR10(
    './dataset',
    train=False,
    transform=torchvision.transforms.ToTensor(),
    download=True,
)
dataloader = DataLoader(dataset, batch_size=64)


class DEMO(nn.Module):
    """Applies Sigmoid element-wise to its input.

    A ReLU layer is also constructed but intentionally unused here — the
    lesson compares activations and this variant exercises Sigmoid only.
    """

    def __init__(self):
        super().__init__()  # modern zero-argument super() (Python 3)
        self.relu1 = ReLU()
        self.sigmoid = Sigmoid()

    def forward(self, x):
        # Sigmoid squashes each element into (0, 1); shape is unchanged,
        # so the result is still a valid image batch for add_images.
        return self.sigmoid(x)


demo = DEMO()
writer = SummaryWriter('./logs')
step = 0
for data in dataloader:
    imgs, targets = data
    output = demo(imgs)
    writer.add_images('input_sigmoid', imgs, step)
    writer.add_images('output_sigmoid', output, step)
    # BUG FIX: step was never incremented, so every batch was written to
    # global step 0 and overwrote the previous one in TensorBoard.
    step += 1
writer.close()
