CLASS torch.nn.ReLU(inplace=False)
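The inplace flag controls whether the activation overwrites its input tensor instead of allocating a new output tensor. A minimal sketch of the difference (the variable names are just illustrative):

import torch
from torch import nn

x = torch.tensor([-1.0, 2.0])
out = nn.ReLU(inplace=False)(x)   # x is left unchanged, out is a new tensor
print(x)    # tensor([-1., 2.])
print(out)  # tensor([0., 2.])

y = torch.tensor([-1.0, 2.0])
nn.ReLU(inplace=True)(y)          # y itself is overwritten with the result
print(y)    # tensor([0., 2.])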
Hands-on example:
import torch
from torch import nn
from torch.nn import ReLU

# A 2x2 tensor containing both positive and negative values
input = torch.tensor([[1, -0.5],
                      [-1, 3]])
# Reshape to (batch, channel, height, width) = (1, 1, 2, 2)
input = torch.reshape(input, (-1, 1, 2, 2))
print(input.shape)

class Peipei(nn.Module):
    def __init__(self) -> None:
        super(Peipei, self).__init__()
        self.relu1 = ReLU()

    def forward(self, input):
        output = self.relu1(input)
        return output

peipei = Peipei()
output = peipei(input)
print(output)
Output:
torch.Size([1, 1, 2, 2])
tensor([[[[1., 0.],
          [0., 3.]]]])
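ReLU simply clamps negative entries to zero, i.e. ReLU(x) = max(0, x), which is why -0.5 and -1 become 0 while 1 and 3 pass through unchanged. The same result can also be obtained without defining a module, for example via the functional API (a minimal sketch):

import torch
import torch.nn.functional as F

x = torch.tensor([[1, -0.5],
                  [-1, 3]])
print(torch.relu(x))   # tensor([[1., 0.], [0., 3.]])
print(F.relu(x))       # same result via the functional API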
CLASS torch.nn.Sigmoid
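Sigmoid squashes every element into the range (0, 1) via Sigmoid(x) = 1 / (1 + exp(-x)). A quick check of that definition (a minimal sketch, independent of the example below):

import torch

x = torch.tensor([-1.0, 0.0, 3.0])
print(torch.sigmoid(x))           # tensor([0.2689, 0.5000, 0.9526])
print(1 / (1 + torch.exp(-x)))    # identical values, by definition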
import torch
import torchvision
from torch import nn
from torch.nn import ReLU, Sigmoid
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# CIFAR-10 test set, converted to tensors and served in batches of 64
dataset = torchvision.datasets.CIFAR10("data", train=False, download=True,
                                       transform=torchvision.transforms.ToTensor())
dataloader = DataLoader(dataset, batch_size=64)

class Peipei(nn.Module):
    def __init__(self) -> None:
        super(Peipei, self).__init__()
        self.sigmoid1 = Sigmoid()

    def forward(self, input):
        output = self.sigmoid1(input)
        return output

peipei = Peipei()
writer = SummaryWriter("logs_sigmoid")
step = 0
for data in dataloader:
    imgs, target = data
    output = peipei(imgs)
    # Log the original and sigmoid-transformed image batches for comparison
    writer.add_images("input", imgs, step)
    writer.add_images("output", output, step)
    step = step + 1
writer.close()
Output:
The results are written to TensorBoard rather than printed: run tensorboard --logdir=logs_sigmoid and compare the "input" and "output" image panels for each step.
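Because ToTensor() already scales pixel values into [0, 1], the sigmoid squeezes them into roughly [0.5, 0.73], which is why the logged output images look washed out compared with the inputs. A minimal self-contained sketch of that range squeeze, using a random tensor in place of a real CIFAR-10 batch:

import torch

x = torch.rand(3, 32, 32)              # ToTensor() output already lies in [0, 1]
y = torch.sigmoid(x)
print(x.min().item(), x.max().item())  # roughly 0.0 .. 1.0
print(y.min().item(), y.max().item())  # roughly 0.5 .. 0.73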