Below is my code. I have to keep the same channel scheme, but when I concatenate the skip connections while upsampling, I get an error.
import torch
import torch.nn as nn

class Unet(nn.Module):
    def __init__(self):
        super(Unet, self).__init__()
        # Encoder blocks
        self.conv1 = nn.Sequential(
            nn.Conv1d(1, 8, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(8, 8, 15, padding=1),
            nn.ReLU(),
            nn.MaxPool1d(kernel_size=2),
        )
        self.conv2 = nn.Sequential(
            nn.Conv1d(8, 16, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(16, 16, 15, padding=1),
            nn.ReLU(),
            nn.MaxPool1d(kernel_size=2),
        )
        self.conv3 = nn.Sequential(
            nn.Conv1d(16, 32, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(32, 32, 15, padding=1),
            nn.ReLU(),
            nn.MaxPool1d(kernel_size=2),
        )
        self.conv4 = nn.Sequential(
            nn.Conv1d(32, 64, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(64, 64, 15, padding=1),
            nn.ReLU(),
            nn.MaxPool1d(kernel_size=2),
        )
        # Bottleneck
        self.conv5 = nn.Sequential(
            nn.Conv1d(64, 128, 3, padding=1),
            nn.ReLU(),
        )
        # Decoder blocks: convolve, then upsample
        self.upconv1 = nn.Sequential(
            nn.Conv1d(128, 64, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(64, 64, 3, padding=1),
            nn.ReLU(),
            nn.Upsample(scale_factor=8),
        )
        self.upconv2 = nn.Sequential(
            nn.Conv1d(64, 32, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(32, 32, 3, padding=1),
            nn.ReLU(),
            nn.Upsample(scale_factor=8),
        )
        self.upconv3 = nn.Sequential(
            nn.Conv1d(32, 16, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(16, 16, 3, padding=1),
            nn.ReLU(),
            nn.Upsample(scale_factor=8),
        )
        self.upconv4 = nn.Sequential(
            nn.Conv1d(16, 8, 3, padding=1),
            nn.ReLU(),
            nn.Conv1d(8, 8, 3, padding=1),
            nn.ReLU(),
            nn.Upsample(scale_factor=8),
        )
        self.upconv5 = nn.Sequential(
            nn.Conv1d(8, 1, 3, padding=1),
            nn.ReLU(),
        )

    def forward(self, x):
        # Encoder path; the outputs are kept for the skip connections
        conv1 = self.conv1(x)
        conv2 = self.conv2(conv1)
        conv3 = self.conv3(conv2)
        conv4 = self.conv4(conv3)
        conv5 = self.conv5(conv4)
        # Decoder path: concatenate each skip tensor on the channel dimension
        upconv1 = self.upconv1(conv5)
        upconv1 = torch.cat((upconv1, conv4), dim=1)
        upconv2 = self.upconv2(upconv1)
        upconv2 = torch.cat((upconv2, conv3), dim=1)
        upconv3 = self.upconv3(upconv2)
        upconv3 = torch.cat((upconv3, conv2), dim=1)
        upconv4 = self.upconv4(upconv3)
        upconv4 = torch.cat((upconv4, conv1), dim=1)
        upconv5 = self.upconv5(conv1)
        return upconv5
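For reference, this is how I run it (the tensor here is just a stand-in with the same shape as my real input):

model = Unet()
x = torch.randn(1, 1, 1400)  # dummy input with my input's shape [1, 1, 1400]
out = model(x)               # raises the RuntimeError below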
Below is the error. It is thrown at the first upsample-and-concatenate step. I have tried a lot but cannot see what is wrong with my code. My input has the shape [1, 1, 1400].
Input In [27], in Unet.forward(self, x)
     74 conv5=self.conv5(conv4)
     76 upconv1=self.upconv1(conv5)
---> 77 upconv1=torch.cat((upconv1,conv4),dim=1)
     79 upconv2=self.upconv2(upconv1)
     80 upconv2=torch.cat((upconv2,conv3),dim=1)

RuntimeError: Sizes of tensors must match except in dimension 1. Expected size 608 but got size 76 for tensor number 1 in the list.
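To narrow it down, I printed the shape after each encoder block (a quick diagnostic sketch; it assumes the module names from the class above):

model = Unet()
t = torch.randn(1, 1, 1400)
for name in ["conv1", "conv2", "conv3", "conv4", "conv5"]:
    t = getattr(model, name)(t)  # run one encoder block at a time
    print(name, tuple(t.shape))
# conv1 (1, 8, 694)
# conv2 (1, 16, 341)
# conv3 (1, 32, 164)
# conv4 (1, 64, 76)
# conv5 (1, 128, 76)

So the tensor entering upconv1 has length 76; after Upsample(scale_factor=8) it becomes 608, while the conv4 skip tensor still has length 76, which is exactly the 608 vs. 76 in the error message.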