I got this error:

`RuntimeError: Given groups=1, weight of size [32, 3, 3, 3], expected input[8, 1, 256, 256] to have 3 channels, but got 1 channels instead`

I haven't found the cause yet. Can anyone help me figure out the problem? Thank you. This is my code:
"'"
"'"
import torch
import torch.nn as nn

# DoubleConv is my own helper block (two convs), defined elsewhere in my code

class UNet(nn.Module):
    def __init__(self, in_channels=3, features=[32, 64, 128, 512], num_classes=3):
        super().__init__()
        # encoder: DoubleConv blocks, downsampled with max pooling in forward()
        self.down = []
        self.pool = nn.MaxPool2d(2, 2)
        for i in range(len(features)):
            if i == 0:
                self.down.append(
                    nn.Sequential(
                        DoubleConv(in_channels, features[i])
                    )
                )
            else:
                self.down.append(
                    nn.Sequential(
                        DoubleConv(features[i - 1], features[i])
                    )
                )
        self.down = nn.ModuleList(self.down)
        # bottleneck
        self.lower = DoubleConv(features[-1], features[-1] * 2)
        # decoder: transposed convs for upsampling, DoubleConv after each skip concat
        self.up = []
        self.dbc = []
        features.reverse()
        for i in range(len(features)):
            if i == 0:
                self.up.append(
                    nn.Sequential(
                        nn.ConvTranspose2d(features[0] * 2, features[i], 2, 2),
                        nn.ReLU()
                    )
                )
                self.dbc.append(DoubleConv(features[i] * 2, features[i]))
            else:
                self.up.append(
                    nn.Sequential(
                        nn.ConvTranspose2d(features[i - 1], features[i], 2, 2),
                        nn.ReLU(),
                    )
                )
                self.dbc.append(DoubleConv(features[i] * 2, features[i]))
        self.up = nn.ModuleList(self.up)
        self.dbc = nn.ModuleList(self.dbc)  # wrap in ModuleList so these layers are registered too
        self.classifier = nn.Conv2d(features[-1], num_classes, 3, 1, 1)

    def forward(self, x):
        # encoder path, keeping the pre-pool feature maps for the skip connections
        x_down = []
        for i, layer in enumerate(self.down):
            x_down.append(layer(x))
            x = self.pool(x_down[-1])
        print(x.shape)
        x_lower = self.lower(x)
        print(x_lower.shape)
        # decoder path: upsample, concatenate the matching skip, then DoubleConv
        x_down.reverse()
        x_up = []
        for i, (layer_up, layer_dbc) in enumerate(zip(self.up, self.dbc)):
            if i == 0:
                temp_x = layer_up(x_lower)
                print(temp_x.shape)
                temp_x = torch.cat((temp_x, x_down[i]), dim=1)
                x_up.append(layer_dbc(temp_x))
            else:
                temp_x = layer_up(x_up[-1])
                temp_x = torch.cat((temp_x, x_down[i]), dim=1)
                x_up.append(layer_dbc(temp_x))
        # print(x_up[-1].shape)
        x_classifier = self.classifier(x_up[-1])
        print(x_classifier.shape)
        return x_classifier
```
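`DoubleConv` isn't shown above; it's just my two-conv block. Roughly it looks like the sketch below (the exact layers may differ a bit, but the first conv matches the `weight of size [32, 3, 3, 3]` in the error, i.e. `Conv2d(3, 32, kernel_size=3)` in the first encoder block):

```python
class DoubleConv(nn.Module):
    # rough sketch of my DoubleConv: two 3x3 convs with ReLU, spatial size unchanged
    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.block = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, 3, padding=1),
            nn.ReLU(inplace=True),
        )

    def forward(self, x):
        return self.block(x)
```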
I tried to check the model with this input:
```python
model = UNet()
x = torch.randn(3, 3, 256, 256)
output = model(x)
```
and it works.
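The shape in the error message is `input[8, 1, 256, 256]`, so it seems my real batches are single-channel (grayscale) while the first conv expects 3 channels. I can reproduce the exact same RuntimeError with a 1-channel dummy input:

```python
model = UNet()                   # in_channels defaults to 3
x = torch.randn(8, 1, 256, 256)  # grayscale batch, same shape as in the error message
output = model(x)                # fails in the first DoubleConv with the RuntimeError above
```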