
Full code used:

import os
import numpy as np
from keras_unet_collection import models, losses
from tensorflow import keras
from PIL import Image


# Hybrid loss combining focal Tversky and IoU, following the keras-unet-collection examples
def hybrid_loss(y_true, y_pred):
    loss_focal = losses.focal_tversky(y_true, y_pred, alpha=0.5, gamma=4 / 3)
    loss_iou = losses.iou_seg(y_true, y_pred)
    return loss_focal + loss_iou


# UNET 3+ with deep supervision enabled, so the model produces several outputs
# (hence the five loss entries and weights passed to compile below)
model = models.unet_3plus_2d((128, 128, 3), n_labels=2, filter_num_down=[64, 128, 256, 512],
                             filter_num_skip=[64, 64, 64], filter_num_aggregate=256,
                             stack_num_down=2, stack_num_up=1, activation='ReLU', output_activation='Sigmoid',
                             batch_norm=True, pool='max', unpool=False, deep_supervision=True, name='unet3plus')

# One hybrid_loss entry (and weight) per model output; the final output gets the largest weight
model.compile(loss=[hybrid_loss, hybrid_loss, hybrid_loss, hybrid_loss, hybrid_loss],
              loss_weights=[0.25, 0.25, 0.25, 0.25, 1.0],
              optimizer=keras.optimizers.Adam(learning_rate=1e-4))


def load_images(path, target_size=(128, 128), grayscale=False):
    """Load every image in a directory, optionally convert to grayscale, resize, and stack into one array."""
    images = []
    for filename in sorted(os.listdir(path)):
        image = Image.open(os.path.join(path, filename))
        if grayscale:
            image = image.convert('L')
        else:
            image = image.convert('RGB')
        image = image.resize(target_size, resample=Image.BICUBIC)
        images.append(np.array(image))
    return np.array(images)


train_images_path = 'data/train/images'
train_masks_path = 'data/train/mask'
val_images_path = 'data/val/images'
val_masks_path = 'data/val/mask'

image_size = (128, 128)

# Load the images and masks as numpy arrays
train_images = load_images(train_images_path, target_size=image_size)
train_masks = load_images(train_masks_path, target_size=image_size, grayscale=True)
val_images = load_images(val_images_path, target_size=image_size)
val_masks = load_images(val_masks_path, target_size=image_size, grayscale=True)

history = model.fit(train_images, train_masks, batch_size=8, epochs=50, validation_data=(val_images, val_masks))

Error:

Node: 'gradient_tape/hybrid_loss_2/mul/BroadcastGradientArgs'
Incompatible shapes: [131072] vs. [393216]
     [[{{node gradient_tape/hybrid_loss_2/mul/BroadcastGradientArgs}}]] [Op:__inference_train_function_14372]

I have followed the examples in the keras-unet-collection documentation to the best of my ability, but I am struggling to get this to work. I am using my own image-loading function because my mask images are not grayscale on disk. My initial research suggested a mismatch between the shapes of y_true and y_pred in the hybrid_loss function, but when I print from inside hybrid_loss it runs successfully several times before failing.
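To show what I mean by printing from the loss, the shape check I tried looks roughly like this (a sketch rather than my exact code; it just logs the tensor shapes each time the loss is evaluated):

import tensorflow as tf

def hybrid_loss(y_true, y_pred):
    # Log the shapes of the ground truth and prediction each time this loss runs
    tf.print("y_true shape:", tf.shape(y_true), "y_pred shape:", tf.shape(y_pred))
    loss_focal = losses.focal_tversky(y_true, y_pred, alpha=0.5, gamma=4 / 3)
    loss_iou = losses.iou_seg(y_true, y_pred)
    return loss_focal + loss_iou

This is what prints successfully several times before the error above is raised.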

I evidently don't have much experience with TensorFlow, so if you could help me figure out what is causing this problem and how I might be able to fix it, that would be very helpful.

James Hall