0

I'm using Detectron2 to do instance segmentation as in the tutorial. Below is the code:

from detectron2.config import CfgNode
import detectron2.data.transforms as T
from detectron2.data import build_detection_train_loader, DatasetMapper

# Make sure the output directory exists before any training artifacts are written.
os.makedirs(cfg.OUTPUT_DIR, exist_ok=True)


# Augmentations applied to every training sample; the disabled ones are kept
# here for reference so they can be toggled back on easily.
augmentations = [
    # T.Resize(shape=(200, 300)),
    T.RandomRotation(angle=90.0),
    # T.RandomContrast(intensity_min=0.75, intensity_max=1.25),
    # T.RandomBrightness(intensity_min=0.75, intensity_max=1.25),
    # T.RandomSaturation(intensity_min=0.75, intensity_max=1.25),
    # T.RandomLighting(scale=0.1),
    T.RandomFlip(),
    # T.RandomCrop(crop_type="absolute", crop_size=(180, 270))
]

# custom_mapper = get_custom_mapper(augmentations)
# Mapper that applies the augmentations above and loads instance masks
# in "bitmask" format (matching cfg.INPUT.MASK_FORMAT below).
custom_mapper = DatasetMapper(
    cfg,
    is_train=True,
    augmentations=augmentations,
    use_instance_mask=True,
    instance_mask_format="bitmask",
)

class CustomTrainer(DefaultTrainer):
    """DefaultTrainer variant that plugs custom dataset mappers into its loaders.

    Training uses ``custom_mapper`` (random rotation/flip augmentations,
    bitmask instance masks); evaluation uses a separate inference-mode
    mapper so that random train-time augmentations are never applied to
    validation images.
    """

    @classmethod
    def build_train_loader(cls, cfg: CfgNode):
        """Build the training loader with the augmenting ``custom_mapper``."""
        return build_detection_train_loader(cfg, mapper=custom_mapper)

    @classmethod
    def build_test_loader(cls, cfg: CfgNode, dataset_name):
        """Build the evaluation loader for ``dataset_name``.

        BUGFIX: the original reused ``custom_mapper`` (built with
        ``is_train=True`` and random RandomRotation/RandomFlip), which
        would randomly transform validation images and corrupt the
        evaluation metrics. Use a deterministic inference-mode mapper
        that still reads masks in "bitmask" format.
        """
        test_mapper = DatasetMapper(
            cfg,
            is_train=False,
            use_instance_mask=True,
            instance_mask_format="bitmask",
        )
        return build_detection_test_loader(cfg, dataset_name, mapper=test_mapper)

# Keep images that have no annotations in the training set, and tell the
# data pipeline that ground-truth masks are stored as bitmasks.
cfg.DATALOADER.FILTER_EMPTY_ANNOTATIONS = False
cfg.INPUT.MASK_FORMAT = "bitmask"

trainer = CustomTrainer(cfg)
# trainer = DefaultTrainer(cfg)

# trainer.resume_or_load(resume=False)
# trainer.train()

However, in this case I don't care about instances; what I actually want is semantic segmentation, but there is no tutorial or example for that, nor do I see a semantic segmentation model I can start from. Misc/semantic_R_50_FPN_1x.yaml throws an error saying there is no pretrained model available.

So I'm trying to use the SemSegEvaluator instead of the COCO evaluator, to get metrics about semantic segmentation rather than instances. Below is the code:

from detectron2.evaluation import COCOEvaluator, inference_on_dataset, SemSegEvaluator
from detectron2.data import build_detection_test_loader

cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.4

# Directory where the evaluator writes its per-image outputs.
val_output_dir = os.path.join(cfg.OUTPUT_DIR, 'val')
# evaluator = COCOEvaluator(val_dataset_name, output_dir=val_output_dir, use_fast_impl=False, tasks=['segm'])
evaluator = SemSegEvaluator(val_dataset_name, output_dir=val_output_dir)

val_loader = build_detection_test_loader(cfg, val_dataset_name)
eval_result = inference_on_dataset(predictor.model, val_loader, evaluator)
print(eval_result)

However, this is failing with the following error:

[12/20 16:29:02 d2.data.datasets.coco]: Loaded 50 imagesss abdul in COCO format from /content/gdrive/MyDrive/SolarDetection/datasets/train8//val/labels.json
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
<ipython-input-10-61bd5aaec8ea> in <module>
      3 cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.4
      4 # evaluator = COCOEvaluator(val_dataset_name, output_dir=os.path.join(cfg.OUTPUT_DIR, 'val'), use_fast_impl=False, tasks=['segm'])
----> 5 evaluator = SemSegEvaluator(val_dataset_name, output_dir=os.path.join(cfg.OUTPUT_DIR, 'val'))
      6 val_loader = build_detection_test_loader(cfg, val_dataset_name)
      7 # ipdb.set_trace(context=6)

1 frames
/content/gdrive/MyDrive/repos/detectron2/detectron2/evaluation/sem_seg_evaluation.py in <dictcomp>(.0)
     69 
     70         self.input_file_to_gt_file = {
---> 71             dataset_record["file_name"]: dataset_record["sem_seg_file_name"]
     72             for dataset_record in DatasetCatalog.get(dataset_name)
     73         }

KeyError: 'sem_seg_file_name'

Any idea or hint how I can setup and use the SemSegEvaluator?

0 Answers