0

I'm trying to convert my OpenCV remap-based distortion correction to use the NVIDIA VPI Remap algorithm.

My original code:

// Load the input image and allocate a same-sized, same-typed destination.
cv::Mat src = /* read image */;
cv::Mat dst(src.size(), src.type());
// Pre-computed per-pixel source coordinates, one plane for x and one for y
// (cv::remap's map1/map2) — presumably CV_32FC1, same size as src; TODO confirm.
cv::Mat correction_x = /* image x correction */;
cv::Mat correction_y = /* image y correction */;

// For each dst pixel (i, j), sample src at (correction_x(i,j), correction_y(i,j))
// with bilinear interpolation; samples falling outside src become black.
remap(src, dst, correction_x, correction_y, INTER_LINEAR, BORDER_CONSTANT, Scalar(0, 0, 0));

Where cv::Mat correction_x, correction_y are camera corrections that are pre-calculated.

First I looked at the Lens Distortion Models, but they don't take a pre-calculated matrix.

From the NVIDIA VPI Remap documentation I took this code (I'm using VPI 1):

cv::Mat src = /* read image */;
VPIImage src_image;
vpiImageCreateOpenCVMatWrapper(src, 0, &src_image);
cv::Mat correction_x = /* image x correction */;
cv::Mat correction_y = /* image y correction */;

int32_t w, h;
vpiImageGetSize(src_image, &w, &h);
 
VPIImageFormat type;
vpiImageGetFormat(src_image, &type);
 
VPIImage output;
vpiImageCreate(w, h, type, 0, &output);

VPIStream stream;
vpiStreamCreate(0, &stream);

VPIWarpMap map;
memset(&map, 0, sizeof(map));
map.grid.numHorizRegions  = 1;
map.grid.numVertRegions   = 1;
map.grid.regionWidth[0]   = w;
map.grid.regionHeight[0]  = h;
map.grid.horizInterval[0] = 1;
map.grid.vertInterval[0]  = 1;
vpiWarpMapAllocData(&map);        

vpiWarpMapGenerateIdentity(&map);
int i;
for (i = 0; i < map.numVertPoints; ++i)
{
    VPIKeypoint *row = (VPIKeypoint *)((uint8_t *)map.keypoints + map.pitchBytes * i);
    int j;
    for (j = 0; j < map.numHorizPoints; ++j)
    {
        row[j].x = correction_x.at<float>(i, j);
        row[j].y = correction_y.at<float>(i, j);
    }
}

VPIPayload warp;
vpiCreateRemap(VPI_BACKEND_CUDA, &map, &warp);

vpiSubmitRemap(stream, VPI_BACKEND_CUDA, warp, src_image, output, VPI_INTERP_LINEAR, VPI_BORDER_ZERO, 0);
vpiStreamSync(stream);

Is it the best way to do it?

Is there a way to initialize the VPIWarpMap with the pre-calculated correction matrices without these for loops?

lior.i
  • 573
  • 1
  • 7
  • 20

0 Answers0