I want this effect. [1]: https://i.stack.imgur.com/OivzQ.jpg
As you can see, when the object is moving the background around it is distorting and warping. I cannot sure how I can explain exactly. So I've tried by trying motion blur effect along the moving object. This is my code.
import cv2
from MotionBlur import LinearMotionBlur
from skimage.draw import line
import numpy as np
from skimage.filters import gaussian
# Load the input video and create a VideoCapture object
cap = cv2.VideoCapture('input_video.mp4')
first_frame = True
init_x = 0
init_y = 0
# Create a background subtractor object
bg_subtractor = cv2.createBackgroundSubtractorMOG2(history=250, varThreshold=500)
blurIntensity = 200
# The direction to blur, can be any of number type
# angle = 320
while True:
# Read a frame from the video
ret, frame = cap.read()
if not ret:
break
# Apply background subtraction to detect moving objects
fg_mask = bg_subtractor.apply(frame)
# Apply morphological operations to remove noise from the foreground mask
kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (5, 5))
fg_mask = cv2.morphologyEx(fg_mask, cv2.MORPH_OPEN, kernel)
# Find contours in the foreground mask
contours, hierarchy = cv2.findContours(fg_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# Draw bounding boxes around the detected contours
for contour in contours:
x, y, w, h = cv2.boundingRect(contour)
if first_frame:
init_x = x
init_y = y
first_frame = False
rr, cc = line(0,0,200,200)
kernel_size = 50
# Create the vertical kernel.
kernel_v = np.zeros((kernel_size, kernel_size))
# Create a copy of the same for creating the horizontal kernel.
kernel_h = np.copy(kernel_v)
# Fill the middle row with ones.
kernel_v[:, int((kernel_size - 1)/2)] = np.ones(kernel_size)
kernel_h[int((kernel_size - 1)/2), :] = np.ones(kernel_size)
# Normalize.
kernel_v /= kernel_size
kernel_h /= kernel_size
# Apply the vertical kernel.
# frame = cv2.filter2D(frame, -1, kernel_v)
# Apply the horizontal kernel.
frame[x+w:init_x-x, y:h] = cv2.filter2D(frame[x+w:init_x-x, y:h], -1, kernel_h)
# Apply a Gaussian filter to the line segment to simulate motion blur
# frame[rr, cc] = gaussian(frame[rr, cc], sigma=5)
cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 255), 2)
# frame[rr, cc] = LinearMotionBlur(frame[rr, cc], blurIntensity, angle, linetype="left")
# Display the output frame
cv2.imshow('Output', frame)
# Exit if 'q' is pressed
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the VideoCapture object and close all windows
cap.release()
cv2.destroyAllWindows()
And when I was trying to run this code. The Assertion Failed Error is occurred in line
frame[x+w:init_x-x, y:h] = cv2.filter2D(frame[x+w:init_x-x, y:h], -1, kernel_h)
I cannot find out why this happens. I'm new to video streaming using Python. I'm looking forward to hearing from anybody who can help me. Can I get the result with this effect? If you have any other proper way, could you let me know?