I'm trying to stream the live view of a SONY FDR-X1000V camera on desktop. I use Python to call the API and download the stream packets, and OpenCV to decode the JPEG frames. When I run it, it can hardly catch one frame per second. Later I found out that the payload size of a JPEG can be 8MB. However, the FDR-X1000V doesn't support changing the live view size. But when I use the app on an iPhone to do the liveview, it streams smoothly. So here are my questions: 1. Is it normal for a JPEG payload to be as large as 8MB? 2. If so, how can I stream the live view smoothly?
Here is my code:
try:
result = api.do('startLiveview')
url = result['result'][0]
except KeyError:
print result
f = urllib2.urlopen(url)
buff = ''
chunk_size = 32768
for i in xrange(3000):
if len(buff) < chunk_size:
time_s = time.time()
buff = buff + f.read(chunk_size)
print "Download Speed %f KB/s"%(chunk_size/1000/(time.time() - time_s))
time_s = time.time()
start_code = ''.join(buff).find('$5hy')
# print "LCS time cost", time.time() - time_s
if start_code < 0:
buff = buff[-12:]
print "skip", len(buff)-12
elif start_code < 8:
buff = buff[8:]
else:
if start_code > len(buff) - 129:
buff = buff + f.read(chunk_size)
payload_type = ord(buff[start_code-7])
payload_size, = struct.unpack('<I', buff[start_code+4:start_code+8].ljust(4,'\0'))
padding_size = ord(buff[start_code+8])
print "Type:%d\tPayload:%d\tPadding:%d\t"%(payload_type,payload_size,padding_size)
buff = buff[start_code+128:]
if payload_type == 1:
if payload_size + padding_size > len(buff):
time_s = time.time()
download_size = payload_size+padding_size-len(buff)
buff = buff + f.read(download_size)
print "Download Speed %f KB/s"%(download_size/1000/(time.time() - time_s))
img_data = buff[:payload_size]
buff = buff[payload_size:]
time_s = time.time()
d = np.asarray(bytearray(img_data), dtype='uint8')
img = cv2.imdecode(d,cv2.IMREAD_COLOR)
cv2.imshow('postview',img)
cv2.waitKey(30)
# print "Decode time cost", time.time() - time_s
Some output:
Type:1 Payload:8410624 Padding:0
Download Speed 679.626326 KB/s