
I am not interested in WebRTC; it doesn't work for my use case.

Here is my basic flow:

a. Open the web page -> stream live video frames from the webcam (MediaRecorder with the video/webm; codecs="vp8" MIME type; there is a short support-check sketch right after this list) ->

b. Send the video frames to a websocket server (a Java EE websocket server in this case) ->

c. Broadcast the video frames to the subscribed websocket clients ->

d. Receive the video frames at the client web page ->

e. Play back the live video frames.
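
For step (a), a quick sanity check that the browser actually supports the chosen container/codec before creating the recorder; a minimal sketch, reusing the same MIME string:

var mimeType = 'video/webm; codecs="vp8"';
if (!window.MediaRecorder || !MediaRecorder.isTypeSupported(mimeType)) {
    // Recording with an unsupported MIME type is one easy way to end up
    // with chunks the receiving side cannot play back.
    console.error(mimeType + " is not supported in this browser");
}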

My primary issue is playing back the live video frames once they reach the client side.

I have tried using MSE (Media Source Extensions), but it didn't work smoothly: I ended up with flickering video, and if a client connects to the websocket after the stream has already started on the streamer page, I have to restart the streamer page to reinitialize and redisplay the stream on the client side. I can see a continuous flow of ArrayBuffer video data logged in the client-side browser console.
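
For reference, the client-side MSE receiver is roughly along these lines (a minimal sketch rather than my exact page; the video element, the WebSocket URL, and the MIME string mirroring the streamer's options are assumptions):

<video id="player" autoplay muted></video>
<script>
    var video = document.getElementById("player");
    var mediaSource = new MediaSource();
    video.src = URL.createObjectURL(mediaSource);

    mediaSource.addEventListener("sourceopen", function () {
        // Mirror the MIME string the streamer records with.
        var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
        var queue = [];

        // appendBuffer is asynchronous: only one append may run at a time,
        // so extra chunks wait in the queue until 'updateend' fires.
        sourceBuffer.addEventListener("updateend", function () {
            if (queue.length > 0 && !sourceBuffer.updating) {
                sourceBuffer.appendBuffer(queue.shift());
            }
        });

        var socket = new WebSocket("ws://localhost:8813/mainws/actions");
        socket.binaryType = "arraybuffer";
        socket.onmessage = function (evt) {
            if (sourceBuffer.updating || queue.length > 0) {
                queue.push(evt.data);
            } else {
                sourceBuffer.appendBuffer(evt.data);
            }
        };
    });
</script>

Even with the queueing, the chunks only decode if the very first buffer a client receives starts with the WebM/EBML header, which is why a late joiner currently forces a streamer restart.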

Below is the streamer.html code:

<html>
<head>
    <title>TODO supply a title</title>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body>
    <div>TODO write content</div>

    <script>
        navigator.mediaDevices.getUserMedia({ video: true, audio: true }).then(stream => {
            var socket, mediaRecorder;
            var options = {
                mimeType: 'video/webm; codecs="vp8"',
                bitsPerSecond: 5000 // quality
            };

            // (Re)create the recorder and start pushing chunks over the socket.
            // Restarting MediaRecorder makes it emit a fresh WebM/EBML header.
            function handleVideo() {
                try {
                    mediaRecorder.stop();
                } catch (e) {}

                mediaRecorder = new MediaRecorder(stream, options);
                mediaRecorder.ondataavailable = function (e) {
                    if (e.data && e.data.size > 0) {
                        e.data.arrayBuffer().then(buffer => {
                            socket.send(buffer);
                        });
                    }
                };
                mediaRecorder.start(200); // emit a chunk every 200 ms
            }

            function connect() {
                socket = new WebSocket("ws://localhost:8813/mainws/actions");
                socket.binaryType = "arraybuffer";
                socket.onopen = function (evt) { console.log("Socket opened"); handleVideo(); };
                socket.onclose = function (evt) { console.log("Socket closed"); connect(); };
                socket.onerror = function (evt) { console.log("Error: " + evt.data); };
                socket.onmessage = function (evt) {
                    // The server could notify here when a new client subscribes, so the
                    // recorder can be restarted via handleVideo() and a fresh header sent.
                };
            }

            connect();
        });
        // The onmessage handler matters when someone joins the stream later on,
        // because the WebM format needs its EBML header, which is only emitted
        // when the recorder (re)starts.
    </script>
</body>
</html>

A colleague recently recommended playing the stream on the client side with the jsmpeg.js library.
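
From what I understand, jsmpeg decodes MPEG1 video (usually wrapped in MPEG-TS) rather than WebM, so the MediaRecorder/WebM part would have to be replaced by an MPEG1 encoder feeding the websocket server (ffmpeg is commonly used for this). The client page would then presumably reduce to something like the sketch below (assuming the JSMpeg build that exposes JSMpeg.Player; the canvas id and the WebSocket URL are placeholders):

<canvas id="videoCanvas"></canvas>
<script src="jsmpeg.min.js"></script>
<script>
    // JSMpeg opens the websocket itself and decodes the MPEG1/MPEG-TS stream
    // in JavaScript, drawing the frames onto the canvas.
    var player = new JSMpeg.Player("ws://localhost:8813/mainws/actions", {
        canvas: document.getElementById("videoCanvas")
    });
</script>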

I do not know if I am on the right path; any push in the right direction would be highly appreciated. Thanks.
