Currently, I have a working prototype app that streams the output from one phone's camera to another phone's screen.
However, the delay between the stream and the actual video ranges from 1 second to up to 5 seconds. For my particular use case, I need the delay to be less than 1 second.
I have found that bitrate / resolution does not affect the delay.
I am using the libstreaming library to stream the h264 video over wifi-direct.
It appears to support only h263 or h264 encoding, and I've found that h263 does not work as consistently as h264.
Here is the code that captures the video and streams to an RTSP server:
/**
 * Sets up the streaming (server) activity: locks orientation, keeps the
 * screen on, reads the requested bitrate/resolution from the launching
 * Intent, configures libstreaming's SessionBuilder for H.264 video at
 * 20 fps (no audio), and starts the RTSP server on port 8988.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_server);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    mSurfaceView = (SurfaceView) findViewById(R.id.surface);

    // Sets the port of the RTSP server to 8988.
    Editor editor = PreferenceManager.getDefaultSharedPreferences(
            getApplicationContext()).edit();
    editor.putString(RtspServer.KEY_PORT, String.valueOf(8988));
    // apply() persists asynchronously; commit() would block the UI thread.
    editor.apply();

    // Parse the requested bitrate, clamped to a 100 kbps floor. A missing
    // or malformed extra falls back to the floor instead of crashing with
    // a NullPointerException / NumberFormatException.
    int bitrate = 100000;
    String bitrateExtra = getIntent().getStringExtra(BITRATE);
    if (bitrateExtra != null) {
        try {
            bitrate = Math.max(100000, Integer.parseInt(bitrateExtra));
        } catch (NumberFormatException ignored) {
            // Keep the default floor on bad input.
        }
    }

    // Resolve the requested resolution; unknown or missing values fall
    // through to the 176x144 default. Calling equals() on the literal
    // makes a null extra safe.
    String resolution = getIntent().getStringExtra(RESOLUTION);
    int resX = 176;
    int resY = 144;
    if ("352x288".equals(resolution)) {
        resX = 352;
        resY = 288;
    } else if ("528x432".equals(resolution)) {
        resX = 528;
        resY = 432;
    } else if ("704x576".equals(resolution)) {
        resX = 704;
        resY = 576;
    }

    Toast.makeText(this, "Resolution: " + resX + "x" + resY + ", Bitrate: "
            + bitrate, Toast.LENGTH_LONG).show();

    // Configures the SessionBuilder: video-only H.264 at 20 fps.
    SessionBuilder.getInstance().setSurfaceView(mSurfaceView)
            .setPreviewOrientation(0).setContext(this)
            .setVideoQuality(new VideoQuality(resX, resY, 20, bitrate))
            .setAudioEncoder(SessionBuilder.AUDIO_NONE)
            .setVideoEncoder(SessionBuilder.VIDEO_H264);

    // Starts the RTSP server.
    getApplicationContext().startService(
            new Intent(getApplicationContext(), RtspServer.class));
}
And this is the code for viewing the streaming video:
/**
 * Sets up the viewer (client) activity: reads the server's IP address from
 * the launching Intent and registers this activity as the SurfaceHolder
 * callback so playback can start once the surface exists (see
 * surfaceCreated).
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_client);
    mVideoIP = getIntent().getStringExtra(SERVER_IP);
    mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
    mSurfaceHolder = mSurfaceView.getHolder();
    // The MediaPlayer is created in surfaceCreated(), not here, because a
    // display surface must exist first.
    mSurfaceHolder.addCallback(this);
    // NOTE(review): the original had an empty "if (savedInstanceState ==
    // null) {}" block here — dead code, removed.
}
...
/**
 * Builds the MediaPlayer once the display surface exists and begins an
 * asynchronous prepare of the RTSP stream from the server.
 *
 * Fixes over the original:
 * - The OnPreparedListener is registered BEFORE preparing. The original
 *   set it after a synchronous prepare() had already returned, so the
 *   onPrepared callback was never delivered through the listener.
 * - prepareAsync() replaces prepare(): for a network (RTSP) source,
 *   synchronous prepare() blocks the UI thread until the stream is
 *   negotiated and buffered, risking an ANR.
 */
@Override
public void surfaceCreated(SurfaceHolder holder) {
    try {
        mMediaPlayer = new MediaPlayer();
        mMediaPlayer.setDisplay(mSurfaceHolder);
        mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        mMediaPlayer.setScreenOnWhilePlaying(true);
        mMediaPlayer.setDataSource("rtsp://" + mVideoIP
                + ":8988");
        // Listener must be in place before prepareAsync() so onPrepared
        // fires when the stream is ready.
        mMediaPlayer.setOnPreparedListener(this);
        mMediaPlayer.prepareAsync();
    } catch (IllegalArgumentException | SecurityException
            | IllegalStateException | IOException e) {
        // TODO(review): surface the failure to the user (toast/finish);
        // stack trace alone leaves a silently black screen.
        e.printStackTrace();
    }
}