I'm using TextureView with MediaPlayer for my custom video component. If the video's size is larger than or equal to the TextureView's size, everything is fine. But if the video is smaller (say the video is 720x576 and the TextureView is 1280x1024), the quality is bad, as if no anti-aliasing were applied.
What's interesting is that if I use SurfaceView in exactly the same situation, it seems that SurfaceView applies some kind of anti-aliasing, so I get a better picture.
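For reference, the SurfaceView comparison is nothing special, just a plain SurfaceView + MediaPlayer setup along these lines (a minimal sketch; the layout id and the URI are placeholders):
// Sketch of the SurfaceView + MediaPlayer setup used for comparison.
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view); // placeholder id
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        try {
            MediaPlayer player = new MediaPlayer();
            player.setDataSource("http://some.source"); // placeholder URI
            player.setDisplay(holder); // render the video into the SurfaceView
            player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    mp.start();
                }
            });
            player.prepareAsync();
        } catch (Exception e) {
            // error handling omitted for brevity
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
});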
I tried to apply a Paint object to the TextureView:
Paint paint = new Paint();
paint.setFlags(Paint.ANTI_ALIAS_FLAG);
paint.setAntiAlias(true);
setLayerPaint(paint);
But it doesn't improve the situation.
I found that using setScaleX(1.00001f) helps a little, but the quality is still bad.
Is there a way to apply anti-aliasing to a TextureView?
Here's the component code.
package com.example.app;
import android.app.Activity;
import android.content.Context;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import java.util.HashMap;
import com.example.app.entities.Channel;
public class TextureVideoView extends TextureView implements MediaPlayer.OnPreparedListener, TextureView.SurfaceTextureListener {

    private Context context;
    private MediaPlayer mediaPlayer;
    private SurfaceTexture surfaceTexture;
    private Uri uri;
    private Surface surface;
    private Channel.AspectRatio currentAspectRatio;
    private Channel.AspectRatio targetAspectRatio;
    private int videoWidth = 0;
    private int videoHeight = 0;
    private int screenWidth;
    private int screenHeight;
    private int targetState = STATE_IDLE;
    private int currentState = STATE_IDLE;

    private static final int STATE_IDLE = 0;
    private static final int STATE_PLAYING = 1;
    private static final int STATE_PAUSED = 2;
    private static final int STATE_PREPARING = 3;
    private static final int STATE_PREPARED = 4;

    public TextureVideoView(Context context) {
        super(context);
        this.context = context;
        Display display = ((Activity) context).getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        screenWidth = size.x;
        screenHeight = size.y;
        setScaleX(1.00001f);
        Paint paint = new Paint();
        paint.setDither(true);
        paint.setFilterBitmap(true);
        paint.setFlags(Paint.ANTI_ALIAS_FLAG);
        paint.setAntiAlias(true);
        setLayerPaint(paint);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        setLayoutParams(params);
        currentAspectRatio = Channel.getInstance().getFullScreenAspectRatio();
        setSurfaceTextureListener(this);
    }

    public void setVideoURI(Uri uri) {
        release();
        this.uri = uri;
        if (surfaceTexture == null) {
            return;
        }
        try {
            mediaPlayer = new MediaPlayer();
            mediaPlayer.setOnPreparedListener(this);
            mediaPlayer.setDataSource(context, uri, new HashMap<String, String>());
            mediaPlayer.setScreenOnWhilePlaying(true);
            mediaPlayer.prepareAsync();
            surface = new Surface(surfaceTexture);
            mediaPlayer.setSurface(surface);
            currentState = STATE_PREPARING;
        }
        catch (Exception e) {
        }
    }

    public void start() {
        if (isInPlaybackState()) {
            mediaPlayer.start();
        }
        targetState = STATE_PLAYING;
    }

    public void pause() {
        if (isInPlaybackState()) {
            mediaPlayer.pause();
            currentState = STATE_PAUSED;
        }
        targetState = STATE_PAUSED;
    }

    public void stopPlayback() {
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
            currentState = STATE_IDLE;
            targetState = STATE_IDLE;
        }
    }

    public int getCurrentPosition() {
        return mediaPlayer.getCurrentPosition();
    }

    public boolean isPlaying() {
        return mediaPlayer.isPlaying();
    }

    private boolean isInPlaybackState() {
        return mediaPlayer != null && (currentState == STATE_PLAYING || currentState == STATE_PREPARED);
    }

    private void release() {
        if (mediaPlayer != null) {
            mediaPlayer.reset();
            mediaPlayer.release();
        }
        if (surface != null) {
            surface.release();
        }
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        currentState = STATE_PREPARED;
        if (targetState == STATE_PLAYING) {
            start();
        }
        else if (targetState == STATE_PAUSED) {
            pause();
        }
        videoWidth = mediaPlayer.getVideoWidth();
        videoHeight = mediaPlayer.getVideoHeight();
        applyAspectRatio();
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        surfaceTexture = surface;
        if (currentState == STATE_IDLE && uri != null) {
            setVideoURI(uri);
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }

    public void setAspectRatio(Channel.AspectRatio aspectRatio) {
        targetAspectRatio = aspectRatio;
        applyAspectRatio();
    }

    public void clearCurrentAspectRatio() {
        currentAspectRatio = null;
        videoWidth = 0;
        videoHeight = 0;
    }

    private void applyAspectRatio() {
        if (videoWidth == 0 || videoHeight == 0) {
            return;
        }
        currentAspectRatio = targetAspectRatio;
        System.out.println(currentAspectRatio.label);
        System.out.println("screen width: " + screenWidth);
        System.out.println("screen height: " + screenHeight);
        System.out.println("original video width: " + videoWidth);
        System.out.println("original video height: " + videoHeight);
        ViewGroup.LayoutParams params = getLayoutParams();
        if (currentAspectRatio.ratio == Channel.RATIO_FULL_WIDTH) {
            params.width = screenWidth;
            params.height = videoHeight * screenWidth / videoWidth;
        }
        else {
            params.height = screenHeight;
            switch (currentAspectRatio.ratio) {
                case Channel.RATIO_16_9:
                    params.width = screenHeight * 16 / 9;
                    break;
                case Channel.RATIO_4_3:
                    params.width = screenHeight * 4 / 3;
                    break;
                case Channel.RATIO_ORIGINAL:
                    params.width = videoWidth * screenHeight / videoHeight;
                    break;
            }
        }
        System.out.println("video width: " + params.width);
        System.out.println("video height: " + params.height);
        if (params.width == getWidth() && params.height == getHeight()) {
            return;
        }
        setLayoutParams(params);
    }
}
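For context, this is roughly how the component is driven from an Activity (a minimal sketch; the URI is a placeholder and the aspect ratio value is just the one the constructor already uses):
// Sketch: basic usage of TextureVideoView from an Activity (placeholder URI).
TextureVideoView videoView = new TextureVideoView(this);
((ViewGroup) findViewById(android.R.id.content)).addView(videoView);
videoView.setAspectRatio(Channel.getInstance().getFullScreenAspectRatio());
videoView.setVideoURI(Uri.parse("http://some.source"));
videoView.start(); // actual playback begins once MediaPlayer calls onPrepared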
Update
Based on fadden's answer, I wrote this code:
TextureView textureView = new TextureView(this);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
textureView.setLayoutParams(params);
((ViewGroup) findViewById(android.R.id.content)).addView(textureView);
textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        try {
            // Stand-alone Surface (with its own SurfaceTexture) to receive
            // data from the MediaPlayer.
            Surface source = new Surface(new SurfaceTexture(111));
            EglCore mEglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
            // WindowSurface backed by the SurfaceTexture that was received
            // from the TextureView that is in my layout.
            WindowSurface windowSurface = new WindowSurface(mEglCore, new Surface(surfaceTexture), true);
            // Make that WindowSurface read data from the source
            // (the stand-alone Surface), which in turn receives data
            // from the MediaPlayer.
            windowSurface.makeCurrentReadFrom(new WindowSurface(mEglCore, source, true));
            // Change the scaling mode.
            // Is it ok that I use GLES20.GL_TEXTURE_2D here?
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            MediaPlayer mediaPlayer = new MediaPlayer();
            // The MediaPlayer directs data to the stand-alone Surface; as a result,
            // the windowSurface should output that data with GL_TEXTURE_MAG_FILTER
            // set to GLES20.GL_LINEAR.
            mediaPlayer.setSurface(source);
            mediaPlayer.setDataSource(TestActivity.this, Uri.parse("http://some.source"));
            mediaPlayer.prepare();
            mediaPlayer.start();
        } catch (Exception e) {
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
});
But I get this error:
E/BufferQueue: [unnamed-28441-1] connect: already connected (cur=1, req=3)
What is wrong?
Update
Finally I got what @fadden had suggested working. But GL_LINEAR is not enough in my case; SurfaceView uses something more advanced (like bi-cubic interpolation).
Below is my attempt at bi-cubic interpolation as a GLSL fragment shader (based on the source from here: http://www.codeproject.com/Articles/236394/Bi-Cubic-and-Bi-Linear-Interpolation-with-GLSL).
In my case it doesn't work properly: the picture gets dimmer, the performance is worse (~5 fps), and I also get horizontal and vertical stripes. What could be wrong?
#extension GL_OES_EGL_image_external : require
precision mediump float;

varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
uniform vec2 invScreenSize;

float BellFunc( float x )
{
    float f = ( x / 2.0 ) * 1.5; // Converting -2 to +2 to -1.5 to +1.5
    if( f > -1.5 && f < -0.5 )
    {
        return( 0.5 * pow(f + 1.5, 2.0));
    }
    else if( f > -0.5 && f < 0.5 )
    {
        return 3.0 / 4.0 - ( f * f );
    }
    else if( f > 0.5 && f < 1.5 )
    {
        return( 0.5 * pow(f - 1.5, 2.0));
    }
    return 0.0;
}

vec4 BiCubic( samplerExternalOES textureSampler, vec2 TexCoord )
{
    float texelSizeX = 1.0 / invScreenSize.x; // size of one texel
    float texelSizeY = 1.0 / invScreenSize.y; // size of one texel
    vec4 nSum = vec4( 0.0, 0.0, 0.0, 0.0 );
    vec4 nDenom = vec4( 0.0, 0.0, 0.0, 0.0 );
    float a = fract( TexCoord.x * invScreenSize.x ); // get the decimal part
    float b = fract( TexCoord.y * invScreenSize.y ); // get the decimal part
    for( int m = -1; m <= 2; m++ )
    {
        for( int n = -1; n <= 2; n++ )
        {
            vec4 vecData = texture2D(textureSampler,
                    TexCoord + vec2(texelSizeX * float( m ), texelSizeY * float( n )));
            float f = BellFunc( float( m ) - a );
            vec4 vecCooef1 = vec4( f, f, f, f );
            float f1 = BellFunc( -( float( n ) - b ) );
            vec4 vecCoeef2 = vec4( f1, f1, f1, f1 );
            nSum = nSum + ( vecData * vecCoeef2 * vecCooef1 );
            nDenom = nDenom + ( vecCoeef2 * vecCooef1 );
        }
    }
    return nSum / nDenom;
}

void main() {
    gl_FragColor = BiCubic(sTexture, vTextureCoord);
}
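For completeness, the only extra input the shader takes besides the texture itself is the invScreenSize uniform, which is set from the Java side with standard GLES20 calls, roughly like this (a minimal sketch; the method and parameter names are placeholders, not my real renderer code):
// Sketch: passing the texture size to the shader (android.opengl.GLES20).
// "program" is the linked program containing the fragment shader above.
static void setInvScreenSize(int program, int textureWidth, int textureHeight) {
    GLES20.glUseProgram(program);
    int handle = GLES20.glGetUniformLocation(program, "invScreenSize");
    // Despite the name, the shader body divides by this value to get the texel size,
    // so it expects the texture size itself rather than its reciprocal.
    GLES20.glUniform2f(handle, (float) textureWidth, (float) textureHeight);
}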