0

I'm using TextureView with MediaPlayer for my custom video component.

If a video's size is larger than or the same as the TextureView's size, then everything's fine.

But if the size of the video is smaller (say 720x576, while the TextureView's size is 1280x1024), then the quality is bad, as if no anti-aliasing were applied.

What's interesting, if I use SurfaceView in exactly the same situation, it seems that the SurfaceView applies some kind of anti-aliasing, so I get a better picture.

I tried to apply a Paint object to the TextureView:

    Paint paint = new Paint();
    paint.setFlags(Paint.ANTI_ALIAS_FLAG);
    paint.setAntiAlias(true);
    setLayerPaint(paint);

But it doesn't improve the situation.

I found that using setScaleX(1.00001f); helps, but not much, the quality is still bad.

Is there a way to apply anti-aliasing to TextureView?

Here's the component code.

package com.example.app;

import android.app.Activity;
import android.content.Context;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.LinearLayout;

import java.util.HashMap;

import com.example.app.entities.Channel;

/**
 * A video view that renders a {@link MediaPlayer}'s output into a
 * {@link TextureView} and resizes itself according to a requested aspect
 * ratio from {@link Channel.AspectRatio}.
 *
 * <p>Usage: construct, call {@link #setVideoURI(Uri)}, then {@link #start()}.
 * If the backing {@link SurfaceTexture} is not available yet, loading is
 * deferred until {@link #onSurfaceTextureAvailable} fires.
 */
public class TextureVideoView extends TextureView implements MediaPlayer.OnPreparedListener, TextureView.SurfaceTextureListener {

    private static final String TAG = "TextureVideoView";

    // Simple playback state machine.
    private static final int STATE_IDLE = 0;
    private static final int STATE_PLAYING = 1;
    private static final int STATE_PAUSED = 2;
    private static final int STATE_PREPARING = 3;
    private static final int STATE_PREPARED = 4;

    private Context context;
    private MediaPlayer mediaPlayer;
    private SurfaceTexture surfaceTexture;
    private Uri uri;
    private Surface surface;
    private Channel.AspectRatio currentAspectRatio;
    private Channel.AspectRatio targetAspectRatio;

    // Intrinsic video dimensions; only known after onPrepared().
    private int videoWidth = 0;
    private int videoHeight = 0;

    // Display dimensions, captured once in the constructor.
    private int screenWidth;
    private int screenHeight;

    // currentState is where the player actually is; targetState is what the
    // caller last asked for, so start()/pause() issued before the player is
    // prepared are honored in onPrepared().
    private int targetState = STATE_IDLE;
    private int currentState = STATE_IDLE;

    public TextureVideoView(Context context) {
        super(context);

        this.context = context;

        // NOTE(review): assumes the supplied Context is always an Activity —
        // confirm against all call sites, or guard the cast.
        Display display = ((Activity) context).getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        screenWidth = size.x;
        screenHeight = size.y;

        // Workaround: a scale factor != 1 forces TextureView through a
        // filtered GPU path, which slightly improves upscaling quality.
        setScaleX(1.00001f);
        Paint paint = new Paint();
        paint.setDither(true);
        paint.setFilterBitmap(true);
        paint.setFlags(Paint.ANTI_ALIAS_FLAG);
        paint.setAntiAlias(true);
        setLayerPaint(paint);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        setLayoutParams(params);
        currentAspectRatio = Channel.getInstance().getFullScreenAspectRatio();
        setSurfaceTextureListener(this);
    }

    /**
     * Releases any previous player and starts loading {@code uri}
     * asynchronously. If the surface is not yet available, the URI is
     * remembered and loading is retried from onSurfaceTextureAvailable().
     */
    public void setVideoURI(Uri uri) {
        release();
        this.uri = uri;
        if (surfaceTexture == null) {
            return;
        }
        try {
            mediaPlayer = new MediaPlayer();
            mediaPlayer.setOnPreparedListener(this);
            mediaPlayer.setDataSource(context, uri, new HashMap<String, String>());
            mediaPlayer.setScreenOnWhilePlaying(true);
            // FIX: attach the output surface BEFORE prepareAsync(), so the
            // player never begins preparing without a render target.
            surface = new Surface(surfaceTexture);
            mediaPlayer.setSurface(surface);
            mediaPlayer.prepareAsync();
            currentState = STATE_PREPARING;
        }
        catch (Exception e) {
            // FIX: was an empty catch that silently swallowed setup failures.
            Log.e(TAG, "Failed to open video " + uri, e);
            currentState = STATE_IDLE;
        }
    }

    /** Starts playback now, or as soon as the player becomes prepared. */
    public void start() {
        if (isInPlaybackState()) {
            mediaPlayer.start();
        }
        targetState = STATE_PLAYING;
    }

    /** Pauses playback now, or as soon as the player becomes prepared. */
    public void pause() {
        if (isInPlaybackState()) {
            mediaPlayer.pause();
            currentState = STATE_PAUSED;
        }
        targetState = STATE_PAUSED;
    }

    /** Stops playback and releases the player. Safe to call repeatedly. */
    public void stopPlayback() {
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
            currentState = STATE_IDLE;
            targetState  = STATE_IDLE;
        }
    }

    /**
     * Current playback position in milliseconds, or 0 when no player exists.
     * FIX: previously threw NullPointerException if called before setVideoURI()
     * or after stopPlayback().
     */
    public int getCurrentPosition() {
        return mediaPlayer != null ? mediaPlayer.getCurrentPosition() : 0;
    }

    /** Whether a player exists and is currently playing (FIX: null-safe). */
    public boolean isPlaying() {
        return mediaPlayer != null && mediaPlayer.isPlaying();
    }

    private boolean isInPlaybackState() {
        return mediaPlayer != null && (currentState == STATE_PLAYING || currentState == STATE_PREPARED);
    }

    /** Releases the player and surface. FIX: clears the stale references. */
    private void release() {
        if (mediaPlayer != null) {
            mediaPlayer.reset();
            mediaPlayer.release();
            mediaPlayer = null;
        }
        if (surface != null) {
            surface.release();
            surface = null;
        }
        currentState = STATE_IDLE;
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        currentState = STATE_PREPARED;
        // Honor whatever the caller requested while we were preparing.
        if (targetState == STATE_PLAYING) {
            start();
        }
        else if (targetState == STATE_PAUSED) {
            pause();
        }

        videoWidth = mediaPlayer.getVideoWidth();
        videoHeight = mediaPlayer.getVideoHeight();
        applyAspectRatio();
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        surfaceTexture = surface;
        // A URI set before the surface existed is loaded now.
        if (currentState == STATE_IDLE && uri != null) {
            setVideoURI(uri);
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        // FIX: previously returned false without anyone ever releasing the
        // SurfaceTexture, leaking it. Release playback and return true so
        // the framework disposes of the SurfaceTexture.
        release();
        surfaceTexture = null;
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {

    }

    /** Requests a new aspect ratio; applied once the video size is known. */
    public void setAspectRatio(Channel.AspectRatio aspectRatio) {
        targetAspectRatio = aspectRatio;
        applyAspectRatio();
    }

    public void clearCurrentAspectRatio() {
        currentAspectRatio = null;
        videoWidth = 0;
        videoHeight = 0;
    }

    /**
     * Resizes this view to match the selected aspect ratio, anchored to the
     * full screen width or height. No-op until the video size is known.
     */
    private void applyAspectRatio() {
        if (videoWidth == 0 || videoHeight == 0) {
            return;
        }
        // FIX: targetAspectRatio may still be null the first time
        // onPrepared() runs; fall back to the constructor-initialized ratio
        // instead of dereferencing null below.
        if (targetAspectRatio != null) {
            currentAspectRatio = targetAspectRatio;
        }
        if (currentAspectRatio == null) {
            return;
        }
        Log.d(TAG, currentAspectRatio.label);

        Log.d(TAG, "screen width: " + screenWidth);
        Log.d(TAG, "screen height: " + screenHeight);

        Log.d(TAG, "original video width: " + videoWidth);
        Log.d(TAG, "original video height: " + videoHeight);

        ViewGroup.LayoutParams params = getLayoutParams();

        if (currentAspectRatio.ratio == Channel.RATIO_FULL_WIDTH) {
            params.width = screenWidth;
            params.height = videoHeight * screenWidth / videoWidth;
        }
        else {
            params.height = screenHeight;
            switch (currentAspectRatio.ratio) {
                case (Channel.RATIO_16_9):
                    params.width = screenHeight * 16 / 9;
                    break;
                case (Channel.RATIO_4_3):
                    params.width = screenHeight * 4 / 3;
                    break;
                case (Channel.RATIO_ORIGINAL):
                    params.width = videoWidth * screenHeight / videoHeight;
                    break;

            }
        }

        Log.d(TAG, "video width: " + params.width);
        Log.d(TAG, "video height: " + params.height);

        // Avoid a relayout when nothing changed.
        if (params.width == getWidth() && params.height == getHeight()) {
            return;
        }

        setLayoutParams(params);
    }
}

Update

Based on the fadden's answer, I wrote this code:

    // NOTE(review): this snippet fails with
    //   E/BufferQueue: connect: already connected (cur=1, req=3)
    // because the stand-alone `source` Surface ends up with TWO producers:
    // MediaPlayer (via setSurface) AND EGL (the second WindowSurface calls
    // eglCreateWindowSurface on the same Surface). A BufferQueue accepts
    // exactly one producer at a time, hence "already connected".
    TextureView textureView = new TextureView(this);
    LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
    textureView.setLayoutParams(params);
    ((ViewGroup)findViewById(android.R.id.content)).addView(textureView);
    textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
            try {
                //stand-alone Surface with TextureView to receive 
                //data from MediaPlayer
                // NOTE(review): "111" is an arbitrary GL texture name chosen
                // without any EGL context current. The texture name should be
                // generated with glGenTextures after makeCurrent — TODO
                // confirm against Grafika's usage.
                Surface source = new Surface(new SurfaceTexture(111));

                EglCore mEglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
                //WindowSurface backed by a SurfaceTexture that was received 
                //from a TextureView that is in my layout
                WindowSurface windowSurface = new WindowSurface(mEglCore, 
                        new Surface(surfaceTexture), true);
                //Make that WindowSurface read data from the source 
                //(stand-alone Surface), which in turn receives data 
                //from the MediaPlayer
                // NOTE(review): wrapping `source` in a WindowSurface makes
                // EGL a second producer on its BufferQueue — this is where
                // the "already connected" conflict is created. Also,
                // makeCurrentReadFrom only binds surfaces; it does not copy
                // frames. Each frame must be rendered explicitly from an
                // onFrameAvailable callback (e.g. with Grafika's
                // FullFrameRect), per fadden's comment below.
                windowSurface.makeCurrentReadFrom(new WindowSurface(mEglCore, 
                        source, true));
                //Change the scaling mode. 
                //is it ok that I use GLES20.GL_TEXTURE_2D?
                // NOTE(review): for a SurfaceTexture-backed texture the
                // target is GL_TEXTURE_EXTERNAL_OES, not GL_TEXTURE_2D, and
                // the texture must be bound when this is called — TODO
                // confirm.
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, 
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

                MediaPlayer mediaPlayer = new MediaPlayer();
                //The MediaPlayer directs data to the 
                //stand-alone Surface, as a result the 
                //windowSurface must output that data with 
                //GL_TEXTURE_MAG_FILTER set to GLES20.GL_LINEAR
                mediaPlayer.setSurface(source);
                mediaPlayer.setDataSource(TestActivity.this, 
                        Uri.parse("http://some.source"));
                mediaPlayer.prepare();
                mediaPlayer.start();
            } catch (Exception e) {
                // NOTE(review): empty catch — any setup failure (including
                // the BufferQueue conflict) is silently swallowed here.
            }
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {

        }
    });

But I get this error:

E/BufferQueue: [unnamed-28441-1] connect: already connected (cur=1, req=3)

What is wrong?

Update: I finally got what @fadden had suggested working. But GL_LINEAR is not enough in my case; SurfaceView seems to use something more advanced (like bi-cubic interpolation).

Below is my attempt to use bi-cubic interpolation in a GLSL fragment shader (based on the source from here: http://www.codeproject.com/Articles/236394/Bi-Cubic-and-Bi-Linear-Interpolation-with-GLSL). But in my case it doesn't work properly: the picture gets dimmer, the performance is worse (~5 fps), and I also get horizontal and vertical stripes. What could be wrong?

#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
uniform vec2 invScreenSize;

 // Bell (B-spline) weighting function for bi-cubic filtering.
 // Input: signed texel distance in [-2, +2]. Output: weight in [0, 0.75].
 float BellFunc( float f )
{
    // FIX: the original redeclared `f` inside the function and referenced an
    // undefined variable `x`, which fails to compile in GLSL ES. Remap the
    // -2..+2 input range to -1.5..+1.5 using the parameter itself.
    f = ( f / 2.0 ) * 1.5;
    if( f > -1.5 && f < -0.5 )
    {
        return( 0.5 * pow(f + 1.5, 2.0));
    }
    else if( f > -0.5 && f < 0.5 )
    {
        return 3.0 / 4.0 - ( f * f );
    }
    else if( ( f > 0.5 && f < 1.5 ) )
    {
        return( 0.5 * pow(f - 1.5, 2.0));
    }
    // Outside the support of the kernel.
    return 0.0;
}

// Bell-weighted 4x4 bi-cubic filter: samples a 4x4 neighborhood around
// TexCoord and returns the normalized weighted average.
// NOTE(review): despite its name, `invScreenSize` is used here as the source
// size in texels (texel size = 1.0 / invScreenSize) — confirm the uniform
// actually carries the dimensions and not their reciprocals.
vec4 BiCubic( samplerExternalOES textureSampler, vec2 TexCoord )
{
    vec2 texel = vec2( 1.0, 1.0 ) / invScreenSize;      // size of one texel
    vec2 phase = fract( TexCoord * invScreenSize );      // sub-texel position
    vec4 weightedSum = vec4( 0.0, 0.0, 0.0, 0.0 );
    vec4 weightTotal = vec4( 0.0, 0.0, 0.0, 0.0 );
    for( int i = -1; i <= 2; i++ )
    {
        for( int j = -1; j <= 2; j++ )
        {
            vec2 offset = vec2( texel.x * float( i ), texel.y * float( j ) );
            vec4 tap = texture2D( textureSampler, TexCoord + offset );
            float wx = BellFunc( float( i ) - phase.x );
            float wy = BellFunc( -( float( j ) - phase.y ) );
            vec4 weight = vec4( wx * wy );
            weightedSum += tap * weight;
            weightTotal += weight;
        }
    }
    // Normalize so the kernel weights always sum to one.
    return weightedSum / weightTotal;
}

void main() {
    // Output the bi-cubically filtered sample for this fragment.
    vec4 filtered = BiCubic( sTexture, vTextureCoord );
    gl_FragColor = filtered;
}
fadden
  • 51,356
  • 5
  • 116
  • 166
serg66
  • 1,148
  • 1
  • 17
  • 31
  • 1
    Duplicate of http://stackoverflow.com/questions/12474260/antialiasing-in-textureview ? – fadden Oct 27 '15 at 17:54
  • @fadden, actually, I saw that question. I tried to apply the solution in that question (`textureView.setScaleX(1.00001f);`), it does work, but the result is still worse than that of a `SurfaceView` – serg66 Oct 28 '15 at 06:34
  • 2
    SurfaceView scaling is performed by the display hardware or the GPU in the compositor, TextureView scaling is performed by the GPU in the app process. The TextureView output will use whatever texture scale mode is configured into GLES `GL_TEXTURE_MAG_FILTER` for that texture. If it didn't get set, it'll be using GL_NEAREST, which doesn't look as good as GL_LINEAR. You can try digging through the framework sources to figure out what it's doing, but I suspect you'll need to send the output to a SurfaceTexture, then draw onto TextureView's Surface yourself. – fadden Oct 28 '15 at 15:20
  • @fadden thank you for the answer! Do I understand correctly, I set the output to TextureView's Surface, and at some points I lock canvas on `TextureSurface`, call `TextureView.getBitmap()`, prepare this `Bitmap` myself using some algorithm and set this `Bitmap` back using `unlockCanvasAndPost`? – serg66 Oct 29 '15 at 07:15
  • 1
    No. I'm saying you render it to a stand-alone SurfaceTexture, which gives you an OpenGL ES "external" texture of the frame. You then render that with GLES to the TextureView, selecting the GL_LINEAR scale mode. If you try to use Bitmap and Canvas you'll be moving all the pixels through software, rather than the GPU, and will have trouble keeping the frame rate up. You can find various examples of video manipulation in Grafika (https://github.com/google/grafika). – fadden Oct 29 '15 at 15:57
  • @fadden could you have a look at my updated question? The problem is that I can't connect two windows, so one receives data from the MediaPlayer and another one takes this data, applies the scaling mode and outputs it. – serg66 Oct 30 '15 at 14:49
  • 1
    The SurfaceTexture just converts what it receives on the Surface into a GLES "external" texture. It doesn't render it. The part you're missing is that your app needs to be notified of each incoming frame (via on onFrameAvailable callback) and then render the texture onto the TextureView. This is a manual step -- you can't just plug pieces together. The FullFrameRect class in Grafika is useful for this. – fadden Oct 30 '15 at 17:41
  • @fadden, finally I managed to draw video-frames to a `TextureView`, thank you! But it turned out that `TextureView` already uses `GL_LINEAR` by default. It means that `SurfaceView` uses something more advanced than Bi-Linear interpolation (as it's video quality is much better). I know, there's a way to implement any interpolation method using shaders. Could you have a look at my last update section? – serg66 Nov 02 '15 at 12:45
  • FWIW, you may want to examine the convolution filters in Grafika. The implementation makes it easy to experiment. Demo here: https://www.youtube.com/watch?v=kH9kCP2T5Gg – fadden Nov 19 '15 at 23:49
  • @serg66 Did you implement a better interpolation shader at last? I came up with the same problem. – dragonfly Jul 21 '16 at 10:18

0 Answers