Rabbit-R1/switch port/java/sources/androidx/media3/exoplayer/video/VideoDecoderGLSurfaceView.java

package androidx.media3.exoplayer.video;

import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import androidx.media3.common.util.Assertions;
import androidx.media3.common.util.GlProgram;
import androidx.media3.common.util.GlUtil;
import androidx.media3.decoder.VideoDecoderOutputBuffer;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.util.concurrent.atomic.AtomicReference;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** A {@link GLSurfaceView} that renders YUV {@link VideoDecoderOutputBuffer}s, converting them to RGB on the GPU. */
public final class VideoDecoderGLSurfaceView extends GLSurfaceView
    implements VideoDecoderOutputBufferRenderer {

  private static final String TAG = "VideoDecoderGLSV";

  private final Renderer renderer;

  /** @deprecated This view implements {@link VideoDecoderOutputBufferRenderer} itself. */
  @Deprecated
  public VideoDecoderOutputBufferRenderer getVideoDecoderOutputBufferRenderer() {
    return this;
  }
  public VideoDecoderGLSurfaceView(Context context) {
    this(context, /* attrs= */ null);
  }

  public VideoDecoderGLSurfaceView(Context context, AttributeSet attrs) {
    super(context, attrs);
    renderer = new Renderer(this);
    setPreserveEGLContextOnPause(true);
    setEGLContextClientVersion(2);
    setRenderer(renderer);
    // Only draw when a new output buffer arrives (requestRender is called in setOutputBuffer).
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
  }
  @Override // androidx.media3.exoplayer.video.VideoDecoderOutputBufferRenderer
  public void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer) {
    renderer.setOutputBuffer(outputBuffer);
  }
  /** Renders each output buffer's Y, U and V planes to the surface using GLES 2.0. */
  private static final class Renderer implements GLSurfaceView.Renderer {

    private static final String FRAGMENT_SHADER = "precision mediump float;\nvarying vec2 interp_tc_y;\nvarying vec2 interp_tc_u;\nvarying vec2 interp_tc_v;\nuniform sampler2D y_tex;\nuniform sampler2D u_tex;\nuniform sampler2D v_tex;\nuniform mat3 mColorConversion;\nvoid main() {\n vec3 yuv;\n yuv.x = texture2D(y_tex, interp_tc_y).r - 0.0625;\n yuv.y = texture2D(u_tex, interp_tc_u).r - 0.5;\n yuv.z = texture2D(v_tex, interp_tc_v).r - 0.5;\n gl_FragColor = vec4(mColorConversion * yuv, 1.0);\n}\n";
    private static final String VERTEX_SHADER = "varying vec2 interp_tc_y;\nvarying vec2 interp_tc_u;\nvarying vec2 interp_tc_v;\nattribute vec4 in_pos;\nattribute vec2 in_tc_y;\nattribute vec2 in_tc_u;\nattribute vec2 in_tc_v;\nvoid main() {\n gl_Position = in_pos;\n interp_tc_y = in_tc_y;\n interp_tc_u = in_tc_u;\n interp_tc_v = in_tc_v;\n}\n";
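    // The vertex shader passes a separate texture coordinate per plane so each plane can be
    // cropped independently: planes are uploaded at their full row stride, and onDrawFrame
    // scales the coordinates by width / stride to hide the padding bytes.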
    private int colorMatrixLocation;
    private GlProgram program;
    // The buffer currently on screen. Retained so the same frame can be redrawn if needed.
    private VideoDecoderOutputBuffer renderedOutputBuffer;
    private final GLSurfaceView surfaceView;
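    // Column-major 3x3 matrices converting limited-range YUV to RGB for BT.601, BT.709 and
    // BT.2020 content. The fragment shader subtracts the black level (0.0625 ~= 16/256) from Y
    // and 0.5 from U/V before applying the selected matrix.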
    private static final float[] kColorConversion601 = {1.164f, 1.164f, 1.164f, 0.0f, -0.392f, 2.017f, 1.596f, -0.813f, 0.0f};
    private static final float[] kColorConversion709 = {1.164f, 1.164f, 1.164f, 0.0f, -0.213f, 2.112f, 1.793f, -0.533f, 0.0f};
    private static final float[] kColorConversion2020 = {1.168f, 1.168f, 1.168f, 0.0f, -0.188f, 2.148f, 1.683f, -0.652f, 0.0f};
    private static final String[] TEXTURE_UNIFORMS = {"y_tex", "u_tex", "v_tex"};
    // Full-screen quad, drawn as a triangle strip.
    private static final FloatBuffer TEXTURE_VERTICES =
        GlUtil.createBuffer(new float[] {-1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, -1.0f});

    private final int[] yuvTextures = new int[3];
    private final int[] texLocations = new int[3];
    // Width/stride used for the last uploaded frame, per plane, so texture coordinates are only
    // recomputed when they change.
    private final int[] previousWidths = new int[3];
    private final int[] previousStrides = new int[3];
    // Hand-off between the thread calling setOutputBuffer and the GL thread running onDrawFrame.
    private final AtomicReference<VideoDecoderOutputBuffer> pendingOutputBufferReference =
        new AtomicReference<>();
    private final FloatBuffer[] textureCoords = new FloatBuffer[3];
    public Renderer(GLSurfaceView surfaceView) {
      this.surfaceView = surfaceView;
      // Use -1 so the texture coordinates are computed on the first frame.
      for (int i = 0; i < 3; i++) {
        previousWidths[i] = -1;
        previousStrides[i] = -1;
      }
    }
    @Override // android.opengl.GLSurfaceView.Renderer
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
      try {
        program = new GlProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        // The quad's vertex positions never change, so upload them once here.
        GLES20.glVertexAttribPointer(
            program.getAttributeArrayLocationAndEnable("in_pos"),
            /* size= */ 2, GLES20.GL_FLOAT, /* normalized= */ false, /* stride= */ 0,
            (Buffer) TEXTURE_VERTICES);
        texLocations[0] = program.getAttributeArrayLocationAndEnable("in_tc_y");
        texLocations[1] = program.getAttributeArrayLocationAndEnable("in_tc_u");
        texLocations[2] = program.getAttributeArrayLocationAndEnable("in_tc_v");
        colorMatrixLocation = program.getUniformLocation("mColorConversion");
        GlUtil.checkGlError();
        setupTextures();
        GlUtil.checkGlError();
      } catch (GlUtil.GlException e) {
        Log.e(VideoDecoderGLSurfaceView.TAG, "Failed to set up the textures and program", e);
      }
    }
    @Override // android.opengl.GLSurfaceView.Renderer
    public void onSurfaceChanged(GL10 gl, int width, int height) {
      GLES20.glViewport(0, 0, width, height);
    }
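    // Renders the newest available buffer: takes any pending buffer (releasing the one it
    // replaces), uploads the three YUV planes as GL_LUMINANCE textures and draws a textured
    // full-screen quad, with the YUV-to-RGB conversion done in the fragment shader.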
    @Override // android.opengl.GLSurfaceView.Renderer
    public void onDrawFrame(GL10 gl) {
      VideoDecoderOutputBuffer pendingOutputBuffer = pendingOutputBufferReference.getAndSet(null);
      if (pendingOutputBuffer == null && renderedOutputBuffer == null) {
        // Nothing has been queued for rendering yet.
        return;
      }
      if (pendingOutputBuffer != null) {
        if (renderedOutputBuffer != null) {
          renderedOutputBuffer.release();
        }
        renderedOutputBuffer = pendingOutputBuffer;
      }
      VideoDecoderOutputBuffer outputBuffer = Assertions.checkNotNull(renderedOutputBuffer);
      // Select the conversion matrix for the buffer's colorspace, defaulting to BT.709.
      float[] colorConversion = kColorConversion709;
      if (outputBuffer.colorspace == 1) { // BT.601
        colorConversion = kColorConversion601;
      } else if (outputBuffer.colorspace == 3) { // BT.2020
        colorConversion = kColorConversion2020;
      }
      GLES20.glUniformMatrix3fv(
          colorMatrixLocation, /* count= */ 1, /* transpose= */ false, colorConversion, /* offset= */ 0);
      int[] yuvStrides = Assertions.checkNotNull(outputBuffer.yuvStrides);
      ByteBuffer[] yuvPlanes = Assertions.checkNotNull(outputBuffer.yuvPlanes);
      // Upload each plane as a single-channel texture, using the row stride as the texture width
      // so the decoder output can be uploaded without repacking.
      for (int i = 0; i < 3; i++) {
        // The U and V planes are subsampled vertically by 2 (4:2:0 content).
        int planeHeight = i == 0 ? outputBuffer.height : (outputBuffer.height + 1) / 2;
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        GLES20.glTexImage2D(
            GLES20.GL_TEXTURE_2D, /* level= */ 0, GLES20.GL_LUMINANCE, yuvStrides[i], planeHeight,
            /* border= */ 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
      }
      // Recompute the per-plane texture coordinates whenever the visible width or stride of a
      // plane changes, so that the padding between width and stride is cropped out.
      int chromaWidth = (outputBuffer.width + 1) / 2;
      int[] widths = {outputBuffer.width, chromaWidth, chromaWidth};
      for (int i = 0; i < 3; i++) {
        if (previousWidths[i] != widths[i] || previousStrides[i] != yuvStrides[i]) {
          Assertions.checkState(yuvStrides[i] != 0);
          float widthRatio = (float) widths[i] / yuvStrides[i];
          textureCoords[i] =
              GlUtil.createBuffer(
                  new float[] {0.0f, 0.0f, 0.0f, 1.0f, widthRatio, 0.0f, widthRatio, 1.0f});
          GLES20.glVertexAttribPointer(
              texLocations[i], /* size= */ 2, GLES20.GL_FLOAT, /* normalized= */ false,
              /* stride= */ 0, (Buffer) textureCoords[i]);
          previousWidths[i] = widths[i];
          previousStrides[i] = yuvStrides[i];
        }
      }
      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */ 0, /* count= */ 4);
      try {
        GlUtil.checkGlError();
      } catch (GlUtil.GlException e) {
        Log.e(VideoDecoderGLSurfaceView.TAG, "Failed to draw a frame", e);
      }
    }
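    // Typically called from the player's video renderer (not the GL thread) whenever a new
    // frame is ready. A still-pending buffer that was never drawn is released before being
    // replaced, and a redraw is requested on the GL thread.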
    public void setOutputBuffer(VideoDecoderOutputBuffer outputBuffer) {
      VideoDecoderOutputBuffer oldPendingOutputBuffer =
          pendingOutputBufferReference.getAndSet(outputBuffer);
      if (oldPendingOutputBuffer != null) {
        oldPendingOutputBuffer.release();
      }
      surfaceView.requestRender();
    }
    @RequiresNonNull({"program"})
    private void setupTextures() {
      try {
        GLES20.glGenTextures(3, yuvTextures, 0);
        for (int i = 0; i < 3; i++) {
          // Bind texture i to texture unit i and point the corresponding sampler uniform at it.
          GLES20.glUniform1i(program.getUniformLocation(TEXTURE_UNIFORMS[i]), i);
          GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
          GlUtil.bindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
        }
        GlUtil.checkGlError();
      } catch (GlUtil.GlException e) {
        Log.e(VideoDecoderGLSurfaceView.TAG, "Failed to set up the textures", e);
      }
    }
  }
}
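// Usage sketch (illustrative, not part of the decompiled file): the view is attached like any
// other SurfaceView. Recent media3 ExoPlayer versions recognize a VideoDecoderGLSurfaceView in
// setVideoSurfaceView and route decoded buffers to it via setOutputBuffer; a software decoder
// renderer module (for example the libvpx or libgav1 extension) is assumed to be in use.
//
//   VideoDecoderGLSurfaceView surfaceView = new VideoDecoderGLSurfaceView(context);
//   ExoPlayer player = new ExoPlayer.Builder(context).build();
//   player.setVideoSurfaceView(surfaceView);
//   player.setMediaItem(MediaItem.fromUri(uri));
//   player.prepare();
//   player.play();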