I need some help.
How do I play a video onto an Android Surface (OpenGL)? I tried to play a video in my mySurfaceView extends SurfaceView using MediaPlayer's helper method setSurface():
SurfaceTexture mTexture = new SurfaceTexture(texture_id);
Surface mSurface = new Surface(mTexture);
MediaPlayer mp = new MediaPlayer();
mp.setSurface(mSurface);
Only the audio plays; no video is shown. How do I get hold of the video frames so I can feed them to OpenGL? How do I play a video onto a GL texture?

From the Android source code:
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
class VideoSurfaceView extends GLSurfaceView {
VideoRender mRenderer;
private MediaPlayer mMediaPlayer = null;
public VideoSurfaceView(Context context, MediaPlayer mp) {
super(context);
setEGLContextClientVersion(2);
mMediaPlayer = mp;
mRenderer = new VideoRender(context);
setRenderer(mRenderer);
}
@Override
public void onResume() {
queueEvent(new Runnable(){
public void run() {
mRenderer.setMediaPlayer(mMediaPlayer);
}});
super.onResume();
}
private static class VideoRender
implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private static String TAG = "VideoRender";
private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private SurfaceTexture mSurface;
private boolean updateSurface = false;
private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private MediaPlayer mMediaPlayer;
public VideoRender(Context context) {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public void setMediaPlayer(MediaPlayer player) {
mMediaPlayer = player;
}
public void onDrawFrame(GL10 glUnused) {
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}
GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();
}
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
}
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
/*
* Create the SurfaceTexture that will feed this textureID,
* and pass it to the MediaPlayer
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
surface.release();
try {
mMediaPlayer.prepare();
} catch (IOException t) {
Log.e(TAG, "media player prepare failed");
}
synchronized(this) {
updateSurface = false;
}
mMediaPlayer.start();
}
synchronized public void onFrameAvailable(SurfaceTexture surface) {
updateSurface = true;
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
} // End of class VideoRender.
} // End of class VideoSurfaceView.
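For reference, a minimal sketch of how this view could be hosted from an Activity. The Activity name and video path are placeholders, not part of the original sample, and prepare()/start() are deliberately left to VideoRender:

import android.app.Activity
import android.media.MediaPlayer
import android.os.Bundle

// Hypothetical host Activity for the VideoSurfaceView above.
class VideoPlayerActivity : Activity() {
    private val mediaPlayer = MediaPlayer()
    private lateinit var videoView: VideoSurfaceView

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        // Only set the data source here; VideoRender calls prepare() and
        // start() once the GL surface and its SurfaceTexture exist.
        mediaPlayer.setDataSource("/sdcard/sample.mp4")   // placeholder path
        videoView = VideoSurfaceView(this, mediaPlayer)
        setContentView(videoView)
    }

    override fun onResume() {
        super.onResume()
        videoView.onResume()   // hands the player to the renderer on the GL thread
    }

    override fun onDestroy() {
        super.onDestroy()
        mediaPlayer.release()
    }
}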
I don't think you can. At least that is what I found out. My plan was to have some kind of OpenGL scene (a text ticker) running while a video plays. Because Android uses hardware decoding to display video, the frames never go through OpenGL. I tried playing video in OpenGL via ffmpeg instead, but none of the devices I tried had enough performance for software decoding through ffmpeg.
So I had to use a VideoView to show the video and put a GLSurfaceView on top of it for the ticker text. The GLSurfaceView has to be translucent, though, as in the ApiDemos "TranslucentGLSurfaceViewActivity"; a minimal setup sketch follows below.
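A rough sketch of that translucent setup, modeled on the ApiDemos sample. The class name is illustrative, the GLES 2 context and the z-order call are my additions, and the renderer you pass in must clear with alpha 0:

import android.content.Context
import android.graphics.PixelFormat
import android.opengl.GLSurfaceView

// Sketch: a GLSurfaceView that can be stacked translucently above a VideoView.
class TickerGLSurfaceView(context: Context, renderer: Renderer) : GLSurfaceView(context) {
    init {
        setEGLContextClientVersion(2)
        setEGLConfigChooser(8, 8, 8, 8, 16, 0)     // request an EGL config with an alpha channel
        holder.setFormat(PixelFormat.TRANSLUCENT)  // make the window surface translucent
        setZOrderMediaOverlay(true)                // place it above the VideoView's media layer
        setRenderer(renderer)
    }
}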
One more thing I noticed: when you put a GLSurfaceView on top of a VideoView, the frame rate drops dramatically, from 60 fps (OpenGL alone) to roughly 30-40 fps. That was the case on every 2.x version of Android I tested. Last week I had the chance to test on Android 4, and this time there was no such drop, so they apparently improved the graphics pipeline considerably in ICS.
Greetings, -chris-
I converted the Java code to a Kotlin version:
// Runs a GL call and throws as soon as glGetError() reports a failure.
// (The original loop re-fetched the error before logging it, so the real
// error code was lost; this logs the error that was actually detected.)
internal inline fun <T> glRun(message: String = "", block: () -> T): T {
    return block().also {
        val error = GLES20.glGetError()
        if (error != GLES20.GL_NO_ERROR) {
            Log.d("MOVIE_GL_ERROR", "$message: $error")
            throw RuntimeException("GL Error: $message ($error)")
        }
    }
}
class MovieRenderer: GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private var program = 0
private var textureId = 0
// Handles
private var mvpMatrixHandle = 0
private var stMatrixHandle = 0
private var positionHandle = 0
private var textureHandle = 0
// Surface Texture
private var updateSurface = false
private lateinit var surfaceTexture: SurfaceTexture
// Matrices
private var mvpMatrix = FloatArray(16)
private var stMatrix = FloatArray(16)
// float buffer
private val vertices: FloatBuffer = ByteBuffer.allocateDirect(VERTICES_DATA.size * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder())
.asFloatBuffer().also {
it.put(VERTICES_DATA).position(0)
}
var mediaPlayer: MediaPlayer? = null
@Synchronized
override fun onFrameAvailable(surfaceTexture: SurfaceTexture?) {
updateSurface = true
}
override fun onDrawFrame(gl: GL10?) {
synchronized(this) {
if (updateSurface) {
surfaceTexture.updateTexImage()
surfaceTexture.getTransformMatrix(stMatrix)
updateSurface = false
}
}
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f)
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT or GLES20.GL_COLOR_BUFFER_BIT)
glRun("glUseProgram: $program") {
GLES20.glUseProgram(program)
}
vertices.position(VERTICES_POS_OFFSET)
glRun("glVertexAttribPointer: Stride bytes") {
GLES20.glVertexAttribPointer(positionHandle, 3, GLES20.GL_FLOAT, false,
VERTICES_STRIDE_BYTES, vertices)
}
glRun("glEnableVertexAttribArray") {
GLES20.glEnableVertexAttribArray(positionHandle)
}
vertices.position(VERTICES_UV_OFFSET)
glRun("glVertexAttribPointer: texture handle") {
GLES20.glVertexAttribPointer(textureHandle, 3, GLES20.GL_FLOAT, false,
VERTICES_STRIDE_BYTES, vertices)
}
glRun("glEnableVertexAttribArray") {
GLES20.glEnableVertexAttribArray(textureHandle)
}
Matrix.setIdentityM(mvpMatrix, 0)
glRun("glUniformMatrix4fv: mvpMatrix") {
GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0)
}
glRun("glUniformMatrix4fv: stMatrix") {
GLES20.glUniformMatrix4fv(stMatrixHandle, 1, false, stMatrix, 0)
}
glRun("glDrawArrays: GL_TRIANGLE_STRIP") {
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
}
GLES20.glFinish()
}
override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
GLES20.glViewport(0, 0, width, height)
}
override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
program = createProgram()
positionHandle = "aPosition".attr()
textureHandle = "aTextureCoord".attr()
mvpMatrixHandle = "uMVPMatrix".uniform()
stMatrixHandle = "uSTMatrix".uniform()
createTexture()
}
private fun createTexture() {
val textures = IntArray(1)
GLES20.glGenTextures(1, textures, 0)
textureId = textures.first()
glRun("glBindTexture textureId") { GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureId) }
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST)
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
surfaceTexture = SurfaceTexture(textureId)
surfaceTexture.setOnFrameAvailableListener(this)
val surface = Surface(surfaceTexture)
mediaPlayer?.setSurface(surface)
surface.release()
try {
mediaPlayer?.prepare()
} catch (error: IOException) {
Log.e("MovieRenderer", "media player prepare failed")
throw error
}
synchronized(this) {
updateSurface = false
}
mediaPlayer?.start()
}
private fun String.attr(): Int {
return glRun("Get attribute location: $this") {
GLES20.glGetAttribLocation(program, this).also {
if (it == -1) fail("Error Attribute: $this not found!")
}
}
}
private fun String.uniform(): Int {
return glRun("Get uniform location: $this") {
GLES20.glGetUniformLocation(program, this).also {
if (it == -1) fail("Error Uniform: $this not found!")
}
}
}
companion object {
private const val GL_TEXTURE_EXTERNAL_OES = 0x8D65
private const val FLOAT_SIZE_BYTES = 4
private const val VERTICES_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES
private const val VERTICES_POS_OFFSET = 0
private const val VERTICES_UV_OFFSET = 3
private val VERTICES_DATA = floatArrayOf(
-1.0f, -1.0f, 0f, 0.0f, 0.0f,
1.0f, -1.0f, 0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0f, 0.0f, 1.0f,
1.0f, 1.0f, 0f, 1.0f, 1.0f
)
private const val VERTEX_SHADER = """
uniform mat4 uMVPMatrix;
uniform mat4 uSTMatrix;
attribute vec4 aPosition;
attribute vec4 aTextureCoord;
varying vec2 vTextureCoord;
void main() {
gl_Position = uMVPMatrix * aPosition;
vTextureCoord = (uSTMatrix * aTextureCoord).xy;
}
"""
private const val FRAGMENT_SHADER = """
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
void main() {
gl_FragColor = texture2D(sTexture, vTextureCoord);
}
"""
private fun createShader(type: Int, source: String): Int {
val shader = GLES20.glCreateShader(type)
if (shader == 0) throw RuntimeException("Cannot create shader $type\n$source")
GLES20.glShaderSource(shader, source)
GLES20.glCompileShader(shader)
val args = IntArray(1)
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, args, 0)
if (args.first() == 0) {
Log.e("MOVIE_SHADER", "Failed to compile shader source")
Log.e("MOVIE_SHADER", GLES20.glGetShaderInfoLog(shader))
GLES20.glDeleteShader(shader)
throw RuntimeException("Could not compile shader $source\n$type")
}
return shader
}
private fun createProgram(vertexShaderSource: String = VERTEX_SHADER,
fragmentShaderSource: String = FRAGMENT_SHADER): Int {
val vertexShader = createShader(GLES20.GL_VERTEX_SHADER, vertexShaderSource)
val fragmentShader = createShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderSource)
val program = GLES20.glCreateProgram()
if (program == 0) throw RuntimeException("Cannot create program")
glRun("Attach vertex shader to program") {
GLES20.glAttachShader(program, vertexShader)
}
glRun("Attach fragment shader to program") {
GLES20.glAttachShader(program, fragmentShader)
}
GLES20.glLinkProgram(program)
val args = IntArray(1)
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, args, 0)
if (args.first() != GLES20.GL_TRUE) {
val info = GLES20.glGetProgramInfoLog(program)
GLES20.glDeleteProgram(program)
throw RuntimeException("Cannot link program $program, Info: $info")
}
return program
}
private fun fail(message: String): Nothing {
throw RuntimeException(message)
}
}
}
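A minimal sketch of how this renderer might be hooked up to a GLSurfaceView. The view class and the commented usage are illustrative assumptions, not part of the conversion above:

import android.content.Context
import android.media.MediaPlayer
import android.opengl.GLSurfaceView

// Sketch: host view that wires MovieRenderer to a MediaPlayer.
class MovieSurfaceView(context: Context, player: MediaPlayer) : GLSurfaceView(context) {
    init {
        setEGLContextClientVersion(2)                // MovieRenderer uses GLES20
        setRenderer(MovieRenderer().apply { mediaPlayer = player })
    }
}

// Possible usage from an Activity (the path is a placeholder):
// val player = MediaPlayer().apply { setDataSource("/sdcard/sample.mp4") }
// setContentView(MovieSurfaceView(this, player))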
mMediaPlayer.setSurface(new Surface(mSurfaceTexture));
With the line of code above you can attach your mediaPlayer object to the desired surfaceTexture, i.e. the SurfaceTexture backing the texture that is applied to your view; a short sketch follows below.
I hope it helps.
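For example, with a TextureView the SurfaceTexture arrives through the listener callback. A rough sketch, where the helper name and data source are placeholders:

import android.graphics.SurfaceTexture
import android.media.MediaPlayer
import android.view.Surface
import android.view.TextureView

// Sketch: start a MediaPlayer on the SurfaceTexture provided by a TextureView.
fun attachPlayer(textureView: TextureView): MediaPlayer {
    val mediaPlayer = MediaPlayer()
    textureView.surfaceTextureListener = object : TextureView.SurfaceTextureListener {
        override fun onSurfaceTextureAvailable(texture: SurfaceTexture, width: Int, height: Int) {
            mediaPlayer.setSurface(Surface(texture))
            mediaPlayer.setDataSource("/sdcard/sample.mp4")   // placeholder path
            mediaPlayer.prepare()
            mediaPlayer.start()
        }
        override fun onSurfaceTextureSizeChanged(texture: SurfaceTexture, width: Int, height: Int) = Unit
        override fun onSurfaceTextureDestroyed(texture: SurfaceTexture): Boolean {
            mediaPlayer.release()
            return true
        }
        override fun onSurfaceTextureUpdated(texture: SurfaceTexture) = Unit
    }
    return mediaPlayer
}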