Android NDK C++开发解码器程序,需要实现如下的功能
1.调用mediaCodec实现硬件解码。 2.使用GPU把解码后的每一帧数据转为RGB格式。 3.以索引为key,每一帧数据为value,加入缓存池。 4.设计一个接口,根据帧索引从缓存池获取数据,同时删除上一帧数据。 要求抽象设计如下模块:解码模块、格式转换模块。 请用C++语言给出具体实现
解码模块实现:
include <jni.h>
include <android/native_window.h>
include <android/nativewindowjni.h>
include <media/NdkMediaCodec.h>
define MAXBUFFERSIZE 100
ANativeWindow *window; AMediaFormat *format; AMediaCodec *codec; int bufferIndex = -1;
typedef struct Frame { int index; uint8_t *data; } Frame;
Frame frameBuffer[MAXBUFFERSIZE]; int frameCount = 0;
void releaseFrame(int index) { if (frameBuffer[index].data != NULL) { free(frameBuffer[index].data); frameBuffer[index].data = NULL; } }
// JNI: create and configure a hardware decoder for |mime_type| that renders
// into |surface|. Returns AMEDIA_OK (0) on success, otherwise a
// media_status_t error code.
// (restored: JNI symbol underscores, NDK API names, and the mime_type
// parameter name, which the body previously referenced as 'mimetype';
// added extern "C" so the C++ compiler does not mangle the JNI symbol.)
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_initDecoder(JNIEnv *env, jobject thiz,
                                                   jstring mime_type,
                                                   jint width, jint height,
                                                   jobject surface) {
    const char *mime = env->GetStringUTFChars(mime_type, 0);
    window = ANativeWindow_fromSurface(env, surface);
    format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
    codec = AMediaCodec_createDecoderByType(mime);
    if (codec == NULL) {
        // No hardware/software decoder registered for this MIME type.
        env->ReleaseStringUTFChars(mime_type, mime);
        return (jint) AMEDIA_ERROR_UNSUPPORTED;
    }
    media_status_t status = AMediaCodec_configure(codec, format, window, NULL, 0);
    env->ReleaseStringUTFChars(mime_type, mime);
    return (jint) status;
}
// JNI: start the configured decoder. Returns the media_status_t result.
// (restored mangled JNI symbol; extern "C" prevents C++ name mangling.)
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_startDecoder(JNIEnv *env, jobject thiz) {
    return (jint) AMediaCodec_start(codec);
}
// JNI: stop the decoder (it can be started again later). Returns the
// media_status_t result.
// (restored mangled JNI symbol; extern "C" prevents C++ name mangling.)
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_stopDecoder(JNIEnv *env, jobject thiz) {
    return (jint) AMediaCodec_stop(codec);
}
// JNI: tear everything down — drain the frame pool, then release the
// format, the native window and the codec. The original leaked the
// AMediaFormat and the ANativeWindow reference acquired in initDecoder.
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_releaseDecoder(JNIEnv *env, jobject thiz) {
    for (int i = 0; i < MAXBUFFERSIZE; i++) {
        releaseFrame(i);
    }
    if (format != NULL) {
        AMediaFormat_delete(format);
        format = NULL;
    }
    if (window != NULL) {
        ANativeWindow_release(window);
        window = NULL;
    }
    media_status_t status = AMediaCodec_delete(codec);
    codec = NULL;
    return (jint) status;
}
// JNI: pull one decoded frame out of the codec, copy its bytes into the
// cache pool under key |index|, and return the payload size in bytes
// (-1 when no frame is available yet).
// Fixes vs. original: AMediaCodec_dequeueOutputBuffer takes an
// AMediaCodecBufferInfo* and RETURNS the buffer index (the original passed
// &bufferIndex, which does not match the NDK signature); the nonexistent
// getOutputBufferInfo/getOutputBufferData calls are replaced by the real
// AMediaCodec_getOutputBuffer; malloc is checked; eviction no longer
// computes a negative slot before the pool wraps.
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_decodeFrame(JNIEnv *env, jobject thiz, jint index) {
    // Release the output buffer still held from the previous call, if any.
    if (bufferIndex >= 0) {
        AMediaCodec_releaseOutputBuffer(codec, bufferIndex, false);
        bufferIndex = -1;
    }
    AMediaCodecBufferInfo info;
    ssize_t status = AMediaCodec_dequeueOutputBuffer(codec, &info, 0);
    if (status >= 0) {
        bufferIndex = (int) status;
        size_t bufCapacity = 0;
        uint8_t *buf = AMediaCodec_getOutputBuffer(codec, (size_t) bufferIndex, &bufCapacity);
        int size = info.size;
        if (buf == NULL || size <= 0) {
            AMediaCodec_releaseOutputBuffer(codec, bufferIndex, false);
            bufferIndex = -1;
            return -1;
        }
        uint8_t *data = (uint8_t *) malloc((size_t) size);
        if (data == NULL) {   // OOM: drop the frame rather than crash
            AMediaCodec_releaseOutputBuffer(codec, bufferIndex, false);
            bufferIndex = -1;
            return -1;
        }
        memcpy(data, buf + info.offset, (size_t) size);
        AMediaCodec_releaseOutputBuffer(codec, bufferIndex, false);
        bufferIndex = -1;

        int slot = frameCount % MAXBUFFERSIZE;
        releaseFrame(slot);   // evict whatever previously occupied this slot
        frameBuffer[slot].index = index;
        frameBuffer[slot].data = data;
        frameCount++;
        return size;
    }
    if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
        // Re-negotiate the window geometry from the codec's actual output
        // format (AMediaFormat_getInt32 returns bool and writes through an
        // out-parameter — the original used its return value as the int).
        AMediaFormat *outputFormat = AMediaCodec_getOutputFormat(codec);
        int32_t w = 0, h = 0;
        AMediaFormat_getInt32(outputFormat, AMEDIAFORMAT_KEY_WIDTH, &w);
        AMediaFormat_getInt32(outputFormat, AMEDIAFORMAT_KEY_HEIGHT, &h);
        ANativeWindow_setBuffersGeometry(window, w, h, WINDOW_FORMAT_RGBA_8888);
        AMediaFormat_delete(outputFormat);
    }
    return -1;
}
// JNI: look up the cached frame with key |index|, copy it into the Java
// byte[] |data|, remove it from the pool, and return the number of bytes
// copied (-1 when the frame is not cached).
// Fixes vs. original: sizeof(frame.data) is the size of a POINTER (8 bytes),
// not of the payload — since the Frame struct does not record the payload
// size, the Java-side array length is used as the copy size instead
// (NOTE(review): the caller must size the array to the decoded frame size;
// storing the size in Frame would be the cleaner fix). Also skips empty
// slots, which previously matched any request with index == 0.
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_requestFrame(JNIEnv *env, jobject thiz,
                                                    jint index, jbyteArray data) {
    jsize capacity = env->GetArrayLength(data);
    for (int i = 0; i < MAXBUFFERSIZE; i++) {
        if (frameBuffer[i].data != NULL && frameBuffer[i].index == index) {
            env->SetByteArrayRegion(data, 0, capacity, (jbyte *) frameBuffer[i].data);
            releaseFrame(i);   // consume-on-read: drop the frame from the pool
            return (jint) capacity;
        }
    }
    return -1;
}
格式转换模块实现:
include <GLES2/gl2.h>
include <GLES2/gl2ext.h>
GLuint programId; GLint positionHandle; GLint textureHandle;
void initShader() { const char *vertexShaderCode = "attribute vec4 position;\n" "attribute vec2 texCoord;\n" "varying vec2 vTexCoord;\n" "void main() {\n" " gl_Position = position;\n" " vTexCoord = texCoord;\n" "}\n"; const char *fragmentShaderCode = "#extension GLOESEGLimageexternal : require\n" "precision mediump float;\n" "uniform samplerExternalOES texture;\n" "varying vec2 vTexCoord;\n" "void main() {\n" " glFragColor = texture2D(texture, vTexCoord);\n" "}\n"; GLuint vertexShaderId = glCreateShader(GLVERTEXSHADER); glShaderSource(vertexShaderId, 1, &vertexShaderCode, NULL); glCompileShader(vertexShaderId); GLuint fragmentShaderId = glCreateShader(GLFRAGMENT_SHADER); glShaderSource(fragmentShaderId, 1, &fragmentShaderCode, NULL); glCompileShader(fragmentShaderId); programId = glCreateProgram(); glAttachShader(programId, vertexShaderId); glAttachShader(programId, fragmentShaderId); glLinkProgram(programId); positionHandle = glGetAttribLocation(programId, "position"); textureHandle = glGetAttribLocation(programId, "texCoord"); }
void convertToRGBA(uint8_t *data, int width, int height) { GLuint textureId; glGenTextures(1, &textureId); glActiveTexture(GLTEXTURE0); glBindTexture(GLTEXTUREEXTERNALOES, textureId); glTexParameteri(GLTEXTUREEXTERNALOES, GLTEXTUREWRAPS, GLCLAMPTOEDGE); glTexParameteri(GLTEXTUREEXTERNALOES, GLTEXTUREWRAPT, GLCLAMPTOEDGE); glTexParameteri(GLTEXTUREEXTERNALOES, GLTEXTURE_MINFILTER, GLLINEAR); glTexParameteri(GLTEXTUREEXTERNALOES, GLTEXTUREMAGFILTER, GLLINEAR); glUseProgram(programId); glEnableVertexAttribArray(positionHandle); glVertexAttribPointer(positionHandle, 3, GLFLOAT, GLFALSE, 0, vertices); glEnableVertexAttribArray(textureHandle); glVertexAttribPointer(textureHandle, 2, GLFLOAT, GLFALSE, 0, texCoords); glTexImage2D(GLTEXTUREEXTERNALOES, 0, GLRGBA, width, height, 0, GLRGBA, GLUNSIGNEDBYTE, data); glDrawArrays(GLTRIANGLESTRIP, 0, 4); glDisableVertexAttribArray(positionHandle); glDisableVertexAttribArray(textureHandle); glDeleteTextures(1, &textureId); }
【相关学习资料推荐，点击下方链接免费报名，先码住不迷路~】
音视频免费学习地址:FFmpeg/WebRTC/RTMP/NDK/Android音视频流媒体高级开发
【免费分享】音视频学习资料包、大厂面试题、技术视频和学习路线图,资料包括(C/C++,Linux,FFmpeg webRTC rtmp hls rtsp ffplay srs 等等)有需要的可以点击788280672加群免费领取~


