A decoder program built with the Android NDK in C++ needs to implement the following features:

1. Call MediaCodec to perform hardware decoding.
2. Use the GPU to convert each decoded frame to RGB format.
3. Put every frame into a cache pool, with the frame index as key and the frame data as value.
4. Provide an interface that fetches a frame from the cache pool by index and deletes the previous frame's data at the same time.

The design must abstract the following modules: a decoder module and a format conversion module. A concrete C++ implementation is given below; a sketch of the abstract module interfaces comes first.
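Before the concrete code, here is a minimal sketch of what the abstract module design could look like. The class and member names (DecoderModule, FormatConverter, FrameCache, DecodedFrame) are illustrative assumptions, not part of the original code:

// A minimal sketch of the abstract module design; all names here are
// hypothetical and chosen only to mirror the four requirements above.
#include <cstdint>
#include <vector>

// One decoded frame: index is the cache key, data holds RGBA pixels.
struct DecodedFrame {
    int index;
    std::vector<uint8_t> data;
};

// Decoder module: wraps MediaCodec hardware decoding (requirement 1).
class DecoderModule {
public:
    virtual ~DecoderModule() = default;
    virtual bool init(const char *mime, int width, int height) = 0;
    virtual bool start() = 0;
    virtual bool stop() = 0;
    // Decodes one frame and pushes it into the cache pool.
    virtual int decodeFrame(int index) = 0;
};

// Format conversion module: GPU conversion to RGBA (requirement 2).
class FormatConverter {
public:
    virtual ~FormatConverter() = default;
    virtual void init() = 0;
    virtual void convertToRGBA(uint8_t *data, int width, int height) = 0;
};

// Cache pool: fetch by index, dropping the previous frame (requirements 3-4).
class FrameCache {
public:
    virtual ~FrameCache() = default;
    virtual void put(int index, DecodedFrame frame) = 0;
    virtual bool take(int index, DecodedFrame &out) = 0;
};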
Decoder module implementation:
#include <jni.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormat.h>
#include <cstdlib>
#include <cstring>

#define MAX_BUFFER_SIZE 100
ANativeWindow *window = nullptr;
AMediaFormat *format = nullptr;
AMediaCodec *codec = nullptr;

// One cached frame: index is the lookup key; size records the payload
// length so requestFrame() can copy the right number of bytes out.
struct Frame {
    int index;
    uint8_t *data;
    int size;
};

Frame frameBuffer[MAX_BUFFER_SIZE];  // zero-initialized (global storage)
int frameCount = 0;

void releaseFrame(int index) {
    if (frameBuffer[index].data != nullptr) {
        free(frameBuffer[index].data);
        frameBuffer[index].data = nullptr;
        frameBuffer[index].size = 0;
    }
}
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_initDecoder(JNIEnv *env, jobject thiz,
        jstring mime_type, jint width, jint height, jobject surface) {
    const char *mime = env->GetStringUTFChars(mime_type, nullptr);
    window = ANativeWindow_fromSurface(env, surface);
    format = AMediaFormat_new();
    AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
    AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
    codec = AMediaCodec_createDecoderByType(mime);
    media_status_t status = AMediaCodec_configure(codec, format, window, nullptr, 0);
    env->ReleaseStringUTFChars(mime_type, mime);
    return (jint) status;
}
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_startDecoder(JNIEnv *env, jobject thiz) {
    return (jint) AMediaCodec_start(codec);
}

extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_stopDecoder(JNIEnv *env, jobject thiz) {
    return (jint) AMediaCodec_stop(codec);
}

extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_releaseDecoder(JNIEnv *env, jobject thiz) {
    for (int i = 0; i < MAX_BUFFER_SIZE; i++) {
        releaseFrame(i);
    }
    media_status_t status = AMediaCodec_delete(codec);
    // Also release the format and window acquired in initDecoder.
    AMediaFormat_delete(format);
    ANativeWindow_release(window);
    return (jint) status;
}
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_decodeFrame(JNIEnv *env, jobject thiz, jint index) {
    AMediaCodecBufferInfo info;
    // dequeueOutputBuffer returns the buffer index (or a negative status
    // code) and fills in the buffer info; there is no separate "get info" call.
    ssize_t status = AMediaCodec_dequeueOutputBuffer(codec, &info, 0);
    if (status >= 0) {
        size_t bufSize = 0;
        uint8_t *buf = AMediaCodec_getOutputBuffer(codec, (size_t) status, &bufSize);
        int size = info.size;
        uint8_t *data = (uint8_t *) malloc(size);
        memcpy(data, buf + info.offset, size);
        AMediaCodec_releaseOutputBuffer(codec, (size_t) status, false);
        // Free whatever currently occupies the slot before overwriting it,
        // so the pool never holds more than MAX_BUFFER_SIZE frames.
        int slot = frameCount % MAX_BUFFER_SIZE;
        releaseFrame(slot);
        frameBuffer[slot].index = index;
        frameBuffer[slot].data = data;
        frameBuffer[slot].size = size;
        frameCount++;
        return size;
    }
    if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
        AMediaFormat *outputFormat = AMediaCodec_getOutputFormat(codec);
        // AMediaFormat_getInt32 returns a bool and writes the value through
        // an out-parameter; it does not return the value itself.
        int32_t outWidth = 0, outHeight = 0;
        AMediaFormat_getInt32(outputFormat, AMEDIAFORMAT_KEY_WIDTH, &outWidth);
        AMediaFormat_getInt32(outputFormat, AMEDIAFORMAT_KEY_HEIGHT, &outHeight);
        ANativeWindow_setBuffersGeometry(window, outWidth, outHeight, WINDOW_FORMAT_RGBA8888);
        AMediaFormat_delete(outputFormat);
    }
    return -1;
}
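Note that decodeFrame only drains the output side; MediaCodec also needs compressed input queued from somewhere. A minimal sketch of that missing input path, assuming an AMediaExtractor named extractor that has already been created and had a track selected (that setup is not shown in the original code):

// Sketch: feed one compressed sample to the codec. The `extractor`
// parameter is a hypothetical, already-configured AMediaExtractor.
#include <media/NdkMediaExtractor.h>

bool queueOneInputSample(AMediaExtractor *extractor) {
    ssize_t inIndex = AMediaCodec_dequeueInputBuffer(codec, 2000 /* us */);
    if (inIndex < 0) {
        return false;  // no input buffer available right now
    }
    size_t capacity = 0;
    uint8_t *inBuf = AMediaCodec_getInputBuffer(codec, (size_t) inIndex, &capacity);
    ssize_t sampleSize = AMediaExtractor_readSampleData(extractor, inBuf, capacity);
    if (sampleSize < 0) {
        // End of stream: tell the codec no more input is coming.
        AMediaCodec_queueInputBuffer(codec, (size_t) inIndex, 0, 0, 0,
                                     AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM);
        return false;
    }
    int64_t pts = AMediaExtractor_getSampleTime(extractor);
    AMediaCodec_queueInputBuffer(codec, (size_t) inIndex, 0, (size_t) sampleSize, pts, 0);
    AMediaExtractor_advance(extractor);
    return true;
}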
extern "C" JNIEXPORT jint JNICALL
Java_com_example_ndkcodec_MainActivity_requestFrame(JNIEnv *env, jobject thiz, jint index, jbyteArray data) {
    for (int i = 0; i < MAX_BUFFER_SIZE; i++) {
        Frame &frame = frameBuffer[i];
        if (frame.data != nullptr && frame.index == index) {
            // sizeof(frame.data) would be the pointer size, not the payload
            // size, so the byte count stored in the Frame is used instead.
            env->SetByteArrayRegion(data, 0, frame.size, (jbyte *) frame.data);
            int size = frame.size;
            releaseFrame(i);
            return size;
        }
    }
    return -1;
}
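The fixed-size array works, but lookup is O(MAX_BUFFER_SIZE) and nothing guards against a decoder thread and a consumer thread touching the pool at once. A more idiomatic C++ cache pool with the same index-key/frame-value design might look like the following sketch (std::unordered_map plus a mutex; the class name IndexedFrameCache is an illustrative assumption):

// Sketch of an index-keyed cache pool with "fetch by index, drop the
// previous frame" semantics; all names here are illustrative.
#include <cstdint>
#include <mutex>
#include <unordered_map>
#include <vector>

class IndexedFrameCache {
public:
    void put(int index, std::vector<uint8_t> rgba) {
        std::lock_guard<std::mutex> lock(mutex_);
        frames_[index] = std::move(rgba);
    }

    // Fetches the frame for `index` and erases the previous frame
    // (index - 1), matching requirement 4. Returns false if not cached.
    bool take(int index, std::vector<uint8_t> &out) {
        std::lock_guard<std::mutex> lock(mutex_);
        auto it = frames_.find(index);
        if (it == frames_.end()) {
            return false;
        }
        out = std::move(it->second);
        frames_.erase(it);
        frames_.erase(index - 1);  // drop the previous frame's data
        return true;
    }

private:
    std::mutex mutex_;
    std::unordered_map<int, std::vector<uint8_t>> frames_;
};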
Format conversion module implementation:
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

GLuint programId;
GLint positionHandle;
GLint textureHandle;

// Full-screen quad. The original code referenced `vertices` and `texCoords`
// without defining them, so typical values are supplied here.
static const GLfloat vertices[] = {
    -1.0f, -1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
    -1.0f,  1.0f, 0.0f,
     1.0f,  1.0f, 0.0f,
};
static const GLfloat texCoords[] = {
    0.0f, 1.0f,
    1.0f, 1.0f,
    0.0f, 0.0f,
    1.0f, 0.0f,
};
void initShader() {
    const char *vertexShaderCode =
            "attribute vec4 position;\n"
            "attribute vec2 texCoord;\n"
            "varying vec2 vTexCoord;\n"
            "void main() {\n"
            "  gl_Position = position;\n"
            "  vTexCoord = texCoord;\n"
            "}\n";
    // The original fragment shader declared samplerExternalOES, but an
    // external texture can only be backed by an EGLImage/SurfaceTexture,
    // not filled with glTexImage2D. Since convertToRGBA() below uploads
    // CPU memory via glTexImage2D, a regular sampler2D is used instead.
    const char *fragmentShaderCode =
            "precision mediump float;\n"
            "uniform sampler2D texture;\n"
            "varying vec2 vTexCoord;\n"
            "void main() {\n"
            "  gl_FragColor = texture2D(texture, vTexCoord);\n"
            "}\n";
    GLuint vertexShaderId = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShaderId, 1, &vertexShaderCode, nullptr);
    glCompileShader(vertexShaderId);
    GLuint fragmentShaderId = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShaderId, 1, &fragmentShaderCode, nullptr);
    glCompileShader(fragmentShaderId);
    programId = glCreateProgram();
    glAttachShader(programId, vertexShaderId);
    glAttachShader(programId, fragmentShaderId);
    glLinkProgram(programId);
    positionHandle = glGetAttribLocation(programId, "position");
    textureHandle = glGetAttribLocation(programId, "texCoord");
}
void convertToRGBA(uint8_t *data, int width, int height) {
    GLuint textureId;
    glGenTextures(1, &textureId);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // Upload the frame data before drawing; glTexImage2D is only valid for
    // GL_TEXTURE_2D, not for GL_TEXTURE_EXTERNAL_OES as in the original.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, data);
    glUseProgram(programId);
    glEnableVertexAttribArray(positionHandle);
    glVertexAttribPointer(positionHandle, 3, GL_FLOAT, GL_FALSE, 0, vertices);
    glEnableVertexAttribArray(textureHandle);
    glVertexAttribPointer(textureHandle, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glDisableVertexAttribArray(positionHandle);
    glDisableVertexAttribArray(textureHandle);
    glDeleteTextures(1, &textureId);
}
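convertToRGBA() draws into whatever framebuffer is currently bound; to actually hand RGBA bytes back to the cache pool, the draw has to target an offscreen framebuffer and be read back. A minimal sketch under that assumption (the helper name renderAndReadBack is hypothetical, and a valid EGL context is assumed to be current):

// Sketch: render the conversion pass into an offscreen FBO and read the
// RGBA result back to CPU memory.
#include <GLES2/gl2.h>
#include <cstdint>
#include <vector>

std::vector<uint8_t> renderAndReadBack(uint8_t *frameData, int width, int height) {
    // Color attachment that will receive the converted frame.
    GLuint colorTex;
    glGenTextures(1, &colorTex);
    glBindTexture(GL_TEXTURE_2D, colorTex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    GLuint fbo;
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, colorTex, 0);

    glViewport(0, 0, width, height);
    convertToRGBA(frameData, width, height);  // draw into the FBO

    // Read the RGBA pixels back. This stalls the GPU, so a PBO or
    // AHardwareBuffer path would be preferable in production.
    std::vector<uint8_t> rgba((size_t) width * height * 4);
    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, rgba.data());

    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glDeleteFramebuffers(1, &fbo);
    glDeleteTextures(1, &colorTex);
    return rgba;
}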
