am 8092009a: Merge "NDK media codec sample"

* commit '8092009a49c525f4fefd053cffac23cc665ba3ef':
  NDK media codec sample
Andrew Hsieh
2014-07-17 12:30:37 +00:00
committed by Android Git Automerger
14 changed files with 1362 additions and 0 deletions

View File

@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.nativecodec">
<uses-feature android:glEsVersion="0x00020000" />
<application android:icon="@drawable/icon" android:label="@string/app_name">
<activity android:name=".NativeCodec"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="19" />
<!-- INTERNET is needed to use a URI-based media player, depending on the URI -->
<uses-permission android:name="android.permission.INTERNET"></uses-permission>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"></uses-permission>
</manifest>

View File

@@ -0,0 +1,2 @@
This sample app requires a video file to be placed at /sdcard/testfile.mp4 (for example, pushed to the device with adb).
For demonstration purposes, such a file is supplied with the sample.

View File

@@ -0,0 +1,4 @@
# Indicates whether an apk should be generated for each density.
split.density=false
# Project target.
target=android-L

View File

@@ -0,0 +1,30 @@
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := native-codec-jni
LOCAL_SRC_FILES := native-codec-jni.cpp looper.cpp
# for native multimedia
LOCAL_LDLIBS += -lOpenMAXAL -lmediandk
# for logging
LOCAL_LDLIBS += -llog
# for native windows
LOCAL_LDLIBS += -landroid
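# keep assert() active in this sample by undefining NDEBUG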
LOCAL_CFLAGS += -UNDEBUG
include $(BUILD_SHARED_LIBRARY)

View File

@@ -0,0 +1,2 @@
APP_PLATFORM := android-L
APP_ABI := all

View File

@@ -0,0 +1,152 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "looper.h"
#include <assert.h>
#include <jni.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <limits.h>
#include <semaphore.h>
// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
#include <android/log.h>
#define TAG "NativeCodec-looper"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
struct loopermessage;
typedef struct loopermessage loopermessage;
struct loopermessage {
int what;
void *obj;
loopermessage *next;
bool quit;
};
void* looper::trampoline(void* p) {
((looper*)p)->loop();
return NULL;
}
looper::looper() {
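// headdataavailable counts queued messages (initially zero);
// headwriteprotect serves as a binary mutex around the list (initially free)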
head = NULL;
sem_init(&headdataavailable, 0, 0);
sem_init(&headwriteprotect, 0, 1);
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_create(&worker, &attr, trampoline, this);
running = true;
}
looper::~looper() {
if (running) {
LOGV("Looper deleted while still running. Some messages will not be processed");
quit();
}
}
void looper::post(int what, void *data, bool flush) {
loopermessage *msg = new loopermessage();
msg->what = what;
msg->obj = data;
msg->next = NULL;
msg->quit = false;
addmsg(msg, flush);
}
void looper::addmsg(loopermessage *msg, bool flush) {
sem_wait(&headwriteprotect);
loopermessage *h = head;
if (flush) {
while(h) {
loopermessage *next = h->next;
delete h;
h = next;
}
h = NULL;
}
if (h) {
while (h->next) {
h = h->next;
}
h->next = msg;
} else {
head = msg;
}
LOGV("post msg %d", msg->what);
sem_post(&headwriteprotect);
sem_post(&headdataavailable);
}
void looper::loop() {
while(true) {
// wait for available message
sem_wait(&headdataavailable);
// get next available message
sem_wait(&headwriteprotect);
loopermessage *msg = head;
if (msg == NULL) {
LOGV("no msg");
sem_post(&headwriteprotect);
continue;
}
head = msg->next;
sem_post(&headwriteprotect);
if (msg->quit) {
LOGV("quitting");
delete msg;
return;
}
LOGV("processing msg %d", msg->what);
handle(msg->what, msg->obj);
delete msg;
}
}
void looper::quit() {
LOGV("quit");
loopermessage *msg = new loopermessage();
msg->what = 0;
msg->obj = NULL;
msg->next = NULL;
msg->quit = true;
addmsg(msg, false);
void *retval;
pthread_join(worker, &retval);
sem_destroy(&headdataavailable);
sem_destroy(&headwriteprotect);
running = false;
}
void looper::handle(int what, void* obj) {
LOGV("dropping msg %d %p", what, obj);
}

View File

@@ -0,0 +1,41 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <pthread.h>
#include <semaphore.h>
struct loopermessage;
class looper {
public:
looper();
virtual ~looper();
void post(int what, void *data, bool flush = false);
void quit();
virtual void handle(int what, void *data);
private:
void addmsg(loopermessage *msg, bool flush);
static void* trampoline(void* p);
void loop();
loopermessage *head;
pthread_t worker;
sem_t headwriteprotect;
sem_t headdataavailable;
bool running;
};
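
For orientation, a minimal sketch of how this looper API is meant to be used follows. The subclass name and message code are hypothetical, invented for illustration, and the sketch assumes it is compiled and linked together with looper.cpp.

#include "looper.h"
#include <cstdio>

// hypothetical message code, for illustration only
enum { kMsgHello = 1 };

struct echolooper : public looper {
// handle() is invoked on the looper's worker thread
virtual void handle(int what, void *data) {
if (what == kMsgHello) {
printf("worker got: %s\n", (const char *) data);
}
}
};

int main() {
echolooper l;                        // the base constructor spawns the worker thread
l.post(kMsgHello, (void *) "hello"); // enqueue a message for the worker
l.quit();                            // pending messages drain, then the worker is joined
return 0;
}

Note that the base-class constructor already starts the worker thread, so a subclass should only have messages posted to it once it is fully constructed, as above.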

View File

@@ -0,0 +1,328 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* This is a JNI example where we use native methods to play video
* using the native AMedia* APIs.
* See the corresponding Java source file located at:
*
* src/com/example/nativecodec/NativeMedia.java
*
* In this example we use assert() for "impossible" error conditions,
* and explicit handling and recovery for more likely error conditions.
*/
#include <assert.h>
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <limits.h>
#include <time.h>
#include "looper.h"
#include "media/NdkMediaCodec.h"
#include "media/NdkMediaExtractor.h"
// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
#include <android/log.h>
#define TAG "NativeCodec"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
// for native window JNI
#include <android/native_window_jni.h>
typedef struct {
int fd;
ANativeWindow* window;
AMediaExtractor* ex;
AMediaCodec *codec;
int64_t renderstart;
bool sawInputEOS;
bool sawOutputEOS;
bool isPlaying;
bool renderonce;
} workerdata;
workerdata data = {-1, NULL, NULL, NULL, 0, false, false, false, false};
enum {
kMsgCodecBuffer,
kMsgPause,
kMsgResume,
kMsgPauseAck,
kMsgDecodeDone,
kMsgSeek,
};
class mylooper: public looper {
virtual void handle(int what, void* obj);
};
static mylooper *mlooper = NULL;
int64_t systemnanotime() {
timespec now;
clock_gettime(CLOCK_MONOTONIC, &now);
return now.tv_sec * 1000000000LL + now.tv_nsec;
}
void doCodecWork(workerdata *d) {
ssize_t bufidx = -1;
if (!d->sawInputEOS) {
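// input side: dequeue an empty input buffer (2 ms timeout), fill it with
// one sample from the extractor, and queue it back to the codec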
bufidx = AMediaCodec_dequeueInputBuffer(d->codec, 2000);
LOGV("input buffer %zd", bufidx);
if (bufidx >= 0) {
size_t bufsize;
uint8_t *buf = AMediaCodec_getInputBuffer(d->codec, bufidx, &bufsize);
ssize_t sampleSize = AMediaExtractor_readSampleData(d->ex, buf, bufsize);
if (sampleSize < 0) {
sampleSize = 0;
d->sawInputEOS = true;
LOGV("EOS");
}
int64_t presentationTimeUs = AMediaExtractor_getSampleTime(d->ex);
AMediaCodec_queueInputBuffer(d->codec, bufidx, 0, sampleSize, presentationTimeUs,
d->sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
AMediaExtractor_advance(d->ex);
}
}
if (!d->sawOutputEOS) {
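// output side: drain a decoded buffer if one is ready (zero timeout)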
AMediaCodecBufferInfo info;
ssize_t status = AMediaCodec_dequeueOutputBuffer(d->codec, &info, 0);
if (status >= 0) {
if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
LOGV("output EOS");
d->sawOutputEOS = true;
}
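// pace rendering against the wall clock: renderstart anchors the first
// frame's presentation timestamp to "now", and each later frame sleeps
// until its own timestamp comes due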
int64_t presentationNano = info.presentationTimeUs * 1000;
if (d->renderstart < 0) {
d->renderstart = systemnanotime() - presentationNano;
}
int64_t delay = (d->renderstart + presentationNano) - systemnanotime();
if (delay > 0) {
usleep(delay / 1000);
}
AMediaCodec_releaseOutputBuffer(d->codec, status, info.size != 0);
if (d->renderonce) {
d->renderonce = false;
return;
}
} else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
LOGV("output buffers changed");
} else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
AMediaFormat *format = NULL;
format = AMediaCodec_getOutputFormat(d->codec);
LOGV("format changed to: %s", AMediaFormat_toString(format));
AMediaFormat_delete(format);
} else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
LOGV("no output buffer right now");
} else {
LOGV("unexpected info code: %zd", status);
}
}
if (!d->sawInputEOS || !d->sawOutputEOS) {
mlooper->post(kMsgCodecBuffer, d);
}
}
void mylooper::handle(int what, void* obj) {
switch (what) {
case kMsgCodecBuffer:
doCodecWork((workerdata*)obj);
break;
case kMsgDecodeDone:
{
workerdata *d = (workerdata*)obj;
AMediaCodec_stop(d->codec);
AMediaCodec_delete(d->codec);
AMediaExtractor_delete(d->ex);
d->sawInputEOS = true;
d->sawOutputEOS = true;
}
break;
case kMsgSeek:
{
workerdata *d = (workerdata*)obj;
AMediaExtractor_seekTo(d->ex, 0, AMEDIAEXTRACTOR_SEEK_NEXT_SYNC);
AMediaCodec_flush(d->codec);
d->renderstart = -1;
d->sawInputEOS = false;
d->sawOutputEOS = false;
if (!d->isPlaying) {
d->renderonce = true;
post(kMsgCodecBuffer, d);
}
LOGV("seeked");
}
break;
case kMsgPause:
{
workerdata *d = (workerdata*)obj;
if (d->isPlaying) {
// flush all outstanding codecbuffer messages with a no-op message
d->isPlaying = false;
post(kMsgPauseAck, NULL, true);
}
}
break;
case kMsgResume:
{
workerdata *d = (workerdata*)obj;
if (!d->isPlaying) {
d->renderstart = -1;
d->isPlaying = true;
post(kMsgCodecBuffer, d);
}
}
break;
}
}
extern "C" {
jboolean Java_com_example_nativecodec_NativeCodec_createStreamingMediaPlayer(JNIEnv* env,
jclass clazz, jstring filename)
{
LOGV("@@@ create");
// convert Java string to UTF-8
const char *utf8 = env->GetStringUTFChars(filename, NULL);
LOGV("opening %s", utf8);
int fd = open(utf8, O_RDONLY);
env->ReleaseStringUTFChars(filename, utf8);
if (fd < 0) {
LOGV("failed: %d (%s)", fd, strerror(errno));
return JNI_FALSE;
}
data.fd = fd;
workerdata *d = &data;
AMediaExtractor *ex = AMediaExtractor_new();
media_status_t err = AMediaExtractor_setDataSourceFd(ex, d->fd, 0, LONG_MAX);
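// the extractor now holds its own reference to the file,
// so the local fd can be closed right away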
close(d->fd);
if (err != AMEDIA_OK) {
LOGV("setDataSource error: %d", err);
return JNI_FALSE;
}
int numtracks = AMediaExtractor_getTrackCount(ex);
AMediaCodec *codec = NULL;
LOGV("input has %d tracks", numtracks);
for (int i = 0; i < numtracks; i++) {
AMediaFormat *format = AMediaExtractor_getTrackFormat(ex, i);
const char *s = AMediaFormat_toString(format);
LOGV("track %d format: %s", i, s);
const char *mime;
if (!AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime)) {
LOGV("no mime type");
return JNI_FALSE;
} else if (!strncmp(mime, "video/", 6)) {
// Omitting most error handling for clarity.
// Production code should check for errors.
AMediaExtractor_selectTrack(ex, i);
codec = AMediaCodec_createDecoderByType(mime);
AMediaCodec_configure(codec, format, d->window, NULL, 0);
d->ex = ex;
d->codec = codec;
d->renderstart = -1;
d->sawInputEOS = false;
d->sawOutputEOS = false;
d->isPlaying = false;
d->renderonce = true;
AMediaCodec_start(codec);
}
AMediaFormat_delete(format);
}
if (codec == NULL) {
LOGV("no video track found");
AMediaExtractor_delete(ex);
return JNI_FALSE;
}
mlooper = new mylooper();
mlooper->post(kMsgCodecBuffer, d);
return JNI_TRUE;
}
// set the playing state for the streaming media player
void Java_com_example_nativecodec_NativeCodec_setPlayingStreamingMediaPlayer(JNIEnv* env,
jclass clazz, jboolean isPlaying)
{
LOGV("@@@ playpause: %d", isPlaying);
if (mlooper) {
if (isPlaying) {
mlooper->post(kMsgResume, &data);
} else {
mlooper->post(kMsgPause, &data);
}
}
}
// shut down the native media system
void Java_com_example_nativecodec_NativeCodec_shutdown(JNIEnv* env, jclass clazz)
{
LOGV("@@@ shutdown");
if (mlooper) {
mlooper->post(kMsgDecodeDone, &data, true /* flush */);
mlooper->quit();
delete mlooper;
mlooper = NULL;
}
if (data.window) {
ANativeWindow_release(data.window);
data.window = NULL;
}
}
// set the surface
void Java_com_example_nativecodec_NativeCodec_setSurface(JNIEnv *env, jclass clazz, jobject surface)
{
// obtain a native window from a Java surface
if (data.window) {
ANativeWindow_release(data.window);
data.window = NULL;
}
data.window = ANativeWindow_fromSurface(env, surface);
LOGV("@@@ setsurface %p", data.window);
}
// rewind the streaming media player
void Java_com_example_nativecodec_NativeCodec_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz)
{
LOGV("@@@ rewind");
mlooper->post(kMsgSeek, &data);
}
}

Binary file not shown (image, 2.5 KiB).

View File

@@ -0,0 +1,84 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_margin="8dip"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/source_select"
/>
<Spinner
android:id="@+id/source_spinner"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/source_prompt"
/>
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_margin="8dip"
>
<Button
android:id="@+id/start_native"
android:text="@string/start_native"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
<Button
android:id="@+id/rewind_native"
android:text="@string/rewind_native"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_margin="8dip"
>
<RadioButton
android:id="@+id/radio1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="10dip"
/>
<SurfaceView
android:id="@+id/surfaceview1"
android:layout_width="640px"
android:layout_height="480px"
/>
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_margin="8dip"
>
<RadioButton
android:id="@+id/radio2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:padding="10dip"
/>
<com.example.nativecodec.MyGLSurfaceView
android:id="@+id/glsurfaceview1"
android:layout_width="640px"
android:layout_height="480px"
/>
</LinearLayout>
</LinearLayout>

View File

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">NativeCodec</string>
<string name="start_java">Start/Pause\nJava MediaPlayer</string>
<string name="start_native">Start/Pause</string>
<string name="rewind_native">Rewind</string>
<string name="source_select">Please select the media source</string>
<string name="source_prompt">Media source</string>
<string-array name="source_array">
<item>/sdcard/testfile.mp4</item>
</string-array>
<string name="sink_select">Please select the video sink</string>
<string name="sink_prompt">Video sink</string>
<string-array name="sink_array">
<item>Surface</item>
<item>Texture</item>
</string-array>
</resources>

View File

@@ -0,0 +1,354 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.nativecodec;
import android.graphics.SurfaceTexture;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.SystemClock;
import android.util.AttributeSet;
public class MyGLSurfaceView extends GLSurfaceView {
MyRenderer mRenderer;
public MyGLSurfaceView(Context context) {
this(context, null);
}
public MyGLSurfaceView(Context context, AttributeSet attributeSet) {
super(context, attributeSet);
init();
}
private void init() {
setEGLContextClientVersion(2);
mRenderer = new MyRenderer();
setRenderer(mRenderer);
Log.i("@@@", "setrenderer");
}
@Override
public void onPause() {
mRenderer.onPause();
super.onPause();
}
@Override
public void onResume() {
super.onResume();
mRenderer.onResume();
}
public SurfaceTexture getSurfaceTexture() {
return mRenderer.getSurfaceTexture();
}
}
class MyRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public MyRenderer() {
mVertices = ByteBuffer.allocateDirect(mVerticesData.length
* FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
mVertices.put(mVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
}
public void onPause() {
}
public void onResume() {
mLastTime = SystemClock.elapsedRealtimeNanos();
}
@Override
public void onDrawFrame(GL10 glUnused) {
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mVertices.position(VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
VERTICES_DATA_STRIDE_BYTES, mVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mVertices.position(VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
VERTICES_DATA_STRIDE_BYTES, mVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
long now = SystemClock.elapsedRealtimeNanos();
mRunTime += (now - mLastTime);
mLastTime = now;
double d = ((double)mRunTime) / 1000000000;
Matrix.setIdentityM(mMMatrix, 0);
Matrix.rotateM(mMMatrix, 0, 30, (float)Math.sin(d), (float)Math.cos(d), 0);
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
GLES20.glViewport(0, 0, width, height);
mRatio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -mRatio, mRatio, -1, 1, 3, 7);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
/* Set up alpha blending and an Android background color */
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glClearColor(0.643f, 0.776f, 0.223f, 1.0f);
/* Set up shaders and handles to their variables */
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get uniform location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get uniform location for uSTMatrix");
}
/*
* Create our texture. This has to be done each time the
* surface is created.
*/
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
// Can't do mipmapping with camera source
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
// Clamp to edge is the only option
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameteri mTextureID");
/*
* Create the SurfaceTexture that will feed this textureID, and pass it to the camera
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 4f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
synchronized(this) {
updateSurface = false;
}
}
@Override
synchronized public void onFrameAvailable(SurfaceTexture surface) {
/* For simplicity, SurfaceTexture calls here when it has new
* data available. Call may come in from some random thread,
* so let's be safe and synchronize. No OpenGL calls can be done here.
*/
updateSurface = true;
//Log.v(TAG, "onFrameAvailable " + surface.getTimestamp());
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
private static final int FLOAT_SIZE_BYTES = 4;
private static final int VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int VERTICES_DATA_POS_OFFSET = 0;
private static final int VERTICES_DATA_UV_OFFSET = 3;
private final float[] mVerticesData = {
// X, Y, Z, U, V
-1.25f, -1.0f, 0, 0.f, 0.f,
1.25f, -1.0f, 0, 1.f, 0.f,
-1.25f, 1.0f, 0, 0.f, 1.f,
1.25f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mVertices;
private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mProjMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private float mRatio = 1.0f;
private SurfaceTexture mSurface;
private boolean updateSurface = false;
private long mLastTime = -1;
private long mRunTime = 0;
private static final String TAG = "MyRenderer";
// GL_TEXTURE_EXTERNAL_OES, from the OES_EGL_image_external extension
// (not defined in the GLES20 class)
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
public SurfaceTexture getSurfaceTexture() {
return mSurface;
}
}

View File

@@ -0,0 +1,319 @@
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.nativecodec;
import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.RadioButton;
import android.widget.Spinner;
import java.io.IOException;
public class NativeCodec extends Activity {
static final String TAG = "NativeCodec";
String mSourceString = null;
SurfaceView mSurfaceView1;
SurfaceHolder mSurfaceHolder1;
VideoSink mSelectedVideoSink;
VideoSink mNativeCodecPlayerVideoSink;
SurfaceHolderVideoSink mSurfaceHolder1VideoSink;
GLViewVideoSink mGLView1VideoSink;
boolean mCreated = false;
boolean mIsPlaying = false;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
// set up the Surface 1 video sink
mSurfaceView1 = (SurfaceView) findViewById(R.id.surfaceview1);
mSurfaceHolder1 = mSurfaceView1.getHolder();
mSurfaceHolder1.addCallback(new SurfaceHolder.Callback() {
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height="
+ height);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.v(TAG, "surfaceCreated");
if (mRadio1.isChecked()) {
setSurface(holder.getSurface());
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.v(TAG, "surfaceDestroyed");
}
});
// initialize content source spinner
Spinner sourceSpinner = (Spinner) findViewById(R.id.source_spinner);
ArrayAdapter<CharSequence> sourceAdapter = ArrayAdapter.createFromResource(
this, R.array.source_array, android.R.layout.simple_spinner_item);
sourceAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
sourceSpinner.setAdapter(sourceAdapter);
sourceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
mSourceString = parent.getItemAtPosition(pos).toString();
Log.v(TAG, "onItemSelected " + mSourceString);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
Log.v(TAG, "onNothingSelected");
mSourceString = null;
}
});
mRadio1 = (RadioButton) findViewById(R.id.radio1);
mRadio2 = (RadioButton) findViewById(R.id.radio2);
OnCheckedChangeListener checklistener = new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
Log.i("@@@@", "oncheckedchanged");
if (buttonView == mRadio1 && isChecked) {
mRadio2.setChecked(false);
}
if (buttonView == mRadio2 && isChecked) {
mRadio1.setChecked(false);
}
if (isChecked) {
if (mRadio1.isChecked()) {
if (mSurfaceHolder1VideoSink == null) {
mSurfaceHolder1VideoSink = new SurfaceHolderVideoSink(mSurfaceHolder1);
}
mSelectedVideoSink = mSurfaceHolder1VideoSink;
mGLView1.onPause();
Log.i("@@@@", "glview pause");
} else {
mGLView1.onResume();
if (mGLView1VideoSink == null) {
mGLView1VideoSink = new GLViewVideoSink(mGLView1);
}
mSelectedVideoSink = mGLView1VideoSink;
}
switchSurface();
}
}
};
mRadio1.setOnCheckedChangeListener(checklistener);
mRadio2.setOnCheckedChangeListener(checklistener);
mRadio2.toggle();
// the surfaces themselves are easier targets than the radio buttons
mSurfaceView1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mRadio1.toggle();
}
});
mGLView1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mRadio2.toggle();
}
});
// initialize button click handlers
// native codec player start/pause
((Button) findViewById(R.id.start_native)).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (!mCreated) {
if (mNativeCodecPlayerVideoSink == null) {
if (mSelectedVideoSink == null) {
return;
}
mSelectedVideoSink.useAsSinkForNative();
mNativeCodecPlayerVideoSink = mSelectedVideoSink;
}
if (mSourceString != null) {
mCreated = createStreamingMediaPlayer(mSourceString);
}
}
if (mCreated) {
mIsPlaying = !mIsPlaying;
setPlayingStreamingMediaPlayer(mIsPlaying);
}
}
});
// native codec player rewind
((Button) findViewById(R.id.rewind_native)).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (mNativeCodecPlayerVideoSink != null) {
rewindStreamingMediaPlayer();
}
}
});
}
void switchSurface() {
if (mCreated && mNativeCodecPlayerVideoSink != mSelectedVideoSink) {
// shutdown and recreate on other surface
Log.i("@@@", "shutting down player");
shutdown();
mCreated = false;
mSelectedVideoSink.useAsSinkForNative();
mNativeCodecPlayerVideoSink = mSelectedVideoSink;
if (mSourceString != null) {
Log.i("@@@", "recreating player");
mCreated = createStreamingMediaPlayer(mSourceString);
mIsPlaying = false;
}
}
}
/** Called when the activity is about to be paused. */
@Override
protected void onPause()
{
mIsPlaying = false;
setPlayingStreamingMediaPlayer(false);
mGLView1.onPause();
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
if (mRadio2.isChecked()) {
mGLView1.onResume();
}
}
/** Called when the activity is about to be destroyed. */
@Override
protected void onDestroy()
{
shutdown();
mCreated = false;
super.onDestroy();
}
private MyGLSurfaceView mGLView1;
private RadioButton mRadio1;
private RadioButton mRadio2;
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native boolean createStreamingMediaPlayer(String filename);
public static native void setPlayingStreamingMediaPlayer(boolean isPlaying);
public static native void shutdown();
public static native void setSurface(Surface surface);
public static native void rewindStreamingMediaPlayer();
/** Load jni .so on initialization */
static {
System.loadLibrary("native-codec-jni");
}
// VideoSink abstracts out the difference between Surface and SurfaceTexture
// aka SurfaceHolder and GLSurfaceView
static abstract class VideoSink {
abstract void setFixedSize(int width, int height);
abstract void useAsSinkForNative();
}
static class SurfaceHolderVideoSink extends VideoSink {
private final SurfaceHolder mSurfaceHolder;
SurfaceHolderVideoSink(SurfaceHolder surfaceHolder) {
mSurfaceHolder = surfaceHolder;
}
@Override
void setFixedSize(int width, int height) {
mSurfaceHolder.setFixedSize(width, height);
}
@Override
void useAsSinkForNative() {
Surface s = mSurfaceHolder.getSurface();
Log.i("@@@", "setting surface " + s);
setSurface(s);
}
}
static class GLViewVideoSink extends VideoSink {
private final MyGLSurfaceView mMyGLSurfaceView;
GLViewVideoSink(MyGLSurfaceView myGLSurfaceView) {
mMyGLSurfaceView = myGLSurfaceView;
}
@Override
void setFixedSize(int width, int height) {
}
@Override
void useAsSinkForNative() {
SurfaceTexture st = mMyGLSurfaceView.getSurfaceTexture();
Surface s = new Surface(st);
setSurface(s);
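// the native side acquired its own reference via ANativeWindow_fromSurface,
// so this Java-level Surface wrapper can be released immediately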
s.release();
}
}
}