diff --git a/ndk/platforms/android-L/samples/native-codec/AndroidManifest.xml b/ndk/platforms/android-L/samples/native-codec/AndroidManifest.xml
new file mode 100644
index 000000000..addf1f0e6
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/AndroidManifest.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+          package="com.example.nativecodec"
+          android:versionCode="1"
+          android:versionName="1.0">
+
+    <application android:icon="@drawable/icon"
+                 android:label="@string/app_name">
+
+        <activity android:name=".NativeCodec"
+                  android:label="@string/app_name"
+                  android:screenOrientation="landscape">
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+
+    </application>
+
+</manifest>
diff --git a/ndk/platforms/android-L/samples/native-codec/README.txt b/ndk/platforms/android-L/samples/native-codec/README.txt
new file mode 100644
index 000000000..446941a7e
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/README.txt
@@ -0,0 +1,2 @@
+This sample app requires a video file to be present at /sdcard/testfile.mp4.
+For demonstration purposes, such a file is supplied with this sample.
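+It can be pushed to the device with adb, for example:
+
+    adb push testfile.mp4 /sdcard/testfile.mp4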
diff --git a/ndk/platforms/android-L/samples/native-codec/default.properties b/ndk/platforms/android-L/samples/native-codec/default.properties
new file mode 100644
index 000000000..92f9bf030
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/default.properties
@@ -0,0 +1,4 @@
+# Indicates whether an apk should be generated for each density.
+split.density=false
+# Project target.
+target=android-L
diff --git a/ndk/platforms/android-L/samples/native-codec/jni/Android.mk b/ndk/platforms/android-L/samples/native-codec/jni/Android.mk
new file mode 100644
index 000000000..34300c4be
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/jni/Android.mk
@@ -0,0 +1,30 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := native-codec-jni
+LOCAL_SRC_FILES := native-codec-jni.cpp looper.cpp
+# for native multimedia
+LOCAL_LDLIBS += -lOpenMAXAL -lmediandk
+# for logging
+LOCAL_LDLIBS += -llog
+# for native windows
+LOCAL_LDLIBS += -landroid
+
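+# keep assert() enabled; the sample uses it for "impossible" conditions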
+LOCAL_CFLAGS += -UNDEBUG
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/ndk/platforms/android-L/samples/native-codec/jni/Application.mk b/ndk/platforms/android-L/samples/native-codec/jni/Application.mk
new file mode 100644
index 000000000..5b3fb7259
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/jni/Application.mk
@@ -0,0 +1,2 @@
+APP_PLATFORM := android-L
+APP_ABI := all
diff --git a/ndk/platforms/android-L/samples/native-codec/jni/looper.cpp b/ndk/platforms/android-L/samples/native-codec/jni/looper.cpp
new file mode 100644
index 000000000..98112f4d8
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/jni/looper.cpp
@@ -0,0 +1,152 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "looper.h"
+
+#include <assert.h>
+#include <fcntl.h>
+#include <jni.h>
+#include <pthread.h>
+#include <semaphore.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
+#include <android/log.h>
+#define TAG "NativeCodec-looper"
+#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+
+
+struct loopermessage {
+ int what;
+ void *obj;
+ loopermessage *next;
+ bool quit;
+};
+
+
+
+void* looper::trampoline(void* p) {
+ ((looper*)p)->loop();
+ return NULL;
+}
+
+looper::looper() {
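+    // headdataavailable counts queued messages; headwriteprotect acts as a
+    // mutex protecting the linked list of pending messages.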
+    head = NULL; // the message queue starts out empty
+    sem_init(&headdataavailable, 0, 0);
+    sem_init(&headwriteprotect, 0, 1);
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+
+ pthread_create(&worker, &attr, trampoline, this);
+ running = true;
+}
+
+
+looper::~looper() {
+ if (running) {
+ LOGV("Looper deleted while still running. Some messages will not be processed");
+ quit();
+ }
+}
+
+void looper::post(int what, void *data, bool flush) {
+ loopermessage *msg = new loopermessage();
+ msg->what = what;
+ msg->obj = data;
+ msg->next = NULL;
+ msg->quit = false;
+ addmsg(msg, flush);
+}
+
+void looper::addmsg(loopermessage *msg, bool flush) {
+ sem_wait(&headwriteprotect);
+ loopermessage *h = head;
+
+ if (flush) {
+ while(h) {
+ loopermessage *next = h->next;
+ delete h;
+ h = next;
+ }
+ h = NULL;
+ }
+ if (h) {
+ while (h->next) {
+ h = h->next;
+ }
+ h->next = msg;
+ } else {
+ head = msg;
+ }
+ LOGV("post msg %d", msg->what);
+ sem_post(&headwriteprotect);
+ sem_post(&headdataavailable);
+}
+
+void looper::loop() {
+ while(true) {
+ // wait for available message
+ sem_wait(&headdataavailable);
+
+ // get next available message
+ sem_wait(&headwriteprotect);
+ loopermessage *msg = head;
+ if (msg == NULL) {
+ LOGV("no msg");
+ sem_post(&headwriteprotect);
+ continue;
+ }
+ head = msg->next;
+ sem_post(&headwriteprotect);
+
+ if (msg->quit) {
+ LOGV("quitting");
+ delete msg;
+ return;
+ }
+ LOGV("processing msg %d", msg->what);
+ handle(msg->what, msg->obj);
+ delete msg;
+ }
+}
+
+void looper::quit() {
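+    // Append a quit message behind any queued work, then block until the
+    // worker thread has drained the queue and exited.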
+ LOGV("quit");
+ loopermessage *msg = new loopermessage();
+ msg->what = 0;
+ msg->obj = NULL;
+ msg->next = NULL;
+ msg->quit = true;
+ addmsg(msg, false);
+ void *retval;
+ pthread_join(worker, &retval);
+ sem_destroy(&headdataavailable);
+ sem_destroy(&headwriteprotect);
+ running = false;
+}
+
+void looper::handle(int what, void* obj) {
+ LOGV("dropping msg %d %p", what, obj);
+}
+
diff --git a/ndk/platforms/android-L/samples/native-codec/jni/looper.h b/ndk/platforms/android-L/samples/native-codec/jni/looper.h
new file mode 100644
index 000000000..531a7cbcf
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/jni/looper.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <pthread.h>
+#include <semaphore.h>
+
+struct loopermessage;
+
+class looper {
+ public:
+ looper();
+    virtual ~looper();
+
+ void post(int what, void *data, bool flush = false);
+ void quit();
+
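+    // Subclasses override handle() to process posted messages; the default
+    // implementation just logs and drops them.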
+ virtual void handle(int what, void *data);
+
+ private:
+ void addmsg(loopermessage *msg, bool flush);
+ static void* trampoline(void* p);
+ void loop();
+ loopermessage *head;
+ pthread_t worker;
+ sem_t headwriteprotect;
+ sem_t headdataavailable;
+ bool running;
+};
diff --git a/ndk/platforms/android-L/samples/native-codec/jni/native-codec-jni.cpp b/ndk/platforms/android-L/samples/native-codec/jni/native-codec-jni.cpp
new file mode 100644
index 000000000..6053abe2f
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/jni/native-codec-jni.cpp
@@ -0,0 +1,328 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* This is a JNI example where we use native methods to play video
+ * using the native AMedia* APIs.
+ * See the corresponding Java source file located at:
+ *
+ * src/com/example/nativecodec/NativeMedia.java
+ *
+ * In this example we use assert() for "impossible" error conditions,
+ * and explicit handling and recovery for more likely error conditions.
+ */
+
+#include <assert.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <jni.h>
+#include <limits.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <unistd.h>
+
+#include "looper.h"
+#include "media/NdkMediaCodec.h"
+#include "media/NdkMediaExtractor.h"
+
+// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
+#include <android/log.h>
+#define TAG "NativeCodec"
+#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
+
+// for native window JNI
+#include <android/native_window_jni.h>
+
+typedef struct {
+ int fd;
+ ANativeWindow* window;
+ AMediaExtractor* ex;
+ AMediaCodec *codec;
+ int64_t renderstart;
+ bool sawInputEOS;
+ bool sawOutputEOS;
+ bool isPlaying;
+ bool renderonce;
+} workerdata;
+
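+// A single global instance: this sample decodes at most one stream at a time.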
+workerdata data = {-1, NULL, NULL, NULL, 0, false, false, false, false};
+
+enum {
+ kMsgCodecBuffer,
+ kMsgPause,
+ kMsgResume,
+ kMsgPauseAck,
+ kMsgDecodeDone,
+ kMsgSeek,
+};
+
+
+
+class mylooper: public looper {
+ virtual void handle(int what, void* obj);
+};
+
+static mylooper *mlooper = NULL;
+
+int64_t systemnanotime() {
+ timespec now;
+ clock_gettime(CLOCK_MONOTONIC, &now);
+ return now.tv_sec * 1000000000LL + now.tv_nsec;
+}
+
+void doCodecWork(workerdata *d) {
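+    // One pump of the decode loop: feed at most one input buffer from the
+    // extractor, then drain at most one output buffer, re-posting this
+    // message to the looper until both sides have seen EOS.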
+
+ ssize_t bufidx = -1;
+ if (!d->sawInputEOS) {
+ bufidx = AMediaCodec_dequeueInputBuffer(d->codec, 2000);
+ LOGV("input buffer %zd", bufidx);
+ if (bufidx >= 0) {
+ size_t bufsize;
+ uint8_t *buf = AMediaCodec_getInputBuffer(d->codec, bufidx, &bufsize);
+ ssize_t sampleSize = AMediaExtractor_readSampleData(d->ex, buf, bufsize);
+ if (sampleSize < 0) {
+ sampleSize = 0;
+ d->sawInputEOS = true;
+ LOGV("EOS");
+ }
+ int64_t presentationTimeUs = AMediaExtractor_getSampleTime(d->ex);
+
+ AMediaCodec_queueInputBuffer(d->codec, bufidx, 0, sampleSize, presentationTimeUs,
+ d->sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
+ AMediaExtractor_advance(d->ex);
+ }
+ }
+
+ if (!d->sawOutputEOS) {
+ AMediaCodecBufferInfo info;
+ ssize_t status = AMediaCodec_dequeueOutputBuffer(d->codec, &info, 0);
+ if (status >= 0) {
+ if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+ LOGV("output EOS");
+ d->sawOutputEOS = true;
+ }
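+            // Pace the output: anchor the first frame's timestamp to the
+            // current clock, then sleep until each frame's presentation time.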
+ int64_t presentationNano = info.presentationTimeUs * 1000;
+ if (d->renderstart < 0) {
+ d->renderstart = systemnanotime() - presentationNano;
+ }
+ int64_t delay = (d->renderstart + presentationNano) - systemnanotime();
+ if (delay > 0) {
+ usleep(delay / 1000);
+ }
+ AMediaCodec_releaseOutputBuffer(d->codec, status, info.size != 0);
+ if (d->renderonce) {
+ d->renderonce = false;
+ return;
+ }
+ } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
+ LOGV("output buffers changed");
+ } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
+ AMediaFormat *format = NULL;
+ format = AMediaCodec_getOutputFormat(d->codec);
+ LOGV("format changed to: %s", AMediaFormat_toString(format));
+ AMediaFormat_delete(format);
+ } else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+ LOGV("no output buffer right now");
+ } else {
+ LOGV("unexpected info code: %zd", status);
+ }
+ }
+
+ if (!d->sawInputEOS || !d->sawOutputEOS) {
+ mlooper->post(kMsgCodecBuffer, d);
+ }
+}
+
+void mylooper::handle(int what, void* obj) {
+ switch (what) {
+ case kMsgCodecBuffer:
+ doCodecWork((workerdata*)obj);
+ break;
+
+ case kMsgDecodeDone:
+ {
+ workerdata *d = (workerdata*)obj;
+ AMediaCodec_stop(d->codec);
+ AMediaCodec_delete(d->codec);
+ AMediaExtractor_delete(d->ex);
+ d->sawInputEOS = true;
+ d->sawOutputEOS = true;
+ }
+ break;
+
+ case kMsgSeek:
+ {
+ workerdata *d = (workerdata*)obj;
+ AMediaExtractor_seekTo(d->ex, 0, AMEDIAEXTRACTOR_SEEK_NEXT_SYNC);
+ AMediaCodec_flush(d->codec);
+ d->renderstart = -1;
+ d->sawInputEOS = false;
+ d->sawOutputEOS = false;
+ if (!d->isPlaying) {
+ d->renderonce = true;
+ post(kMsgCodecBuffer, d);
+ }
+ LOGV("seeked");
+ }
+ break;
+
+ case kMsgPause:
+ {
+ workerdata *d = (workerdata*)obj;
+ if (d->isPlaying) {
+ // flush all outstanding codecbuffer messages with a no-op message
+ d->isPlaying = false;
+ post(kMsgPauseAck, NULL, true);
+ }
+ }
+ break;
+
+ case kMsgResume:
+ {
+ workerdata *d = (workerdata*)obj;
+ if (!d->isPlaying) {
+ d->renderstart = -1;
+ d->isPlaying = true;
+ post(kMsgCodecBuffer, d);
+ }
+ }
+ break;
+ }
+}
+
+
+
+
+extern "C" {
+
+jboolean Java_com_example_nativecodec_NativeCodec_createStreamingMediaPlayer(JNIEnv* env,
+ jclass clazz, jstring filename)
+{
+ LOGV("@@@ create");
+
+ // convert Java string to UTF-8
+ const char *utf8 = env->GetStringUTFChars(filename, NULL);
+ LOGV("opening %s", utf8);
+ int fd = open(utf8, O_RDONLY);
+ env->ReleaseStringUTFChars(filename, utf8);
+ if (fd < 0) {
+ LOGV("failed: %d (%s)", fd, strerror(errno));
+ return JNI_FALSE;
+ }
+
+ data.fd = fd;
+
+ workerdata *d = &data;
+
+ AMediaExtractor *ex = AMediaExtractor_new();
+    media_status_t err = AMediaExtractor_setDataSourceFd(ex, d->fd, 0, LONG_MAX);
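+    // the extractor keeps its own reference to the media source, so our
+    // file descriptor can be closed as soon as the source has been set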
+ close(d->fd);
+ if (err != AMEDIA_OK) {
+ LOGV("setDataSource error: %d", err);
+ return JNI_FALSE;
+ }
+
+ int numtracks = AMediaExtractor_getTrackCount(ex);
+
+ AMediaCodec *codec = NULL;
+
+ LOGV("input has %d tracks", numtracks);
+ for (int i = 0; i < numtracks; i++) {
+ AMediaFormat *format = AMediaExtractor_getTrackFormat(ex, i);
+ const char *s = AMediaFormat_toString(format);
+ LOGV("track %d format: %s", i, s);
+ const char *mime;
+ if (!AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime)) {
+ LOGV("no mime type");
+ return JNI_FALSE;
+ } else if (!strncmp(mime, "video/", 6)) {
+ // Omitting most error handling for clarity.
+ // Production code should check for errors.
+ AMediaExtractor_selectTrack(ex, i);
+ codec = AMediaCodec_createDecoderByType(mime);
+ AMediaCodec_configure(codec, format, d->window, NULL, 0);
+ d->ex = ex;
+ d->codec = codec;
+ d->renderstart = -1;
+ d->sawInputEOS = false;
+ d->sawOutputEOS = false;
+ d->isPlaying = false;
+ d->renderonce = true;
+ AMediaCodec_start(codec);
+ }
+ AMediaFormat_delete(format);
+ }
+
+ mlooper = new mylooper();
+ mlooper->post(kMsgCodecBuffer, d);
+
+ return JNI_TRUE;
+}
+
+// set the playing state for the streaming media player
+void Java_com_example_nativecodec_NativeCodec_setPlayingStreamingMediaPlayer(JNIEnv* env,
+ jclass clazz, jboolean isPlaying)
+{
+ LOGV("@@@ playpause: %d", isPlaying);
+ if (mlooper) {
+ if (isPlaying) {
+ mlooper->post(kMsgResume, &data);
+ } else {
+ mlooper->post(kMsgPause, &data);
+ }
+ }
+}
+
+
+// shut down the native media system
+void Java_com_example_nativecodec_NativeCodec_shutdown(JNIEnv* env, jclass clazz)
+{
+ LOGV("@@@ shutdown");
+ if (mlooper) {
+ mlooper->post(kMsgDecodeDone, &data, true /* flush */);
+ mlooper->quit();
+ delete mlooper;
+ mlooper = NULL;
+ }
+ if (data.window) {
+ ANativeWindow_release(data.window);
+ data.window = NULL;
+ }
+}
+
+
+// set the surface
+void Java_com_example_nativecodec_NativeCodec_setSurface(JNIEnv *env, jclass clazz, jobject surface)
+{
+ // obtain a native window from a Java surface
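+    // release any previously acquired window so it is not leaked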
+ if (data.window) {
+ ANativeWindow_release(data.window);
+ data.window = NULL;
+ }
+ data.window = ANativeWindow_fromSurface(env, surface);
+ LOGV("@@@ setsurface %p", data.window);
+}
+
+
+// rewind the streaming media player
+void Java_com_example_nativecodec_NativeCodec_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz)
+{
+ LOGV("@@@ rewind");
+    if (mlooper) {
+        mlooper->post(kMsgSeek, &data);
+    }
+}
+
+}
diff --git a/ndk/platforms/android-L/samples/native-codec/res/drawable/icon.png b/ndk/platforms/android-L/samples/native-codec/res/drawable/icon.png
new file mode 100644
index 000000000..a07c69fa5
Binary files /dev/null and b/ndk/platforms/android-L/samples/native-codec/res/drawable/icon.png differ
diff --git a/ndk/platforms/android-L/samples/native-codec/res/layout/main.xml b/ndk/platforms/android-L/samples/native-codec/res/layout/main.xml
new file mode 100644
index 000000000..4e94a7b92
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/res/layout/main.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:orientation="vertical"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent">
+
+    <Spinner android:id="@+id/source_spinner"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:prompt="@string/source_prompt" />
+
+    <LinearLayout
+        android:orientation="horizontal"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content">
+
+        <RadioButton android:id="@+id/radio1"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="Surface" />
+
+        <RadioButton android:id="@+id/radio2"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="Texture" />
+    </LinearLayout>
+
+    <LinearLayout
+        android:orientation="horizontal"
+        android:layout_width="match_parent"
+        android:layout_height="wrap_content">
+
+        <SurfaceView android:id="@+id/surfaceview1"
+            android:layout_width="320dp"
+            android:layout_height="240dp" />
+
+        <com.example.nativecodec.MyGLSurfaceView android:id="@+id/glsurfaceview1"
+            android:layout_width="320dp"
+            android:layout_height="240dp" />
+    </LinearLayout>
+
+    <Button android:id="@+id/start_native"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:text="@string/start_native" />
+
+    <Button android:id="@+id/rewind_native"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:text="@string/rewind" />
+
+</LinearLayout>
diff --git a/ndk/platforms/android-L/samples/native-codec/res/values/strings.xml b/ndk/platforms/android-L/samples/native-codec/res/values/strings.xml
new file mode 100644
index 000000000..03169fc73
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/res/values/strings.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string name="app_name">NativeCodec</string>
+    <string name="start_java">Start/Pause\nJava MediaPlayer</string>
+    <string name="start_native">Start/Pause</string>
+
+    <string name="rewind">Rewind</string>
+
+    <string name="source_prompt">Please select the media source</string>
+    <string name="source_select">Media source</string>
+    <string-array name="source_array">
+        <item>/sdcard/testfile.mp4</item>
+    </string-array>
+
+    <string name="sink_prompt">Please select the video sink</string>
+    <string name="sink_select">Video sink</string>
+    <string-array name="sink_array">
+        <item>Surface</item>
+        <item>Texture</item>
+    </string-array>
+
+</resources>
new file mode 100644
index 000000000..8897feb3c
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/src/com/example/nativecodec/MyGLSurfaceView.java
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.nativecodec;
+
+import android.graphics.SurfaceTexture;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.content.Context;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.Matrix;
+import android.os.SystemClock;
+import android.util.AttributeSet;
+
+public class MyGLSurfaceView extends GLSurfaceView {
+
+ MyRenderer mRenderer;
+
+ public MyGLSurfaceView(Context context) {
+ this(context, null);
+ }
+
+ public MyGLSurfaceView(Context context, AttributeSet attributeSet) {
+ super(context, attributeSet);
+ init();
+ }
+
+ private void init() {
+ setEGLContextClientVersion(2);
+ mRenderer = new MyRenderer();
+ setRenderer(mRenderer);
+ Log.i("@@@", "setrenderer");
+ }
+
+ @Override
+ public void onPause() {
+ mRenderer.onPause();
+ super.onPause();
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ mRenderer.onResume();
+ }
+
+ public SurfaceTexture getSurfaceTexture() {
+ return mRenderer.getSurfaceTexture();
+ }
+}
+
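+// Renders frames delivered through a SurfaceTexture onto a slowly rotating
+// quad, sampling them via the GL_OES_EGL_image_external texture target.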
+class MyRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
+
+ public MyRenderer() {
+ mVertices = ByteBuffer.allocateDirect(mVerticesData.length
+ * FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mVertices.put(mVerticesData).position(0);
+
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+ public void onPause() {
+ }
+
+ public void onResume() {
+ mLastTime = SystemClock.elapsedRealtimeNanos();
+ }
+
+ @Override
+ public void onDrawFrame(GL10 glUnused) {
+ synchronized(this) {
+ if (updateSurface) {
+ mSurface.updateTexImage();
+
+ mSurface.getTransformMatrix(mSTMatrix);
+ updateSurface = false;
+ }
+ }
+
+ // Ignore the passed-in GL10 interface, and use the GLES20
+ // class's static methods instead.
+ GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+ mVertices.position(VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ VERTICES_DATA_STRIDE_BYTES, mVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+ mVertices.position(VERTICES_DATA_UV_OFFSET);
+            // UV coordinates are two floats per vertex
+            GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+                    VERTICES_DATA_STRIDE_BYTES, mVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ long now = SystemClock.elapsedRealtimeNanos();
+ mRunTime += (now - mLastTime);
+ mLastTime = now;
+ double d = ((double)mRunTime) / 1000000000;
+ Matrix.setIdentityM(mMMatrix, 0);
+ Matrix.rotateM(mMMatrix, 0, 30, (float)Math.sin(d), (float)Math.cos(d), 0);
+ Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
+ Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
+
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 glUnused, int width, int height) {
+ // Ignore the passed-in GL10 interface, and use the GLES20
+ // class's static methods instead.
+ GLES20.glViewport(0, 0, width, height);
+ mRatio = (float) width / height;
+ Matrix.frustumM(mProjMatrix, 0, -mRatio, mRatio, -1, 1, 3, 7);
+ }
+
+ @Override
+ public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
+ // Ignore the passed-in GL10 interface, and use the GLES20
+ // class's static methods instead.
+
+ /* Set up alpha blending and an Android background color */
+ GLES20.glEnable(GLES20.GL_BLEND);
+ GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
+ GLES20.glClearColor(0.643f, 0.776f, 0.223f, 1.0f);
+
+ /* Set up shaders and handles to their variables */
+ mProgram = createProgram(mVertexShader, mFragmentShader);
+ if (mProgram == 0) {
+ return;
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+
+        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+        checkGlError("glGetUniformLocation uSTMatrix");
+        if (muSTMatrixHandle == -1) {
+            throw new RuntimeException("Could not get uniform location for uSTMatrix");
+        }
+
+ /*
+ * Create our texture. This has to be done each time the
+ * surface is created.
+ */
+
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+
+ mTextureID = textures[0];
+ GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+
+        // Can't do mipmapping with an external (video) texture source
+ GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ // Clamp to edge is the only option
+ GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameteri mTextureID");
+
+        /*
+         * Create the SurfaceTexture that will feed this texture ID with
+         * decoded video frames
+         */
+
+ mSurface = new SurfaceTexture(mTextureID);
+ mSurface.setOnFrameAvailableListener(this);
+
+ Matrix.setLookAtM(mVMatrix, 0, 0, 0, 4f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
+
+ synchronized(this) {
+ updateSurface = false;
+ }
+ }
+
+ @Override
+ synchronized public void onFrameAvailable(SurfaceTexture surface) {
+        /* SurfaceTexture calls this when it has new data available; the call
+         * may arrive on an arbitrary thread, hence the synchronization.
+         * No OpenGL calls may be made here.
+         */
+ updateSurface = true;
+ //Log.v(TAG, "onFrameAvailable " + surface.getTimestamp());
+ }
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ if (shader != 0) {
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ }
+ return shader;
+ }
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ if (program != 0) {
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ }
+ return program;
+ }
+
+ private void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int VERTICES_DATA_POS_OFFSET = 0;
+ private static final int VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mVerticesData = {
+ // X, Y, Z, U, V
+ -1.25f, -1.0f, 0, 0.f, 0.f,
+ 1.25f, -1.0f, 0, 1.f, 0.f,
+ -1.25f, 1.0f, 0, 0.f, 1.f,
+ 1.25f, 1.0f, 0, 1.f, 1.f,
+ };
+
+ private FloatBuffer mVertices;
+
+ private final String mVertexShader =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ private final String mFragmentShader =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private float[] mMVPMatrix = new float[16];
+ private float[] mProjMatrix = new float[16];
+ private float[] mMMatrix = new float[16];
+ private float[] mVMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+
+ private int mProgram;
+ private int mTextureID;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ private float mRatio = 1.0f;
+ private SurfaceTexture mSurface;
+ private boolean updateSurface = false;
+ private long mLastTime = -1;
+ private long mRunTime = 0;
+
+ private static final String TAG = "MyRenderer";
+
+    // GL_TEXTURE_EXTERNAL_OES, defined by the GL_OES_EGL_image_external extension
+ private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
+
+ public SurfaceTexture getSurfaceTexture() {
+ return mSurface;
+ }
+}
diff --git a/ndk/platforms/android-L/samples/native-codec/src/com/example/nativecodec/NativeCodec.java b/ndk/platforms/android-L/samples/native-codec/src/com/example/nativecodec/NativeCodec.java
new file mode 100644
index 000000000..babd55fe0
--- /dev/null
+++ b/ndk/platforms/android-L/samples/native-codec/src/com/example/nativecodec/NativeCodec.java
@@ -0,0 +1,319 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.nativecodec;
+
+import android.app.Activity;
+import android.graphics.SurfaceTexture;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.CompoundButton;
+import android.widget.CompoundButton.OnCheckedChangeListener;
+import android.widget.RadioButton;
+import android.widget.Spinner;
+
+import java.io.IOException;
+
+public class NativeCodec extends Activity {
+ static final String TAG = "NativeCodec";
+
+ String mSourceString = null;
+
+ SurfaceView mSurfaceView1;
+ SurfaceHolder mSurfaceHolder1;
+
+ VideoSink mSelectedVideoSink;
+ VideoSink mNativeCodecPlayerVideoSink;
+
+ SurfaceHolderVideoSink mSurfaceHolder1VideoSink;
+ GLViewVideoSink mGLView1VideoSink;
+
+ boolean mCreated = false;
+ boolean mIsPlaying = false;
+
+ /** Called when the activity is first created. */
+ @Override
+ public void onCreate(Bundle icicle) {
+ super.onCreate(icicle);
+ setContentView(R.layout.main);
+
+ mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
+
+ // set up the Surface 1 video sink
+ mSurfaceView1 = (SurfaceView) findViewById(R.id.surfaceview1);
+ mSurfaceHolder1 = mSurfaceView1.getHolder();
+
+ mSurfaceHolder1.addCallback(new SurfaceHolder.Callback() {
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height="
+ + height);
+ }
+
+ @Override
+ public void surfaceCreated(SurfaceHolder holder) {
+ Log.v(TAG, "surfaceCreated");
+ if (mRadio1.isChecked()) {
+ setSurface(holder.getSurface());
+ }
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ Log.v(TAG, "surfaceDestroyed");
+ }
+
+ });
+
+ // initialize content source spinner
+ Spinner sourceSpinner = (Spinner) findViewById(R.id.source_spinner);
+        ArrayAdapter<CharSequence> sourceAdapter = ArrayAdapter.createFromResource(
+                this, R.array.source_array, android.R.layout.simple_spinner_item);
+ sourceAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ sourceSpinner.setAdapter(sourceAdapter);
+ sourceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+
+ @Override
+            public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
+ mSourceString = parent.getItemAtPosition(pos).toString();
+ Log.v(TAG, "onItemSelected " + mSourceString);
+ }
+
+ @Override
+            public void onNothingSelected(AdapterView<?> parent) {
+ Log.v(TAG, "onNothingSelected");
+ mSourceString = null;
+ }
+
+ });
+
+ mRadio1 = (RadioButton) findViewById(R.id.radio1);
+ mRadio2 = (RadioButton) findViewById(R.id.radio2);
+
+ OnCheckedChangeListener checklistener = new CompoundButton.OnCheckedChangeListener() {
+
+ @Override
+ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+ Log.i("@@@@", "oncheckedchanged");
+ if (buttonView == mRadio1 && isChecked) {
+ mRadio2.setChecked(false);
+ }
+ if (buttonView == mRadio2 && isChecked) {
+ mRadio1.setChecked(false);
+ }
+ if (isChecked) {
+ if (mRadio1.isChecked()) {
+ if (mSurfaceHolder1VideoSink == null) {
+ mSurfaceHolder1VideoSink = new SurfaceHolderVideoSink(mSurfaceHolder1);
+ }
+ mSelectedVideoSink = mSurfaceHolder1VideoSink;
+ mGLView1.onPause();
+ Log.i("@@@@", "glview pause");
+ } else {
+ mGLView1.onResume();
+ if (mGLView1VideoSink == null) {
+ mGLView1VideoSink = new GLViewVideoSink(mGLView1);
+ }
+ mSelectedVideoSink = mGLView1VideoSink;
+ }
+ switchSurface();
+ }
+ }
+ };
+ mRadio1.setOnCheckedChangeListener(checklistener);
+ mRadio2.setOnCheckedChangeListener(checklistener);
+ mRadio2.toggle();
+
+ // the surfaces themselves are easier targets than the radio buttons
+ mSurfaceView1.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ mRadio1.toggle();
+ }
+ });
+ mGLView1.setOnClickListener(new View.OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ mRadio2.toggle();
+ }
+ });
+
+ // initialize button click handlers
+
+ // native MediaPlayer start/pause
+ ((Button) findViewById(R.id.start_native)).setOnClickListener(new View.OnClickListener() {
+
+ @Override
+ public void onClick(View view) {
+ if (!mCreated) {
+ if (mNativeCodecPlayerVideoSink == null) {
+ if (mSelectedVideoSink == null) {
+ return;
+ }
+ mSelectedVideoSink.useAsSinkForNative();
+ mNativeCodecPlayerVideoSink = mSelectedVideoSink;
+ }
+ if (mSourceString != null) {
+ mCreated = createStreamingMediaPlayer(mSourceString);
+ }
+ }
+ if (mCreated) {
+ mIsPlaying = !mIsPlaying;
+ setPlayingStreamingMediaPlayer(mIsPlaying);
+ }
+ }
+
+ });
+
+
+ // native MediaPlayer rewind
+ ((Button) findViewById(R.id.rewind_native)).setOnClickListener(new View.OnClickListener() {
+
+ @Override
+ public void onClick(View view) {
+ if (mNativeCodecPlayerVideoSink != null) {
+ rewindStreamingMediaPlayer();
+ }
+ }
+
+ });
+
+ }
+
+ void switchSurface() {
+ if (mCreated && mNativeCodecPlayerVideoSink != mSelectedVideoSink) {
+ // shutdown and recreate on other surface
+ Log.i("@@@", "shutting down player");
+ shutdown();
+ mCreated = false;
+ mSelectedVideoSink.useAsSinkForNative();
+ mNativeCodecPlayerVideoSink = mSelectedVideoSink;
+ if (mSourceString != null) {
+ Log.i("@@@", "recreating player");
+ mCreated = createStreamingMediaPlayer(mSourceString);
+ mIsPlaying = false;
+ }
+ }
+ }
+
+ /** Called when the activity is about to be paused. */
+ @Override
+ protected void onPause()
+ {
+ mIsPlaying = false;
+ setPlayingStreamingMediaPlayer(false);
+ mGLView1.onPause();
+ super.onPause();
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+ if (mRadio2.isChecked()) {
+ mGLView1.onResume();
+ }
+ }
+
+ /** Called when the activity is about to be destroyed. */
+ @Override
+ protected void onDestroy()
+ {
+ shutdown();
+ mCreated = false;
+ super.onDestroy();
+ }
+
+ private MyGLSurfaceView mGLView1;
+
+ private RadioButton mRadio1;
+
+ private RadioButton mRadio2;
+
+ /** Native methods, implemented in jni folder */
+ public static native void createEngine();
+ public static native boolean createStreamingMediaPlayer(String filename);
+ public static native void setPlayingStreamingMediaPlayer(boolean isPlaying);
+ public static native void shutdown();
+ public static native void setSurface(Surface surface);
+ public static native void rewindStreamingMediaPlayer();
+
+ /** Load jni .so on initialization */
+ static {
+ System.loadLibrary("native-codec-jni");
+ }
+
+    // VideoSink abstracts over the two supported video outputs: a Surface
+    // obtained from a SurfaceHolder, and a SurfaceTexture obtained from a
+    // GLSurfaceView.
+ static abstract class VideoSink {
+
+ abstract void setFixedSize(int width, int height);
+ abstract void useAsSinkForNative();
+
+ }
+
+ static class SurfaceHolderVideoSink extends VideoSink {
+
+ private final SurfaceHolder mSurfaceHolder;
+
+ SurfaceHolderVideoSink(SurfaceHolder surfaceHolder) {
+ mSurfaceHolder = surfaceHolder;
+ }
+
+ @Override
+ void setFixedSize(int width, int height) {
+ mSurfaceHolder.setFixedSize(width, height);
+ }
+
+ @Override
+ void useAsSinkForNative() {
+ Surface s = mSurfaceHolder.getSurface();
+ Log.i("@@@", "setting surface " + s);
+ setSurface(s);
+ }
+
+ }
+
+ static class GLViewVideoSink extends VideoSink {
+
+ private final MyGLSurfaceView mMyGLSurfaceView;
+
+ GLViewVideoSink(MyGLSurfaceView myGLSurfaceView) {
+ mMyGLSurfaceView = myGLSurfaceView;
+ }
+
+ @Override
+ void setFixedSize(int width, int height) {
+ }
+
+ @Override
+ void useAsSinkForNative() {
+ SurfaceTexture st = mMyGLSurfaceView.getSurfaceTexture();
+ Surface s = new Surface(st);
+ setSurface(s);
+ s.release();
+ }
+
+ }
+
+}
diff --git a/ndk/platforms/android-L/samples/native-codec/testfile.mp4 b/ndk/platforms/android-L/samples/native-codec/testfile.mp4
new file mode 100644
index 000000000..571ff4459
Binary files /dev/null and b/ndk/platforms/android-L/samples/native-codec/testfile.mp4 differ