am 48aaee2c: Merge "Native media sample app for NDK API level 14"

* commit '48aaee2cd0ca157b4c789bf017cd2ff6e9404664':
  Native media sample app for NDK API level 14
Authored by Andrew Hsieh on 2012-04-02 07:55:44 -07:00
Committed by Android Git Automerger
12 changed files with 1508 additions and 2 deletions

View File

@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.nativemedia">
<uses-feature android:glEsVersion="0x00020000" />
<application android:icon="@drawable/icon" android:label="@string/app_name">
<activity android:name=".NativeMedia"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="14" />
<!-- INTERNET is needed to use a URI-based media player, depending on the URI -->
<uses-permission android:name="android.permission.INTERNET"></uses-permission>
</manifest>

View File

@@ -1,2 +1,8 @@
This sample app requires an MPEG-2 Transport Stream file to be
placed in /sdcard/NativeMedia.ts and encoded as:
video: H.264 baseline profile
audio: AAC LC stereo
For demonstration purposes we have supplied such a .ts file.
Any actual stream must be created according to the MPEG-2 specification.
(This replaces the earlier README text: "The documentation for Android native
media based on OpenMAX AL 1.0.1 references this directory, but the example is
not yet available.")

View File

@@ -0,0 +1,4 @@
# Indicates whether an apk should be generated for each density.
split.density=false
# Project target.
target=android-14

View File

@@ -0,0 +1,30 @@
# Copyright (C) 2011 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := native-media-jni
LOCAL_SRC_FILES := native-media-jni.c
# for native multimedia
LOCAL_LDLIBS += -lOpenMAXAL
# for logging
LOCAL_LDLIBS += -llog
# for native windows
LOCAL_LDLIBS += -landroid
LOCAL_CFLAGS += -UNDEBUG
include $(BUILD_SHARED_LIBRARY)
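# A minimal companion Application.mk (hypothetical sketch, not part of this
# commit) would pin the build platform, since libOpenMAXAL.so is only
# available from platform android-14:
#
#   APP_PLATFORM := android-14   # required for -lOpenMAXAL
#   APP_ABI := armeabi-v7a       # assumption: adjust for your target ABIs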

View File

@@ -0,0 +1,526 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* This is a JNI example where we use native methods to play video
* using OpenMAX AL. See the corresponding Java source file located at:
*
* src/com/example/nativemedia/NativeMedia.java
*
* In this example we use assert() for "impossible" error conditions,
* and explicit handling and recovery for more likely error conditions.
*/
#include <assert.h>
#include <jni.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
#include <android/log.h>
#define TAG "NativeMedia"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
// for native media
#include <OMXAL/OpenMAXAL.h>
#include <OMXAL/OpenMAXAL_Android.h>
// for native window JNI
#include <android/native_window_jni.h>
// engine interfaces
static XAObjectItf engineObject = NULL;
static XAEngineItf engineEngine = NULL;
// output mix interfaces
static XAObjectItf outputMixObject = NULL;
// streaming media player interfaces
static XAObjectItf playerObj = NULL;
static XAPlayItf playerPlayItf = NULL;
static XAAndroidBufferQueueItf playerBQItf = NULL;
static XAStreamInformationItf playerStreamInfoItf = NULL;
static XAVolumeItf playerVolItf = NULL;
// number of required interfaces for the MediaPlayer creation
#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf
// video sink for the player
static ANativeWindow* theNativeWindow;
// number of buffers in our buffer queue, an arbitrary number
#define NB_BUFFERS 8
// we're streaming MPEG-2 transport stream data, so operate on whole transport stream packets
#define MPEG2_TS_PACKET_SIZE 188
// number of MPEG-2 transport stream packets per buffer, an arbitrary number
#define PACKETS_PER_BUFFER 10
// determines how much memory we're dedicating to caching the stream in memory
#define BUFFER_SIZE (PACKETS_PER_BUFFER*MPEG2_TS_PACKET_SIZE)
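// with the values above: 10 packets * 188 bytes = 1880 bytes per buffer,
// and 8 buffers * 1880 bytes = 15040 bytes of cache in total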
// where we cache in memory the data to play
// note this memory is re-used by the buffer queue callback
static char dataCache[BUFFER_SIZE * NB_BUFFERS];
// handle of the file to play
static FILE *file;
// has the app reached the end of the file
static jboolean reachedEof = JNI_FALSE;
// constant to identify a buffer context which is the end of the stream to decode
static const int kEosBufferCntxt = 1980; // a magic value we can compare against
// For mutual exclusion between callback thread and application thread(s).
// The mutex protects reachedEof and discontinuity.
// The condition is signalled when a discontinuity is acknowledged.
static pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t cond = PTHREAD_COND_INITIALIZER;
// whether a discontinuity is in progress
static jboolean discontinuity = JNI_FALSE;
static jboolean enqueueInitialBuffers(jboolean discontinuity);
// AndroidBufferQueueItf callback to supply MPEG-2 TS packets to the media player
static XAresult AndroidBufferQueueCallback(
XAAndroidBufferQueueItf caller,
void *pCallbackContext, /* input */
void *pBufferContext, /* input */
void *pBufferData, /* input */
XAuint32 dataSize, /* input */
XAuint32 dataUsed, /* input */
const XAAndroidBufferItem *pItems,/* input */
XAuint32 itemsLength /* input */)
{
XAresult res;
int ok;
// pCallbackContext was specified as NULL at RegisterCallback and is unused here
assert(NULL == pCallbackContext);
// note there is never any contention on this mutex unless a discontinuity request is active
ok = pthread_mutex_lock(&mutex);
assert(0 == ok);
// was a discontinuity requested?
if (discontinuity) {
// Note: can't rewind after EOS, which we send when reaching EOF
// (don't send EOS if you plan to play more content through the same player)
if (!reachedEof) {
// clear the buffer queue
res = (*playerBQItf)->Clear(playerBQItf);
assert(XA_RESULT_SUCCESS == res);
// rewind the data source so we are guaranteed to be at an appropriate point
rewind(file);
// Enqueue the initial buffers, with a discontinuity indicator on first buffer
(void) enqueueInitialBuffers(JNI_TRUE);
}
// acknowledge the discontinuity request
discontinuity = JNI_FALSE;
ok = pthread_cond_signal(&cond);
assert(0 == ok);
goto exit;
}
if ((pBufferData == NULL) && (pBufferContext != NULL)) {
const int processedCommand = *(int *)pBufferContext;
if (kEosBufferCntxt == processedCommand) {
LOGV("EOS was processed\n");
// our buffer with the EOS message has been consumed
assert(0 == dataSize);
goto exit;
}
}
// pBufferData is a pointer to a buffer that we previously Enqueued
assert((dataSize > 0) && ((dataSize % MPEG2_TS_PACKET_SIZE) == 0));
assert(dataCache <= (char *) pBufferData && (char *) pBufferData <
&dataCache[BUFFER_SIZE * NB_BUFFERS]);
assert(0 == (((char *) pBufferData - dataCache) % BUFFER_SIZE));
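// i.e. pBufferData must be one of the NB_BUFFERS slots inside dataCache, aligned to a buffer boundary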
// don't bother trying to read more data once we've hit EOF
if (reachedEof) {
goto exit;
}
// note we do call fread from multiple threads, but never concurrently
size_t bytesRead = fread(pBufferData, 1, BUFFER_SIZE, file);
if (bytesRead > 0) {
if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
LOGV("Dropping last packet because it is not whole");
}
size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
size_t bufferSize = packetsRead * MPEG2_TS_PACKET_SIZE;
res = (*caller)->Enqueue(caller, NULL /*pBufferContext*/,
pBufferData /*pData*/,
bufferSize /*dataLength*/,
NULL /*pMsg*/,
0 /*msgLength*/);
assert(XA_RESULT_SUCCESS == res);
} else {
// EOF or I/O error, signal EOS
XAAndroidBufferItem msgEos[1];
msgEos[0].itemKey = XA_ANDROID_ITEMKEY_EOS;
msgEos[0].itemSize = 0;
// EOS message has no parameters, so the total size of the message is the size of the key
// plus the size of itemSize, both XAuint32
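// (i.e. msgLength = 2 * sizeof(XAuint32) = 8 bytes)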
res = (*caller)->Enqueue(caller, (void *)&kEosBufferCntxt /*pBufferContext*/,
NULL /*pData*/, 0 /*dataLength*/,
msgEos /*pMsg*/,
sizeof(XAuint32)*2 /*msgLength*/);
assert(XA_RESULT_SUCCESS == res);
reachedEof = JNI_TRUE;
}
exit:
ok = pthread_mutex_unlock(&mutex);
assert(0 == ok);
return XA_RESULT_SUCCESS;
}
// callback invoked whenever there is new or changed stream information
static void StreamChangeCallback(XAStreamInformationItf caller,
XAuint32 eventId,
XAuint32 streamIndex,
void * pEventData,
void * pContext )
{
LOGV("StreamChangeCallback called for stream %u", streamIndex);
// pContext was specified as NULL at RegisterStreamChangeCallback and is unused here
assert(NULL == pContext);
switch (eventId) {
case XA_STREAMCBEVENT_PROPERTYCHANGE: {
/** From spec 1.0.1:
"This event indicates that stream property change has occurred.
The streamIndex parameter identifies the stream with the property change.
The pEventData parameter for this event is not used and shall be ignored."
*/
XAresult res;
XAuint32 domain;
res = (*caller)->QueryStreamType(caller, streamIndex, &domain);
assert(XA_RESULT_SUCCESS == res);
switch (domain) {
case XA_DOMAINTYPE_VIDEO: {
XAVideoStreamInformation videoInfo;
res = (*caller)->QueryStreamInformation(caller, streamIndex, &videoInfo);
assert(XA_RESULT_SUCCESS == res);
LOGV("Found video size %u x %u, codec ID=%u, frameRate=%u, bitRate=%u, duration=%u ms",
videoInfo.width, videoInfo.height, videoInfo.codecId, videoInfo.frameRate,
videoInfo.bitRate, videoInfo.duration);
} break;
default:
fprintf(stderr, "Unexpected domain %u\n", domain);
break;
}
} break;
default:
fprintf(stderr, "Unexpected stream event ID %u\n", eventId);
break;
}
}
// create the engine and output mix objects
void Java_com_example_nativemedia_NativeMedia_createEngine(JNIEnv* env, jclass clazz)
{
XAresult res;
// create engine
res = xaCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
assert(XA_RESULT_SUCCESS == res);
// realize the engine
res = (*engineObject)->Realize(engineObject, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// get the engine interface, which is needed in order to create other objects
res = (*engineObject)->GetInterface(engineObject, XA_IID_ENGINE, &engineEngine);
assert(XA_RESULT_SUCCESS == res);
// create output mix
res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
assert(XA_RESULT_SUCCESS == res);
// realize the output mix
res = (*outputMixObject)->Realize(outputMixObject, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
}
// Enqueue the initial buffers, and optionally signal a discontinuity in the first buffer
static jboolean enqueueInitialBuffers(jboolean discontinuity)
{
/* Fill our cache.
* We want to read whole packets (integral multiples of MPEG2_TS_PACKET_SIZE).
* fread returns units of "elements" not bytes, so we ask for 1-byte elements
* and then check that the number of elements is a multiple of the packet size.
*/
size_t bytesRead;
bytesRead = fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file);
if (bytesRead <= 0) {
// could be premature EOF or I/O error
return JNI_FALSE;
}
if ((bytesRead % MPEG2_TS_PACKET_SIZE) != 0) {
LOGV("Dropping last packet because it is not whole");
}
size_t packetsRead = bytesRead / MPEG2_TS_PACKET_SIZE;
LOGV("Initially queueing %u packets", packetsRead);
/* Enqueue the content of our cache before starting to play,
we don't want to starve the player */
size_t i;
for (i = 0; i < NB_BUFFERS && packetsRead > 0; i++) {
// compute size of this buffer
size_t packetsThisBuffer = packetsRead;
if (packetsThisBuffer > PACKETS_PER_BUFFER) {
packetsThisBuffer = PACKETS_PER_BUFFER;
}
size_t bufferSize = packetsThisBuffer * MPEG2_TS_PACKET_SIZE;
XAresult res;
if (discontinuity) {
// signal discontinuity
XAAndroidBufferItem items[1];
items[0].itemKey = XA_ANDROID_ITEMKEY_DISCONTINUITY;
items[0].itemSize = 0;
// DISCONTINUITY message has no parameters,
// so the total size of the message is the size of the key
// plus the size of itemSize, both XAuint32
res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
dataCache + i*BUFFER_SIZE, bufferSize, items /*pMsg*/,
sizeof(XAuint32)*2 /*msgLength*/);
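// note: the next line clears only the local parameter (which shadows the global),
// so only the first buffer enqueued carries the discontinuity item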
discontinuity = JNI_FALSE;
} else {
res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
dataCache + i*BUFFER_SIZE, bufferSize, NULL, 0);
}
assert(XA_RESULT_SUCCESS == res);
packetsRead -= packetsThisBuffer;
}
return JNI_TRUE;
}
// create streaming media player
jboolean Java_com_example_nativemedia_NativeMedia_createStreamingMediaPlayer(JNIEnv* env,
jclass clazz, jstring filename)
{
XAresult res;
// convert Java string to UTF-8
const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
assert(NULL != utf8);
// open the file to play
file = fopen(utf8, "rb");
if (file == NULL) {
return JNI_FALSE;
}
// configure data source
XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS };
XADataFormat_MIME format_mime = {
XA_DATAFORMAT_MIME, XA_ANDROID_MIME_MP2TS, XA_CONTAINERTYPE_MPEG_TS };
XADataSource dataSrc = {&loc_abq, &format_mime};
// configure audio sink
XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
XADataSink audioSnk = { &loc_outmix, NULL };
// configure the image/video sink
XADataLocator_NativeDisplay loc_nd = {
XA_DATALOCATOR_NATIVEDISPLAY, // locatorType
// the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
(void*)theNativeWindow, // hWindow
// must be NULL
NULL // hDisplay
};
XADataSink imageVideoSink = {&loc_nd, NULL};
// declare interfaces to use
XAboolean required[NB_MAXAL_INTERFACES]
= {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE};
XAInterfaceID iidArray[NB_MAXAL_INTERFACES]
= {XA_IID_PLAY, XA_IID_ANDROIDBUFFERQUEUESOURCE,
XA_IID_STREAMINFORMATION};
// create media player
res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
NULL, &audioSnk, &imageVideoSink, NULL, NULL,
NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
iidArray /*const XAInterfaceID *pInterfaceIds*/,
required /*const XAboolean *pInterfaceRequired*/);
assert(XA_RESULT_SUCCESS == res);
// release the Java string and UTF-8
(*env)->ReleaseStringUTFChars(env, filename, utf8);
// realize the player
res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
assert(XA_RESULT_SUCCESS == res);
// get the play interface
res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
assert(XA_RESULT_SUCCESS == res);
// get the stream information interface (for video size)
res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf);
assert(XA_RESULT_SUCCESS == res);
// get the volume interface
res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf);
assert(XA_RESULT_SUCCESS == res);
// get the Android buffer queue interface
res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUESOURCE, &playerBQItf);
assert(XA_RESULT_SUCCESS == res);
// specify which events we want to be notified of
res = (*playerBQItf)->SetCallbackEventsMask(playerBQItf, XA_ANDROIDBUFFERQUEUEEVENT_PROCESSED);
assert(XA_RESULT_SUCCESS == res);
// register the callback from which OpenMAX AL can retrieve the data to play
res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL);
assert(XA_RESULT_SUCCESS == res);
// we want to be notified of the video size once it's found, so we register a callback for that
res = (*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf,
StreamChangeCallback, NULL);
assert(XA_RESULT_SUCCESS == res);
// enqueue the initial buffers
if (!enqueueInitialBuffers(JNI_FALSE)) {
return JNI_FALSE;
}
// prepare the player
res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
assert(XA_RESULT_SUCCESS == res);
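// note: entering the PAUSED state lets the player begin prefetching the enqueued data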
// set the volume
res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);
assert(XA_RESULT_SUCCESS == res);
// start the playback
res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
assert(XA_RESULT_SUCCESS == res);
return JNI_TRUE;
}
// set the playing state for the streaming media player
void Java_com_example_nativemedia_NativeMedia_setPlayingStreamingMediaPlayer(JNIEnv* env,
jclass clazz, jboolean isPlaying)
{
XAresult res;
// make sure the streaming media player was created
if (NULL != playerPlayItf) {
// set the player's state
res = (*playerPlayItf)->SetPlayState(playerPlayItf, isPlaying ?
XA_PLAYSTATE_PLAYING : XA_PLAYSTATE_PAUSED);
assert(XA_RESULT_SUCCESS == res);
}
}
// shut down the native media system
void Java_com_example_nativemedia_NativeMedia_shutdown(JNIEnv* env, jclass clazz)
{
// destroy streaming media player object, and invalidate all associated interfaces
if (playerObj != NULL) {
(*playerObj)->Destroy(playerObj);
playerObj = NULL;
playerPlayItf = NULL;
playerBQItf = NULL;
playerStreamInfoItf = NULL;
playerVolItf = NULL;
}
// destroy output mix object, and invalidate all associated interfaces
if (outputMixObject != NULL) {
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject = NULL;
}
// destroy engine object, and invalidate all associated interfaces
if (engineObject != NULL) {
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
// close the file
if (file != NULL) {
fclose(file);
file = NULL;
}
// make sure we don't leak native windows
if (theNativeWindow != NULL) {
ANativeWindow_release(theNativeWindow);
theNativeWindow = NULL;
}
}
// set the surface
void Java_com_example_nativemedia_NativeMedia_setSurface(JNIEnv *env, jclass clazz, jobject surface)
{
// obtain a native window from a Java surface
theNativeWindow = ANativeWindow_fromSurface(env, surface);
}
// rewind the streaming media player
void Java_com_example_nativemedia_NativeMedia_rewindStreamingMediaPlayer(JNIEnv *env, jclass clazz)
{
XAresult res;
// make sure the streaming media player was created
if (NULL != playerBQItf && NULL != file) {
// first wait for buffers currently in queue to be drained
int ok;
ok = pthread_mutex_lock(&mutex);
assert(0 == ok);
discontinuity = JNI_TRUE;
// wait for discontinuity request to be observed by buffer queue callback
// Note: can't rewind after EOS, which we send when reaching EOF
// (don't send EOS if you plan to play more content through the same player)
while (discontinuity && !reachedEof) {
ok = pthread_cond_wait(&cond, &mutex);
assert(0 == ok);
}
ok = pthread_mutex_unlock(&mutex);
assert(0 == ok);
}
}

View File

@@ -0,0 +1,13 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system, edit
# "ant.properties" and override values to adapt the script to your
# project structure.
# Indicates whether an apk should be generated for each density.
split.density=false
# Project target.
target=android-14

Binary file not shown (image, 2.5 KiB).

View File

@@ -0,0 +1,133 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/hello"
/>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/source_select"
/>
<Spinner
android:id="@+id/source_spinner"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/source_prompt"
/>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/sink_select"
/>
<Spinner
android:id="@+id/sink_spinner"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/sink_prompt"
/>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
>
<Button
android:id="@+id/start_java"
android:text="@string/start_java"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
<Button
android:id="@+id/start_native"
android:text="@string/start_native"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
<Button
android:id="@+id/finish"
android:text="@string/finish"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
>
<Button
android:id="@+id/rewind_java"
android:text="@string/rewind_java"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
<Button
android:id="@+id/rewind_native"
android:text="@string/rewind_native"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
/>
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="S1"
/>
<SurfaceView
android:id="@+id/surfaceview1"
android:layout_width="320px"
android:layout_height="240px"
/>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="S2"
/>
<SurfaceView
android:id="@+id/surfaceview2"
android:layout_width="400px"
android:layout_height="224px"
/>
</LinearLayout>
<LinearLayout
android:orientation="horizontal"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="ST1"
/>
<com.example.nativemedia.MyGLSurfaceView
android:id="@+id/glsurfaceview1"
android:layout_width="320px"
android:layout_height="240px"
/>
<TextView
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="ST2"
/>
<com.example.nativemedia.MyGLSurfaceView
android:id="@+id/glsurfaceview2"
android:layout_width="320px"
android:layout_height="240px"
/>
</LinearLayout>
</LinearLayout>

View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="hello">Hello, Android, using native media!</string>
<string name="app_name">NativeMedia</string>
<string name="start_java">Start/Pause\nJava MediaPlayer</string>
<string name="start_native">Start/Pause\nnative MediaPlayer</string>
<string name="finish">Finish</string>
<string name="rewind_java">Rewind\nJava MediaPlayer</string>
<string name="rewind_native">Rewind\nnative MediaPlayer</string>
<string name="source_select">Please select the media source</string>
<string name="source_prompt">Media source</string>
<string-array name="source_array">
<item>/sdcard/NativeMedia.ts</item>
</string-array>
<string name="sink_select">Please select the video sink</string>
<string name="sink_prompt">Video sink</string>
<string-array name="sink_array">
<item>Surface 1</item>
<item>Surface 2</item>
<item>SurfaceTexture 1</item>
<item>SurfaceTexture 2</item>
</string-array>
</resources>

View File

@@ -0,0 +1,336 @@
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.nativemedia;
import android.graphics.SurfaceTexture;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.AttributeSet;
public class MyGLSurfaceView extends GLSurfaceView {
MyRenderer mRenderer;
public MyGLSurfaceView(Context context) {
this(context, null);
}
public MyGLSurfaceView(Context context, AttributeSet attributeSet) {
super(context, attributeSet);
init();
}
private void init() {
setEGLContextClientVersion(2);
mRenderer = new MyRenderer();
setRenderer(mRenderer);
}
@Override
public void onPause() {
super.onPause();
}
@Override
public void onResume() {
super.onResume();
}
public SurfaceTexture getSurfaceTexture() {
return mRenderer.getSurfaceTexture();
}
}
class MyRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public MyRenderer() {
mVertices = ByteBuffer.allocateDirect(mVerticesData.length
* FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
mVertices.put(mVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
Matrix.setIdentityM(mMMatrix, 0);
Matrix.rotateM(mMMatrix, 0, 20, 0, 1, 0);
}
public void onDrawFrame(GL10 glUnused) {
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
mVertices.position(VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
VERTICES_DATA_STRIDE_BYTES, mVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");
mVertices.position(VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
VERTICES_DATA_STRIDE_BYTES, mVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");
Matrix.multiplyMM(mMVPMatrix, 0, mVMatrix, 0, mMMatrix, 0);
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
}
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
GLES20.glViewport(0, 0, width, height);
mRatio = (float) width / height;
Matrix.frustumM(mProjMatrix, 0, -mRatio, mRatio, -1, 1, 3, 7);
}
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
/* Set up alpha blending and an Android background color */
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glClearColor(0.643f, 0.776f, 0.223f, 1.0f);
/* Set up shaders and handles to their variables */
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get uniform location for uMVPMatrix");
}
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get uniform location for uSTMatrix");
}
/*
* Create our texture. This has to be done each time the
* surface is created.
*/
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");
// Can't do mipmapping with an external (SurfaceTexture) source
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
// Clamp to edge is the only option
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
checkGlError("glTexParameteri mTextureID");
/*
* Create the SurfaceTexture that will feed this texture ID;
* the media player will render its frames into it.
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Matrix.setLookAtM(mVMatrix, 0, 0, 0, 4f, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
synchronized(this) {
updateSurface = false;
}
}
synchronized public void onFrameAvailable(SurfaceTexture surface) {
/* For simplicity, SurfaceTexture calls here when it has new
* data available. The call may come in on an arbitrary thread,
* so let's be safe and use synchronization. No OpenGL calls can be done here.
*/
updateSurface = true;
//Log.v(TAG, "onFrameAvailable " + surface.getTimestamp());
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}
int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
private static final int FLOAT_SIZE_BYTES = 4;
private static final int VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int VERTICES_DATA_POS_OFFSET = 0;
private static final int VERTICES_DATA_UV_OFFSET = 3;
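// i.e. each vertex is 5 floats (20 bytes): X, Y, Z position followed by U, V texture coordinates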
private final float[] mVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mVertices;
private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
private float[] mMVPMatrix = new float[16];
private float[] mProjMatrix = new float[16];
private float[] mMMatrix = new float[16];
private float[] mVMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private float mRatio = 1.0f;
private SurfaceTexture mSurface;
private boolean updateSurface = false;
private static final String TAG = "MyRenderer";
// Magic key
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
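// (the same value as android.opengl.GLES11Ext.GL_TEXTURE_EXTERNAL_OES)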
public SurfaceTexture getSurfaceTexture() {
return mSurface;
}
}

View File

@@ -0,0 +1,407 @@
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.nativemedia;
import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Spinner;
import java.io.IOException;
public class NativeMedia extends Activity {
static final String TAG = "NativeMedia";
String mSourceString = null;
String mSinkString = null;
// member variables for Java media player
MediaPlayer mMediaPlayer;
boolean mMediaPlayerIsPrepared = false;
SurfaceView mSurfaceView1;
SurfaceHolder mSurfaceHolder1;
// member variables for native media player
boolean mIsPlayingStreaming = false;
SurfaceView mSurfaceView2;
SurfaceHolder mSurfaceHolder2;
VideoSink mSelectedVideoSink;
VideoSink mJavaMediaPlayerVideoSink;
VideoSink mNativeMediaPlayerVideoSink;
SurfaceHolderVideoSink mSurfaceHolder1VideoSink, mSurfaceHolder2VideoSink;
GLViewVideoSink mGLView1VideoSink, mGLView2VideoSink;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.main);
mGLView1 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview1);
mGLView2 = (MyGLSurfaceView) findViewById(R.id.glsurfaceview2);
// initialize native media system
createEngine();
// set up the Surface 1 video sink
mSurfaceView1 = (SurfaceView) findViewById(R.id.surfaceview1);
mSurfaceHolder1 = mSurfaceView1.getHolder();
mSurfaceHolder1.addCallback(new SurfaceHolder.Callback() {
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height="
+ height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.v(TAG, "surfaceCreated");
setSurface(holder.getSurface());
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.v(TAG, "surfaceDestroyed");
}
});
// set up the Surface 2 video sink
mSurfaceView2 = (SurfaceView) findViewById(R.id.surfaceview2);
mSurfaceHolder2 = mSurfaceView2.getHolder();
mSurfaceHolder2.addCallback(new SurfaceHolder.Callback() {
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(TAG, "surfaceChanged format=" + format + ", width=" + width + ", height="
+ height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.v(TAG, "surfaceCreated");
setSurface(holder.getSurface());
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.v(TAG, "surfaceDestroyed");
}
});
// create Java media player
mMediaPlayer = new MediaPlayer();
// set up Java media player listeners
mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
public void onPrepared(MediaPlayer mediaPlayer) {
int width = mediaPlayer.getVideoWidth();
int height = mediaPlayer.getVideoHeight();
Log.v(TAG, "onPrepared width=" + width + ", height=" + height);
if (width != 0 && height != 0 && mJavaMediaPlayerVideoSink != null) {
mJavaMediaPlayerVideoSink.setFixedSize(width, height);
}
mMediaPlayerIsPrepared = true;
mediaPlayer.start();
}
});
mMediaPlayer.setOnVideoSizeChangedListener(new MediaPlayer.OnVideoSizeChangedListener() {
public void onVideoSizeChanged(MediaPlayer mediaPlayer, int width, int height) {
Log.v(TAG, "onVideoSizeChanged width=" + width + ", height=" + height);
if (width != 0 && height != 0 && mJavaMediaPlayerVideoSink != null) {
mJavaMediaPlayerVideoSink.setFixedSize(width, height);
}
}
});
// initialize content source spinner
Spinner sourceSpinner = (Spinner) findViewById(R.id.source_spinner);
ArrayAdapter<CharSequence> sourceAdapter = ArrayAdapter.createFromResource(
this, R.array.source_array, android.R.layout.simple_spinner_item);
sourceAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
sourceSpinner.setAdapter(sourceAdapter);
sourceSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
mSourceString = parent.getItemAtPosition(pos).toString();
Log.v(TAG, "onItemSelected " + mSourceString);
}
public void onNothingSelected(AdapterView<?> parent) {
Log.v(TAG, "onNothingSelected");
mSourceString = null;
}
});
// initialize video sink spinner
Spinner sinkSpinner = (Spinner) findViewById(R.id.sink_spinner);
ArrayAdapter<CharSequence> sinkAdapter = ArrayAdapter.createFromResource(
this, R.array.sink_array, android.R.layout.simple_spinner_item);
sinkAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
sinkSpinner.setAdapter(sinkAdapter);
sinkSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
mSinkString = parent.getItemAtPosition(pos).toString();
Log.v(TAG, "onItemSelected " + mSinkString);
if ("Surface 1".equals(mSinkString)) {
if (mSurfaceHolder1VideoSink == null) {
mSurfaceHolder1VideoSink = new SurfaceHolderVideoSink(mSurfaceHolder1);
}
mSelectedVideoSink = mSurfaceHolder1VideoSink;
} else if ("Surface 2".equals(mSinkString)) {
if (mSurfaceHolder2VideoSink == null) {
mSurfaceHolder2VideoSink = new SurfaceHolderVideoSink(mSurfaceHolder2);
}
mSelectedVideoSink = mSurfaceHolder2VideoSink;
} else if ("SurfaceTexture 1".equals(mSinkString)) {
if (mGLView1VideoSink == null) {
mGLView1VideoSink = new GLViewVideoSink(mGLView1);
}
mSelectedVideoSink = mGLView1VideoSink;
} else if ("SurfaceTexture 2".equals(mSinkString)) {
if (mGLView2VideoSink == null) {
mGLView2VideoSink = new GLViewVideoSink(mGLView2);
}
mSelectedVideoSink = mGLView2VideoSink;
}
}
public void onNothingSelected(AdapterView<?> parent) {
Log.v(TAG, "onNothingSelected");
mSinkString = null;
mSelectedVideoSink = null;
}
});
// initialize button click handlers
// Java MediaPlayer start/pause
((Button) findViewById(R.id.start_java)).setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
if (mJavaMediaPlayerVideoSink == null) {
if (mSelectedVideoSink == null) {
return;
}
mSelectedVideoSink.useAsSinkForJava(mMediaPlayer);
mJavaMediaPlayerVideoSink = mSelectedVideoSink;
}
if (!mMediaPlayerIsPrepared) {
if (mSourceString != null) {
try {
mMediaPlayer.setDataSource(mSourceString);
} catch (IOException e) {
Log.e(TAG, "IOException " + e);
}
mMediaPlayer.prepareAsync();
}
} else if (mMediaPlayer.isPlaying()) {
mMediaPlayer.pause();
} else {
mMediaPlayer.start();
}
}
});
// native MediaPlayer start/pause
((Button) findViewById(R.id.start_native)).setOnClickListener(new View.OnClickListener() {
boolean created = false;
public void onClick(View view) {
if (!created) {
if (mNativeMediaPlayerVideoSink == null) {
if (mSelectedVideoSink == null) {
return;
}
mSelectedVideoSink.useAsSinkForNative();
mNativeMediaPlayerVideoSink = mSelectedVideoSink;
}
if (mSourceString != null) {
created = createStreamingMediaPlayer(mSourceString);
}
}
if (created) {
mIsPlayingStreaming = !mIsPlayingStreaming;
setPlayingStreamingMediaPlayer(mIsPlayingStreaming);
}
}
});
// finish
((Button) findViewById(R.id.finish)).setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
finish();
}
});
// Java MediaPlayer rewind
((Button) findViewById(R.id.rewind_java)).setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
if (mMediaPlayerIsPrepared) {
mMediaPlayer.seekTo(0);
}
}
});
// native MediaPlayer rewind
((Button) findViewById(R.id.rewind_native)).setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
if (mNativeMediaPlayerVideoSink != null) {
rewindStreamingMediaPlayer();
}
}
});
}
/** Called when the activity is about to be paused. */
@Override
protected void onPause()
{
mIsPlayingStreaming = false;
setPlayingStreamingMediaPlayer(false);
mGLView1.onPause();
mGLView2.onPause();
super.onPause();
}
@Override
protected void onResume() {
super.onResume();
mGLView1.onResume();
mGLView2.onResume();
}
/** Called when the activity is about to be destroyed. */
@Override
protected void onDestroy()
{
shutdown();
super.onDestroy();
}
private MyGLSurfaceView mGLView1, mGLView2;
/** Native methods, implemented in jni folder */
public static native void createEngine();
public static native boolean createStreamingMediaPlayer(String filename);
public static native void setPlayingStreamingMediaPlayer(boolean isPlaying);
public static native void shutdown();
public static native void setSurface(Surface surface);
public static native void rewindStreamingMediaPlayer();
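// Each of these maps to a Java_com_example_nativemedia_NativeMedia_* function in native-media-jni.c.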
/** Load jni .so on initialization */
static {
System.loadLibrary("native-media-jni");
}
// VideoSink abstracts out the difference between Surface and SurfaceTexture
// aka SurfaceHolder and GLSurfaceView
static abstract class VideoSink {
abstract void setFixedSize(int width, int height);
abstract void useAsSinkForJava(MediaPlayer mediaPlayer);
abstract void useAsSinkForNative();
}
static class SurfaceHolderVideoSink extends VideoSink {
private final SurfaceHolder mSurfaceHolder;
SurfaceHolderVideoSink(SurfaceHolder surfaceHolder) {
mSurfaceHolder = surfaceHolder;
}
void setFixedSize(int width, int height) {
mSurfaceHolder.setFixedSize(width, height);
}
void useAsSinkForJava(MediaPlayer mediaPlayer) {
// Use MediaPlayer.setSurface(Surface), new in API level 14, instead of
// MediaPlayer.setDisplay(SurfaceHolder), available since API level 1,
// because setSurface also works with a Surface derived from a SurfaceTexture.
Surface s = mSurfaceHolder.getSurface();
mediaPlayer.setSurface(s);
s.release();
}
void useAsSinkForNative() {
Surface s = mSurfaceHolder.getSurface();
setSurface(s);
s.release();
}
}
static class GLViewVideoSink extends VideoSink {
private final MyGLSurfaceView mMyGLSurfaceView;
GLViewVideoSink(MyGLSurfaceView myGLSurfaceView) {
mMyGLSurfaceView = myGLSurfaceView;
}
void setFixedSize(int width, int height) {
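// nothing to do here; the GL view renders the decoded frames at its own surface size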
}
void useAsSinkForJava(MediaPlayer mediaPlayer) {
SurfaceTexture st = mMyGLSurfaceView.getSurfaceTexture();
Surface s = new Surface(st);
mediaPlayer.setSurface(s);
s.release();
}
void useAsSinkForNative() {
SurfaceTexture st = mMyGLSurfaceView.getSurfaceTexture();
Surface s = new Surface(st);
setSurface(s);
s.release();
}
}
}