2 Commits

Author SHA1 Message Date
Gerry
485f2b9d19 Merge pull request #489 from googlesamples/mk-fix-37001153
Fix gles3jni bug b/37001153 ( dated 2014 ) in branch android-mk
2018-02-01 15:27:44 -08:00
guanghuafan
0e88bb1c3e Fix gles3jni bug b/37001153 ( dated 2014 ) in branch android-mk 2018-02-01 14:39:58 -08:00
1547 changed files with 3600 additions and 144082 deletions

View File

@@ -1,131 +0,0 @@
#!/bin/bash
# Build every sample listed below and verify the expected artifacts exist.
# - Configure SDK/NDK locations so we do not depend on local.properties, e.g.:
#     export ANDROID_HOME=$HOME/dev/sdk
#     export ANDROID_NDK_HOME=$ANDROID_HOME/ndk-bundle
# - Execute from the repo root directory.
# On any missing artifact, SAMPLE_CI_RESULT is set to 1 (this file is meant
# to be sourced by CI, not executed, so we never call exit).

# Temp file name to hold the build-failure summary.
BUILD_RESULT_FILE=build_result.txt
# Repo root directory.
NDK_SAMPLE_REPO=.

declare projects=(
  audio-echo
  bitmap-plasma
  camera
  endless-tunnel
  gles3jni
  hello-gl2
  hello-jni
  hello-jniCallback
  hello-libs
  hello-neon
  native-activity
  native-audio
  native-codec
  native-media
  native-plasma
  nn-samples
  prefab/curl-ssl
  prefab/prefab-publishing
  san-angeles
  sensor-graph
  # webp
  teapots
  ## ndk-build samples
  other-builds/ndkbuild/bitmap-plasma
  other-builds/ndkbuild/gles3jni
  other-builds/ndkbuild/hello-gl2
  other-builds/ndkbuild/hello-jni
  other-builds/ndkbuild/hello-libs
  other-builds/ndkbuild/hello-neon
  other-builds/ndkbuild/native-activity
  other-builds/ndkbuild/native-audio
  other-builds/ndkbuild/native-codec
  other-builds/ndkbuild/native-media
  other-builds/ndkbuild/native-plasma
  other-builds/ndkbuild/nn-samples
  other-builds/ndkbuild/san-angeles
  other-builds/ndkbuild/teapots
)

for d in "${projects[@]}"; do
  # Guard the pushd: without this, a missing project directory would leave us
  # running gradlew in whatever directory we happened to be in. The missing
  # apk is still reported by the artifact check below.
  pushd "${NDK_SAMPLE_REPO}/${d}" >/dev/null || { echo "cannot enter ${d}" >&2; continue; }
  TERM=dumb ./gradlew -q clean assembleDebug
  popd >/dev/null
done

# Check that the apks all got built fine.
declare apks=(
  audio-echo/app/build/outputs/apk/debug/app-debug.apk
  bitmap-plasma/app/build/outputs/apk/debug/app-debug.apk
  camera/basic/build/outputs/apk/debug/basic-debug.apk
  camera/texture-view/build/outputs/apk/debug/texture-view-debug.apk
  endless-tunnel/app/build/outputs/apk/debug/app-debug.apk
  gles3jni/app/build/outputs/apk/debug/app-debug.apk
  hello-gl2/app/build/outputs/apk/debug/app-debug.apk
  hello-jni/app/build/outputs/apk/arm8/debug/app-arm8-debug.apk
  hello-jniCallback/app/build/outputs/apk/debug/app-debug.apk
  hello-libs/app/build/outputs/apk/debug/app-debug.apk
  hello-neon/app/build/outputs/apk/debug/app-debug.apk
  native-activity/app/build/outputs/apk/debug/app-debug.apk
  native-audio/app/build/outputs/apk/debug/app-debug.apk
  native-codec/app/build/outputs/apk/debug/app-debug.apk
  native-media/app/build/outputs/apk/debug/app-debug.apk
  native-plasma/app/build/outputs/apk/debug/app-debug.apk
  nn-samples/basic/build/outputs/apk/debug/basic-debug.apk
  nn-samples/sequence/build/outputs/apk/debug/sequence-debug.apk
  prefab/curl-ssl/app/build/outputs/apk/debug/app-debug.apk
  prefab/prefab-publishing/mylibrary/build/outputs/aar/mylibrary-debug.aar
  sensor-graph/accelerometer/build/outputs/apk/debug/accelerometer-debug.apk
  san-angeles/app/build/outputs/apk/debug/app-armeabi-v7a-debug.apk
  san-angeles/app/build/outputs/apk/debug/app-arm64-v8a-debug.apk
  san-angeles/app/build/outputs/apk/debug/app-x86-debug.apk
  teapots/classic-teapot/build/outputs/apk/debug/classic-teapot-debug.apk
  teapots/more-teapots/build/outputs/apk/debug/more-teapots-debug.apk
  teapots/choreographer-30fps/build/outputs/apk/debug/choreographer-30fps-debug.apk
  teapots/image-decoder/build/outputs/apk/debug/image-decoder-debug.apk
  # webp/view/build/outputs/apk/debug/view-arm7-debug.apk
  ## other-builds
  other-builds/ndkbuild/bitmap-plasma/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/gles3jni/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/hello-gl2/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/hello-jni/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/hello-libs/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/hello-neon/app/build/outputs/apk/arm7/debug/app-arm7-debug.apk
  other-builds/ndkbuild/native-activity/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/native-audio/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/native-codec/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/native-media/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/native-plasma/app/build/outputs/apk/debug/app-debug.apk
  other-builds/ndkbuild/nn-samples/basic/build/outputs/apk/debug/basic-debug.apk
  other-builds/ndkbuild/san-angeles/app/build/outputs/apk/debug/app-armeabi-v7a-debug.apk
  other-builds/ndkbuild/san-angeles/app/build/outputs/apk/debug/app-arm64-v8a-debug.apk
  other-builds/ndkbuild/san-angeles/app/build/outputs/apk/debug/app-x86-debug.apk
  other-builds/ndkbuild/teapots/more-teapots/build/outputs/apk/debug/more-teapots-debug.apk
  other-builds/ndkbuild/teapots/classic-teapot/build/outputs/apk/debug/classic-teapot-debug.apk
)

# BUILD_RESULT_FILE is a plain file; -r was unnecessary.
rm -f "${BUILD_RESULT_FILE}"
for apk in "${apks[@]}"; do
  if [[ ! -f "${NDK_SAMPLE_REPO}/${apk}" ]]; then
    export SAMPLE_CI_RESULT=1
    echo "${apk} does not build" >> "${BUILD_RESULT_FILE}"
  fi
done

if [[ -f "${BUILD_RESULT_FILE}" ]]; then
  echo "******* Failed Builds ********:"
  cat "${BUILD_RESULT_FILE}"
else
  echo "======= BUILD SUCCESS ======="
fi
rm -f "${BUILD_RESULT_FILE}"

View File

@@ -1,31 +0,0 @@
#!/bin/bash
# Miscellaneous repo sanity checks. Runs with 'set +e' on purpose: each
# check's exit status is accumulated into MISC_STATUS instead of aborting,
# and the total is folded into SAMPLE_CI_RESULT at the end (this file is
# sourced by CI).
set +e
MISC_STATUS=0

# check that all Support sections of the READMEs are the same.
for f in */README.md; do
  sed -n '/Support/,/License/p' "$f" > "/tmp/$(dirname "$f").readme"
done && diff -u --from-file=/tmp/hello-jni.readme /tmp/*.readme
MISC_STATUS=$((MISC_STATUS + $?))

# check that all targetSdkVersion are 26+
# test "$(grep -H targetSdkVersion */app/build.gradle | tee /dev/stderr | cut -d= -f 2 | xargs -n1 echo | sort | uniq | wc -l)" = "2"

# check that there are no tabs in AndroidManifest
# (the '!' inverts grep so a match — a tab — yields a non-zero status)
(! grep -n $'\t' */*/src/main/AndroidManifest.xml) | cat -t
MISC_STATUS=$((MISC_STATUS + ${PIPESTATUS[0]}))

# check that there are no trailing spaces in AndroidManifest
(! grep -E '\s+$' */*/src/main/AndroidManifest.xml) | cat -e
MISC_STATUS=$((MISC_STATUS + ${PIPESTATUS[0]}))

## Fix the builder then enable it [TBD]
#(cd builder && ./gradlew test)
# print build failure summary
# pandoc builder/build/reports/tests/index.html -t plain | sed -n '/^Failed tests/,/default-package/p'
# print lint results details
# for f in */app/build/outputs/lint-results.html; do pandoc $f -t plain; done

# populate the error into the final status
if [[ "$MISC_STATUS" -ne 0 ]]; then
  SAMPLE_CI_RESULT=$((SAMPLE_CI_RESULT + 1))
fi

View File

@@ -1,4 +0,0 @@
#!/bin/bash
# Placeholder for the device/emulator test stage; sourced by CI after the
# build stage. Intentionally a no-op for now.
# TBD: load apks on emulator
#

View File

@@ -1,100 +0,0 @@
#!/bin/bash
# assumption:
# - pwd must be inside the repo's home directory (android-ndk)
# - upon completion, we are still in the same directory ( no change )
# parse all build.gradle to find the specified tokens' version
# usage:
# retrieve_versions token version_file
# where
# token: the token to search for inside build.grade
# version string is right after the token string
# version_file: file to hold the given versions
# one version at a line
# Scan every build.gradle under the current directory for the given token and
# collect the unique version strings that follow it.
# $1: token to search for (e.g. compileSdkVersion, ndkVersion)
# $2: output file; receives one unique version per line, in first-seen order
# Returns 1 (with a message) if either argument is empty, 0 otherwise.
retrieve_versions() {
  local token=$1
  local version_file=$2
  if [[ -z "$token" || -z "$version_file" ]]; then
    echo "input string(s) may be empty: token: $token; version_file: $version_file"
    return 1
  fi
  # Strip everything up to and including the token, then drop '='/'+',
  # quotes and whitespace; de-duplicate while preserving order.
  find . -type f -name 'build.gradle' -exec grep -- "$token" {} + | \
    sed -e "s/^.*$token//" -e 's/[=+]//g' -e 's/"//g' -e "s/'//g" \
        -e 's/[[:space:]]//g' | \
    awk '!seen[$0]++' > "$version_file"
  return 0
}
# helper function for src_str > target_str
# Usage
# comp_ver_string src_str target_str
# return:
# 0: src_str <= target_str
# 1: otherwise
# Compare two dotted version strings numerically, field by field.
# Usage: comp_ver_string src_str target_str
# Returns 0 when src_str <= target_str, 1 otherwise.
# Missing fields are treated as 0 (so "1.2" == "1.2.0").
comp_ver_string () {
  local src=$1
  local dst=$2
  if [[ $src == $dst ]]
  then
    return 0
  fi
  # Split both strings on '.'; IFS is local so callers are unaffected.
  local IFS=.
  local -a src_parts=($src)
  local -a dst_parts=($dst)
  # Compare over the longer of the two field lists.
  local count=${#src_parts[@]}
  if (( ${#dst_parts[@]} > count )); then
    count=${#dst_parts[@]}
  fi
  local k a b
  for (( k = 0; k < count; k++ ))
  do
    # Absent fields default to 0; 10# forces base-10 (leading zeros safe).
    a=${src_parts[k]:-0}
    b=${dst_parts[k]:-0}
    if (( 10#$a < 10#$b )); then
      return 0
    fi
    if (( 10#$a > 10#$b )); then
      return 1
    fi
  done
  return 0
}
# Prepare to install necessary packages.
# sdkmanager warns when ~/.android/repositories.cfg is missing, so create an
# empty one if it does not exist yet. (The original test was inverted: it only
# touched the file when it already existed, which is a no-op.)
if [ ! -f ~/.android/repositories.cfg ]; then
  mkdir -p ~/.android
  touch ~/.android/repositories.cfg
fi

TMP_SETUP_FILENAME=versions_.txt

## Retrieve all necessary Android Platforms and install them all.
retrieve_versions compileSdkVersion "$TMP_SETUP_FILENAME"
# Install platforms
while read -r version_; do
  # Some build.gradle files spell it "android-NN"; normalize to the number.
  version_=${version_//android-/}
  echo y | "$ANDROID_HOME"/tools/bin/sdkmanager "platforms;android-$version_"
done < "$TMP_SETUP_FILENAME"
# echo "Android platforms:"; cat $TMP_SETUP_FILENAME

# Install side-by-side NDKs
retrieve_versions ndkVersion "$TMP_SETUP_FILENAME"
while read -r version_; do
  echo y | "$ANDROID_HOME"/tools/bin/sdkmanager "ndk;$version_" --channel=3
done < "$TMP_SETUP_FILENAME"
# echo "NDK versions:"; cat $TMP_SETUP_FILENAME

# add customized cmake installation
echo y | "$ANDROID_HOME"/cmdline-tools/latest/bin/sdkmanager "cmake;3.18.1"

rm -f "$TMP_SETUP_FILENAME"

View File

@@ -1,30 +0,0 @@
name: build

on:
  push:
    # NOTE: a YAML flow sequence needs commas — '[ main develop ]' is a single
    # literal string "main develop" and would never match either branch.
    branches: [ main, develop ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: set up JDK 1.8
        uses: actions/setup-java@v1
        with:
          java-version: 1.8
      - name: setup env
        run: source .ci_tools/setup_env.sh
      - name: build samples
        run: |
          export SAMPLE_CI_RESULT=0
          source .ci_tools/build_samples.sh
          source .ci_tools/run_samples.sh
          eval "[[ $SAMPLE_CI_RESULT == 0 ]]"

View File

@@ -1,31 +0,0 @@
# Duplicates the default main branch to the old master branch
name: Duplicates main to old master branch

# Controls when the action will run. Triggers the workflow on push
# events but only for the main branch
on:
  push:
    branches: [ main ]

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "copy-branch"
  copy-branch:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it,
      # but specifies master branch (old default). fetch-depth: 0 pulls full
      # history so the merge below has the common ancestor available.
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
          ref: master
      # Merge main into master under a bot identity, then push the result.
      - run: |
          git config user.name github-actions
          git config user.email github-actions@github.com
          git merge origin/main
          git push

13
.gitignore vendored
View File

@@ -1,13 +0,0 @@
# IDE and build-system state
.gradle
.idea
**/*.iml
local.properties
build
*~
.externalNativeBuild
# Fetched/generated sources and binaries
libwebp
.DS_Store
**/ndkHelperBin
**/.cxx
display-p3/third_party

View File

@@ -31,3 +31,26 @@ accept your pull requests.
1. The repo owner will review your request. If it is approved, the change will
be merged. If it needs additional work, the repo owner will respond with
useful comments.
## Contributing a New Sample App
1. Sign a Contributor License Agreement, if you have not yet done so (see
details above).
1. Create your own repo for your app following this naming convention:
* mirror-{app-name}-{language or platform}
* apps: quickstart, photohunt-server, photohunt-client
* example: mirror-quickstart-android
* For multi-language apps, concatenate the primary languages like this:
mirror-photohunt-server-java-python.
1. Create your sample app in this repo.
* Be sure to clone the README.md, CONTRIBUTING.md and LICENSE files from the
googlecast repo.
* Ensure that your code is clear and comprehensible.
* Ensure that your code has an appropriate set of unit tests which all pass.
* Instructional value is the top priority when evaluating new app proposals for
this collection of repos.
1. Submit a request to fork your repo in googlecast organization.
1. The repo owner will review your request. If it is approved, the sample will
be merged. If it needs additional work, the repo owner will respond with
useful comments.

View File

@@ -0,0 +1,34 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.sample.moreteapots"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="11"
android:targetSdkVersion="19" />
<uses-feature android:glEsVersion="0x00020000"></uses-feature>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"></uses-permission>
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme"
android:hasCode="true"
android:name="com.sample.moreteapots.MoreTeapotsApplication"
>
<!-- Our activity is the built-in NativeActivity framework class.
This will take care of integrating with our NDK code. -->
<activity android:name="com.sample.moreteapots.MoreTeapotsNativeActivity"
android:label="@string/app_name"
android:configChanges="orientation|keyboardHidden">
<!-- Tell NativeActivity the name of our .so -->
<meta-data android:name="android.app.lib_name"
android:value="MoreTeapotsNativeActivity" />
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@@ -17,7 +17,7 @@
//
uniform lowp vec3 vMaterialAmbient;
uniform lowp vec4 vMaterialSpecular;
uniform mediump vec4 vMaterialSpecular;
varying lowp vec4 colorDiffuse;

View File

@@ -17,7 +17,7 @@
#version 300 es
precision mediump float;
uniform lowp vec4 vMaterialSpecular;
uniform mediump vec4 vMaterialSpecular;
uniform highp vec3 vLight0;
in lowp vec4 colorDiffuse;

View File

@@ -31,6 +31,7 @@ uniform highp vec3 vLight0;
uniform lowp vec4 vMaterialDiffuse;
uniform lowp vec3 vMaterialAmbient;
uniform lowp vec4 vMaterialSpecular;
void main(void)
{

View File

@@ -0,0 +1,19 @@
# ndk-build module definition for the MoreTeapots native activity.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := MoreTeapotsNativeActivity
# NOTE: no trailing backslash after the last source file — a dangling
# continuation would splice the following 'LOCAL_C_INCLUDES :=' line into
# LOCAL_SRC_FILES and leave LOCAL_C_INCLUDES unset.
LOCAL_SRC_FILES := MoreTeapotsNativeActivity.cpp \
                   MoreTeapotsRenderer.cpp

LOCAL_C_INCLUDES :=
LOCAL_CFLAGS :=
LOCAL_LDLIBS := -llog -landroid -lEGL -lGLESv2
LOCAL_STATIC_LIBRARIES := cpufeatures android_native_app_glue ndk_helper

include $(BUILD_SHARED_LIBRARY)

$(call import-module,android/ndk_helper)
$(call import-module,android/native_app_glue)
$(call import-module,android/cpufeatures)

View File

@@ -0,0 +1,4 @@
# Minimum platform the native code is built against.
# NOTE(review): android-9 is below the manifest's minSdkVersion of 11 —
# confirm which is intended before changing either.
APP_PLATFORM := android-9
# Build every supported ABI.
APP_ABI := all
# NOTE(review): stlport_static was removed in modern NDKs; this presumes a
# legacy NDK toolchain — use c++_static when upgrading.
APP_STL := stlport_static

View File

@@ -0,0 +1,500 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//--------------------------------------------------------------------------------
// Include files
//--------------------------------------------------------------------------------
#include <jni.h>
#include <errno.h>
#include <vector>
#include <EGL/egl.h>
#include <GLES/gl.h>
#include <android/sensor.h>
#include <android/log.h>
#include <android_native_app_glue.h>
#include <android/native_window_jni.h>
#include <cpu-features.h>
#include "MoreTeapotsRenderer.h"
//-------------------------------------------------------------------------
// Preprocessor
//-------------------------------------------------------------------------
// Java-side helper class bound through ndk_helper::JNIHelper in android_main.
#define HELPER_CLASS_NAME "com/sample/helper/NDKHelper" //Class name of helper function
//-------------------------------------------------------------------------
// Constants
//-------------------------------------------------------------------------
// Teapot grid dimensions: 8 x 8 x 8 = 512 teapot instances.
const int32_t NUM_TEAPOTS_X = 8;
const int32_t NUM_TEAPOTS_Y = 8;
const int32_t NUM_TEAPOTS_Z = 8;
//-------------------------------------------------------------------------
// Shared state for our app.
//-------------------------------------------------------------------------
struct android_app;
// Application engine: owns the renderer, gesture detectors and sensor
// plumbing, and drives them from the native_app_glue callbacks.
// Member declaration order matters — the constructor's init list follows it.
class Engine
{
  MoreTeapotsRenderer renderer_;       // draws the teapot field
  ndk_helper::GLContext* gl_context_;  // EGL/GL context singleton (not owned)
  bool initialized_resources_;         // true once LoadResources() has run
  bool has_focus_;                     // window focus; gates rendering
  ndk_helper::DoubletapDetector doubletap_detector_;
  ndk_helper::PinchDetector pinch_detector_;
  ndk_helper::DragDetector drag_detector_;
  ndk_helper::PerfMonitor monitor_;    // frame timing / FPS
  ndk_helper::TapCamera tap_camera_;   // camera driven by the gestures
  android_app* app_;                   // glue state (not owned)
  ASensorManager* sensor_manager_;
  const ASensor* accelerometer_sensor_;
  ASensorEventQueue* sensor_event_queue_;

  void UpdateFPS( float fps );
  void ShowUI();
  void TransformPosition( ndk_helper::Vec2& vec );

public:
  // Static callbacks installed on the android_app in android_main();
  // the Engine instance is recovered from app->userData.
  static void HandleCmd( struct android_app* app,
      int32_t cmd );
  static int32_t HandleInput( android_app* app,
      AInputEvent* event );

  Engine();
  ~Engine();
  void SetState( android_app* state );
  int InitDisplay();
  void LoadResources();
  void UnloadResources();
  void DrawFrame();
  void TermDisplay();
  void TrimMemory();
  bool IsReady();
  void UpdatePosition( AInputEvent* event,
      int32_t index,
      float& x,
      float& y );
  void InitSensors();
  void ProcessSensors( int32_t id );
  void SuspendSensors();
  void ResumeSensors();
};
//-------------------------------------------------------------------------
// Ctor — zero all state; GL/renderer setup is deferred to InitDisplay().
//-------------------------------------------------------------------------
Engine::Engine() :
    initialized_resources_( false ),
    has_focus_( false ),
    app_( NULL ),
    sensor_manager_( NULL ),
    accelerometer_sensor_( NULL ),
    sensor_event_queue_( NULL )
{
  // GLContext is a process-wide singleton; not owned, never deleted here.
  gl_context_ = ndk_helper::GLContext::GetInstance();
}

//-------------------------------------------------------------------------
// Dtor — nothing to release; GL teardown happens via TermDisplay().
//-------------------------------------------------------------------------
Engine::~Engine()
{
}
/**
 * Load resources: initialize the renderer with the teapot grid size and
 * attach the gesture-driven camera.
 */
void Engine::LoadResources()
{
  renderer_.Init( NUM_TEAPOTS_X, NUM_TEAPOTS_Y, NUM_TEAPOTS_Z );
  renderer_.Bind( &tap_camera_ );
}

/**
 * Unload resources: release the renderer's GL buffers and program.
 */
void Engine::UnloadResources()
{
  renderer_.Unload();
}
/**
 * Initialize an EGL context for the current display.
 * Always returns 0 — no failure path is reported to the caller.
 */
int Engine::InitDisplay()
{
  if( !initialized_resources_ )
  {
    // First call: create the context and load all GL resources.
    gl_context_->Init( app_->window );
    LoadResources();
    initialized_resources_ = true;
  }
  else
  {
    // initialize OpenGL ES and EGL
    // The context may have been lost while suspended; reload if Resume fails.
    if( EGL_SUCCESS != gl_context_->Resume( app_->window ) )
    {
      UnloadResources();
      LoadResources();
    }
  }

  ShowUI();

  // Initialize GL state.
  glEnable( GL_CULL_FACE );
  glEnable( GL_DEPTH_TEST );
  glDepthFunc( GL_LEQUAL );

  // Note that screen size might have been changed
  glViewport( 0, 0, gl_context_->GetScreenWidth(), gl_context_->GetScreenHeight() );
  renderer_.UpdateViewport();

  tap_camera_.SetFlip( 1.f, -1.f, -1.f );
  tap_camera_.SetPinchTransformFactor( 10.f, 10.f, 8.f );

  return 0;
}
/**
 * Draw the current frame: update FPS stats, advance the animation,
 * render, and swap buffers.
 */
void Engine::DrawFrame()
{
  float fps;
  if( monitor_.Update( fps ) )
  {
    UpdateFPS( fps );  // report to the Java UI once per measurement window
  }
  double dTime = monitor_.GetCurrentTime();
  renderer_.Update( dTime );

  // Just fill the screen with a color.
  glClearColor( 0.5f, 0.5f, 0.5f, 1.f );
  glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
  renderer_.Render();

  // Swap; a failed swap means the EGL context was lost — rebuild resources.
  if( EGL_SUCCESS != gl_context_->Swap() )
  {
    UnloadResources();
    LoadResources();
  }
}
/**
 * Tear down the EGL context currently associated with the display.
 */
void Engine::TermDisplay()
{
  gl_context_->Suspend();
}

// Release GL resources in response to APP_CMD_LOW_MEMORY.
void Engine::TrimMemory()
{
  LOGI( "Trimming memory" );
  gl_context_->Invalidate();
}
/**
 * Process the next input event.
 * Feeds motion events through the double-tap, drag and pinch detectors
 * and drives the tap camera. Returns 1 if the event was consumed
 * (any motion event), 0 otherwise.
 */
int32_t Engine::HandleInput( android_app* app,
    AInputEvent* event )
{
  Engine* eng = (Engine*) app->userData;
  if( AInputEvent_getType( event ) == AINPUT_EVENT_TYPE_MOTION )
  {
    // Every detector sees every motion event so their internal state
    // stays consistent.
    ndk_helper::GESTURE_STATE doubleTapState = eng->doubletap_detector_.Detect( event );
    ndk_helper::GESTURE_STATE dragState = eng->drag_detector_.Detect( event );
    ndk_helper::GESTURE_STATE pinchState = eng->pinch_detector_.Detect( event );

    //Double tap detector has a priority over other detectors
    if( doubleTapState == ndk_helper::GESTURE_STATE_ACTION )
    {
      //Detected double tap: reset the camera.
      eng->tap_camera_.Reset( true );
    }
    else
    {
      //Handle drag state
      if( dragState & ndk_helper::GESTURE_STATE_START )
      {
        //Otherwise, start dragging
        ndk_helper::Vec2 v;
        eng->drag_detector_.GetPointer( v );
        eng->TransformPosition( v );
        eng->tap_camera_.BeginDrag( v );
      }
      else if( dragState & ndk_helper::GESTURE_STATE_MOVE )
      {
        ndk_helper::Vec2 v;
        eng->drag_detector_.GetPointer( v );
        eng->TransformPosition( v );
        eng->tap_camera_.Drag( v );
      }
      else if( dragState & ndk_helper::GESTURE_STATE_END )
      {
        eng->tap_camera_.EndDrag();
      }

      //Handle pinch state (may coexist with drag in the same event)
      if( pinchState & ndk_helper::GESTURE_STATE_START )
      {
        //Start new pinch
        ndk_helper::Vec2 v1;
        ndk_helper::Vec2 v2;
        eng->pinch_detector_.GetPointers( v1, v2 );
        eng->TransformPosition( v1 );
        eng->TransformPosition( v2 );
        eng->tap_camera_.BeginPinch( v1, v2 );
      }
      else if( pinchState & ndk_helper::GESTURE_STATE_MOVE )
      {
        //Multi touch
        //Continue the pinch with the updated pointer positions.
        ndk_helper::Vec2 v1;
        ndk_helper::Vec2 v2;
        eng->pinch_detector_.GetPointers( v1, v2 );
        eng->TransformPosition( v1 );
        eng->TransformPosition( v2 );
        eng->tap_camera_.Pinch( v1, v2 );
      }
    }
    return 1;
  }
  return 0;
}
/**
 * Process the next main command from the android_app glue
 * (lifecycle / window / focus transitions).
 */
void Engine::HandleCmd( struct android_app* app,
    int32_t cmd )
{
  Engine* eng = (Engine*) app->userData;
  switch( cmd )
  {
  case APP_CMD_SAVE_STATE:
    // No state is persisted by this sample.
    break;
  case APP_CMD_INIT_WINDOW:
    // The window is being shown, get it ready.
    if( app->window != NULL )
    {
      eng->InitDisplay();
      eng->DrawFrame();
    }
    break;
  case APP_CMD_TERM_WINDOW:
    // The window is being hidden or closed, clean it up.
    eng->TermDisplay();
    eng->has_focus_ = false;
    break;
  case APP_CMD_STOP:
    break;
  case APP_CMD_GAINED_FOCUS:
    eng->ResumeSensors();
    //Start animation
    eng->has_focus_ = true;
    break;
  case APP_CMD_LOST_FOCUS:
    eng->SuspendSensors();
    // Also stop animating; draw one last frame in the unfocused state.
    eng->has_focus_ = false;
    eng->DrawFrame();
    break;
  case APP_CMD_LOW_MEMORY:
    //Free up GL resources
    eng->TrimMemory();
    break;
  }
}
//-------------------------------------------------------------------------
// Sensor handlers
//-------------------------------------------------------------------------
// Acquire the default accelerometer and create an event queue attached to
// the app's looper under LOOPER_ID_USER.
void Engine::InitSensors()
{
  sensor_manager_ = ASensorManager_getInstance();
  accelerometer_sensor_ = ASensorManager_getDefaultSensor( sensor_manager_,
      ASENSOR_TYPE_ACCELEROMETER );
  sensor_event_queue_ = ASensorManager_createEventQueue( sensor_manager_, app_->looper,
      LOOPER_ID_USER, NULL, NULL );
}

// Drain pending accelerometer events. The values are currently discarded;
// draining keeps the queue from backing up.
void Engine::ProcessSensors( int32_t id )
{
  // If a sensor has data, process it now.
  if( id == LOOPER_ID_USER )
  {
    if( accelerometer_sensor_ != NULL )
    {
      ASensorEvent event;
      while( ASensorEventQueue_getEvents( sensor_event_queue_, &event, 1 ) > 0 )
      {
      }
    }
  }
}

void Engine::ResumeSensors()
{
  // When our app gains focus, we start monitoring the accelerometer.
  if( accelerometer_sensor_ != NULL )
  {
    ASensorEventQueue_enableSensor( sensor_event_queue_, accelerometer_sensor_ );
    // We'd like to get 60 events per second (in us).
    ASensorEventQueue_setEventRate( sensor_event_queue_, accelerometer_sensor_,
        (1000L / 60) * 1000 );
  }
}

void Engine::SuspendSensors()
{
  // When our app loses focus, we stop monitoring the accelerometer.
  // This is to avoid consuming battery while not being used.
  if( accelerometer_sensor_ != NULL )
  {
    ASensorEventQueue_disableSensor( sensor_event_queue_, accelerometer_sensor_ );
  }
}
//-------------------------------------------------------------------------
// Misc
//-------------------------------------------------------------------------
// Store the android_app pointer and propagate its configuration to the
// gesture detectors.
void Engine::SetState( android_app* state )
{
  app_ = state;
  doubletap_detector_.SetConfiguration( app_->config );
  drag_detector_.SetConfiguration( app_->config );
  pinch_detector_.SetConfiguration( app_->config );
}

// Ready to render only while the app holds window focus.
bool Engine::IsReady()
{
  if( has_focus_ )
    return true;

  return false;
}

// Map raw screen coordinates into the [-1, 1] range on both axes
// (origin at screen center).
void Engine::TransformPosition( ndk_helper::Vec2& vec )
{
  vec = ndk_helper::Vec2( 2.0f, 2.0f ) * vec
      / ndk_helper::Vec2( gl_context_->GetScreenWidth(), gl_context_->GetScreenHeight() )
      - ndk_helper::Vec2( 1.f, 1.f );
}

// Invoke the Java-side showUI() on the activity via JNI.
// Attaches this native thread to the VM for the duration of the call.
void Engine::ShowUI()
{
  JNIEnv *jni;
  app_->activity->vm->AttachCurrentThread( &jni, NULL );

  //Default class retrieval
  jclass clazz = jni->GetObjectClass( app_->activity->clazz );
  jmethodID methodID = jni->GetMethodID( clazz, "showUI", "()V" );
  jni->CallVoidMethod( app_->activity->clazz, methodID );

  app_->activity->vm->DetachCurrentThread();
  return;
}

// Report the measured FPS to the Java-side updateFPS(float) via JNI.
void Engine::UpdateFPS( float fps )
{
  JNIEnv *jni;
  app_->activity->vm->AttachCurrentThread( &jni, NULL );

  //Default class retrieval
  jclass clazz = jni->GetObjectClass( app_->activity->clazz );
  jmethodID methodID = jni->GetMethodID( clazz, "updateFPS", "(F)V" );
  jni->CallVoidMethod( app_->activity->clazz, methodID, fps );

  app_->activity->vm->DetachCurrentThread();
  return;
}
// Single global engine instance shared with the static glue callbacks.
Engine g_engine;

/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main( android_app* state )
{
  app_dummy();  // legacy glue anchor; deprecated/no-op in modern NDKs

  g_engine.SetState( state );

  //Init helper functions (binds the Java-side NDKHelper class)
  ndk_helper::JNIHelper::GetInstance()->Init( state->activity, HELPER_CLASS_NAME );

  state->userData = &g_engine;
  state->onAppCmd = Engine::HandleCmd;
  state->onInputEvent = Engine::HandleInput;

#ifdef USE_NDK_PROFILER
  monstartup("libMoreTeapotsNativeActivity.so");
#endif

  // Prepare to monitor accelerometer
  g_engine.InitSensors();

  // loop waiting for stuff to do.
  while( 1 )
  {
    // Read all pending events.
    int id;
    int events;
    android_poll_source* source;

    // If not animating, we will block forever waiting for events.
    // If animating, we loop until all events are read, then continue
    // to draw the next frame of animation.
    while( (id = ALooper_pollAll( g_engine.IsReady() ? 0 : -1, NULL, &events, (void**) &source ))
        >= 0 )
    {
      // Process this event.
      if( source != NULL )
        source->process( state, source );

      g_engine.ProcessSensors( id );

      // Check if we are exiting.
      if( state->destroyRequested != 0 )
      {
        g_engine.TermDisplay();
        return;
      }
    }

    if( g_engine.IsReady() )
    {
      // Drawing is throttled to the screen update rate, so there
      // is no need to do timing here.
      g_engine.DrawFrame();
    }
  }
}

View File

@@ -0,0 +1,555 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//--------------------------------------------------------------------------------
// MoreTeapotsRenderer.cpp
// Render teapots
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
// Include files
//--------------------------------------------------------------------------------
#include "MoreTeapotsRenderer.h"
//--------------------------------------------------------------------------------
// Teapot model data
//--------------------------------------------------------------------------------
#include "teapot.inl"
//--------------------------------------------------------------------------------
// Ctor
//--------------------------------------------------------------------------------
// Ctor — instancing support is detected later in Init().
MoreTeapotsRenderer::MoreTeapotsRenderer() :
    geometry_instancing_support_( false )
{
}

//--------------------------------------------------------------------------------
// Dtor — releases GL objects (Unload() is safe to call twice).
//--------------------------------------------------------------------------------
MoreTeapotsRenderer::~MoreTeapotsRenderer()
{
  Unload();
}
//--------------------------------------------------------------------------------
// Init
//--------------------------------------------------------------------------------
// Build all GL resources for a numX x numY x numZ grid of teapots:
// index/vertex buffers from teapot.inl, per-teapot transforms/colors, and
// either an ES3 instanced shader + uniform buffer or the ES2 fallback.
// NOTE(review): assumes each grid dimension >= 2, otherwise the gap
// computations below divide by zero — confirm with callers.
void MoreTeapotsRenderer::Init( const int32_t numX,
    const int32_t numY,
    const int32_t numZ )
{
  // Prefer ES3 geometry instancing when the context supports it.
  if( ndk_helper::GLContext::GetInstance()->GetGLVersion() >= 3.0 )
  {
    geometry_instancing_support_ = true;
  }
  else if( ndk_helper::GLContext::GetInstance()->CheckExtension( "GL_NV_draw_instanced" )
      && ndk_helper::GLContext::GetInstance()->CheckExtension(
          "GL_NV_uniform_buffer_object" ) )
  {
    LOGI( "Supported via extension!" );
    //_bGeometryInstancingSupport = true;
    //_bARBSupport = true; //Need to patch shaders
    //Currently this has been disabled
  }

  //Settings
  glFrontFace( GL_CCW );

  //Create Index buffer
  num_indices_ = sizeof(teapotIndices) / sizeof(teapotIndices[0]);
  glGenBuffers( 1, &ibo_ );
  glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo_ );
  glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof(teapotIndices), teapotIndices, GL_STATIC_DRAW );
  glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, 0 );

  //Create VBO: interleave positions/normals from the flat teapot.inl arrays.
  num_vertices_ = sizeof(teapotPositions) / sizeof(teapotPositions[0]) / 3;
  int32_t iStride = sizeof(TEAPOT_VERTEX);
  int32_t iIndex = 0;
  TEAPOT_VERTEX* p = new TEAPOT_VERTEX[num_vertices_];
  for( int32_t i = 0; i < num_vertices_; ++i )
  {
    p[i].pos[0] = teapotPositions[iIndex];
    p[i].pos[1] = teapotPositions[iIndex + 1];
    p[i].pos[2] = teapotPositions[iIndex + 2];

    p[i].normal[0] = teapotNormals[iIndex];
    p[i].normal[1] = teapotNormals[iIndex + 1];
    p[i].normal[2] = teapotNormals[iIndex + 2];
    iIndex += 3;
  }
  glGenBuffers( 1, &vbo_ );
  glBindBuffer( GL_ARRAY_BUFFER, vbo_ );
  glBufferData( GL_ARRAY_BUFFER, iStride * num_vertices_, p, GL_STATIC_DRAW );
  glBindBuffer( GL_ARRAY_BUFFER, 0 );
  delete[] p;

  //Init Projection matrices
  teapot_x_ = numX;
  teapot_y_ = numY;
  teapot_z_ = numZ;
  vec_mat_models_.reserve( teapot_x_ * teapot_y_ * teapot_z_ );

  UpdateViewport();

  // Lay teapots on an evenly spaced grid centered on the origin, each with
  // a random color, rotation rate and starting rotation.
  const float total_width = 500.f;
  float gap_x = total_width / (teapot_x_ - 1);
  float gap_y = total_width / (teapot_y_ - 1);
  float gap_z = total_width / (teapot_z_ - 1);
  float offset_x = -total_width / 2.f;
  float offset_y = -total_width / 2.f;
  float offset_z = -total_width / 2.f;

  for( int32_t iX = 0; iX < teapot_x_; ++iX )
    for( int32_t iY = 0; iY < teapot_y_; ++iY )
      for( int32_t iZ = 0; iZ < teapot_z_; ++iZ )
      {
        vec_mat_models_.push_back(
            ndk_helper::Mat4::Translation( iX * gap_x + offset_x, iY * gap_y + offset_y,
                iZ * gap_z + offset_z ) );
        vec_colors_.push_back(
            ndk_helper::Vec3( random() / float( RAND_MAX * 1.1 ),
                random() / float( RAND_MAX * 1.1 ),
                random() / float( RAND_MAX * 1.1 ) ) );

        float fX = random() / float( RAND_MAX ) - 0.5f;
        float fY = random() / float( RAND_MAX ) - 0.5f;
        vec_rotations_.push_back( ndk_helper::Vec2( fX * 0.05f, fY * 0.05f ) );
        vec_current_rotations_.push_back( ndk_helper::Vec2( fX * M_PI, fY * M_PI ) );
      }

  if( geometry_instancing_support_ )
  {
    //
    //Create parameter dictionary for shader patch
    std::map<std::string, std::string> param;
    param[std::string( "%NUM_TEAPOT%" )] = ToString( teapot_x_ * teapot_y_ * teapot_z_ );
    param[std::string( "%LOCATION_VERTEX%" )] = ToString( ATTRIB_VERTEX );
    param[std::string( "%LOCATION_NORMAL%" )] = ToString( ATTRIB_NORMAL );
    if( arb_support_ )
      param[std::string( "%ARB%" )] = std::string( "ARB" );
    else
      param[std::string( "%ARB%" )] = std::string( "" );

    //Load shader
    bool b = LoadShadersES3( &shader_param_, "Shaders/VS_ShaderPlainES3.vsh",
        "Shaders/ShaderPlainES3.fsh", param );
    if( b )
    {
      //
      //Create uniform buffer
      //
      GLuint bindingPoint = 1;
      GLuint blockIndex;
      blockIndex = glGetUniformBlockIndex( shader_param_.program_, "ParamBlock" );
      glUniformBlockBinding( shader_param_.program_, blockIndex, bindingPoint );

      //Retrieve array stride value
      int32_t iNumIndices;
      glGetActiveUniformBlockiv( shader_param_.program_, blockIndex,
          GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS, &iNumIndices );
      // NOTE(review): C99-style VLAs (non-standard C++); 'i' also shadows
      // the loop-index naming used elsewhere in this function.
      GLint i[iNumIndices];
      GLint stride[iNumIndices];
      glGetActiveUniformBlockiv( shader_param_.program_, blockIndex,
          GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES, i );
      glGetActiveUniformsiv( shader_param_.program_, iNumIndices, (GLuint*) i,
          GL_UNIFORM_ARRAY_STRIDE, stride );

      ubo_matrix_stride_ = stride[0] / sizeof(float);
      ubo_vector_stride_ = stride[2] / sizeof(float);

      glGenBuffers( 1, &ubo_ );
      glBindBuffer( GL_UNIFORM_BUFFER, ubo_ );
      glBindBufferBase( GL_UNIFORM_BUFFER, bindingPoint, ubo_ );

      //Store color value which wouldn't be updated every frame
      int32_t iSize = teapot_x_ * teapot_y_ * teapot_z_
          * (ubo_matrix_stride_ + ubo_matrix_stride_ + ubo_vector_stride_); //Mat4 + Mat4 + Vec3 + 1 stride
      float* pBuffer = new float[iSize];
      float* pColor = pBuffer + teapot_x_ * teapot_y_ * teapot_z_ * ubo_matrix_stride_ * 2;
      for( int32_t i = 0; i < teapot_x_ * teapot_y_ * teapot_z_; ++i )
      {
        memcpy( pColor, &vec_colors_[i], 3 * sizeof(float) );
        pColor += ubo_vector_stride_; //Assuming std140 layout which is 4 DWORD stride for vectors
      }
      glBufferData( GL_UNIFORM_BUFFER, iSize * sizeof(float), pBuffer, GL_DYNAMIC_DRAW );
      delete[] pBuffer;
    }
    else
    {
      LOGI( "Shader compilation failed!! Falls back to ES2.0 pass" );
      //This happens some devices.
      geometry_instancing_support_ = false;
      //Load shader for GLES2.0
      LoadShaders( &shader_param_, "Shaders/VS_ShaderPlain.vsh", "Shaders/ShaderPlain.fsh" );
    }
  }
  else
  {
    //Load shader for GLES2.0
    LoadShaders( &shader_param_, "Shaders/VS_ShaderPlain.vsh", "Shaders/ShaderPlain.fsh" );
  }
}
void MoreTeapotsRenderer::UpdateViewport()
{
    // Rebuild the projection matrix from the current GL viewport.
    // Called after the surface is (re)created or resized.
    int32_t viewport[4];
    glGetIntegerv( GL_VIEWPORT, viewport );
    // viewport[2]/viewport[3] are width/height in pixels.
    float fAspect = (float) viewport[2] / (float) viewport[3];

    const float CAM_NEAR = 5.f;
    const float CAM_FAR = 10000.f;
    // (Removed unused local 'bRotate' from the original implementation.)
    mat_projection_ = ndk_helper::Mat4::Perspective( fAspect, 1.f, CAM_NEAR, CAM_FAR );
}
//--------------------------------------------------------------------------------
// Unload
//--------------------------------------------------------------------------------
void MoreTeapotsRenderer::Unload()
{
if( vbo_ )
{
glDeleteBuffers( 1, &vbo_ );
vbo_ = 0;
}
if( ubo_ )
{
glDeleteBuffers( 1, &ubo_ );
ubo_ = 0;
}
if( ibo_ )
{
glDeleteBuffers( 1, &ibo_ );
ibo_ = 0;
}
if( shader_param_.program_ )
{
glDeleteProgram( shader_param_.program_ );
shader_param_.program_ = 0;
}
}
//--------------------------------------------------------------------------------
// Update
//--------------------------------------------------------------------------------
void MoreTeapotsRenderer::Update( float fTime )
{
const float CAM_X = 0.f;
const float CAM_Y = 0.f;
const float CAM_Z = 2000.f;
mat_view_ = ndk_helper::Mat4::LookAt( ndk_helper::Vec3( CAM_X, CAM_Y, CAM_Z ),
ndk_helper::Vec3( 0.f, 0.f, 0.f ), ndk_helper::Vec3( 0.f, 1.f, 0.f ) );
if( camera_ )
{
camera_->Update();
mat_view_ = camera_->GetTransformMatrix() * mat_view_ * camera_->GetRotationMatrix();
}
}
//--------------------------------------------------------------------------------
// Render
//--------------------------------------------------------------------------------
void MoreTeapotsRenderer::Render()
{
    // Draw all teapots with the current view/projection matrices.
    // ES3-capable devices use one instanced draw call; the ES2 fallback issues
    // one glDrawElements call per teapot.
    // Bind the VBO
    glBindBuffer( GL_ARRAY_BUFFER, vbo_ );
    int32_t iStride = sizeof(TEAPOT_VERTEX);
    // Pass the vertex data: position at offset 0, normal packed right after it
    glVertexAttribPointer( ATTRIB_VERTEX, 3, GL_FLOAT, GL_FALSE, iStride, BUFFER_OFFSET( 0 ) );
    glEnableVertexAttribArray( ATTRIB_VERTEX );
    glVertexAttribPointer( ATTRIB_NORMAL, 3, GL_FLOAT, GL_FALSE, iStride,
            BUFFER_OFFSET( 3 * sizeof(GLfloat) ) );
    glEnableVertexAttribArray( ATTRIB_NORMAL );
    // Bind the IB
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo_ );
    glUseProgram( shader_param_.program_ );
    // Material shared by all teapots: specular RGB plus a 4th component
    // (10.f — presumably the specular power; confirm against the shader),
    // and ambient RGB.
    TEAPOT_MATERIALS material = { { 1.0f, 1.0f, 1.0f, 10.f }, { 0.1f, 0.1f, 0.1f }, };
    //Update uniforms
    //
    //using glUniform3fv here was troublesome..
    //
    glUniform4f( shader_param_.material_specular_, material.specular_color[0],
            material.specular_color[1], material.specular_color[2], material.specular_color[3] );
    glUniform3f( shader_param_.material_ambient_, material.ambient_color[0],
            material.ambient_color[1], material.ambient_color[2] );
    glUniform3f( shader_param_.light0_, 100.f, -200.f, -600.f );
    if( geometry_instancing_support_ )
    {
        //
        //Geometry instancing, new feature in GLES3.0
        //
        //Update UBO: map only the matrix region (2 matrices per teapot) and
        //rewrite the per-instance MVP and MV matrices. The color region was
        //uploaded once in Init() and is not touched per frame.
        glBindBuffer( GL_UNIFORM_BUFFER, ubo_ );
        float* p = (float*) glMapBufferRange( GL_UNIFORM_BUFFER, 0,
                teapot_x_ * teapot_y_ * teapot_z_ * (ubo_matrix_stride_ * 2) * sizeof(float),
                GL_MAP_WRITE_BIT | GL_MAP_INVALIDATE_RANGE_BIT );
        // MVP matrices occupy the first half of the mapped range, MV matrices
        // the second half.
        float* pMVPMat = p;
        float* pMVMat = p + teapot_x_ * teapot_y_ * teapot_z_ * ubo_matrix_stride_;
        for( int32_t i = 0; i < teapot_x_ * teapot_y_ * teapot_z_; ++i )
        {
            //Rotation: advance this teapot's angles by its per-frame delta
            float fX, fY;
            vec_current_rotations_[i] += vec_rotations_[i];
            vec_current_rotations_[i].Value( fX, fY );
            ndk_helper::Mat4 mat_rotation = ndk_helper::Mat4::RotationX( fX )
                    * ndk_helper::Mat4::RotationY( fY );
            // Feed Projection and Model View matrices to the shaders
            ndk_helper::Mat4 mat_v = mat_view_ * vec_mat_models_[i] * mat_rotation;
            ndk_helper::Mat4 mat_vp = mat_projection_ * mat_v;
            memcpy( pMVPMat, mat_vp.Ptr(), sizeof(mat_v) );
            pMVPMat += ubo_matrix_stride_;
            memcpy( pMVMat, mat_v.Ptr(), sizeof(mat_v) );
            pMVMat += ubo_matrix_stride_;
        }
        glUnmapBuffer( GL_UNIFORM_BUFFER );
        //Instanced rendering: one draw call covers the whole teapot grid
        glDrawElementsInstanced( GL_TRIANGLES, num_indices_, GL_UNSIGNED_SHORT, BUFFER_OFFSET(0),
                teapot_x_ * teapot_y_ * teapot_z_ );
    }
    else
    {
        //Regular rendering pass: one uniform upload + draw call per teapot
        for( int32_t i = 0; i < teapot_x_ * teapot_y_ * teapot_z_; ++i )
        {
            //Set diffuse color for this instance
            float x, y, z;
            vec_colors_[i].Value( x, y, z );
            glUniform4f( shader_param_.material_diffuse_, x, y, z, 1.f );
            //Rotation: advance this teapot's angles by its per-frame delta
            vec_current_rotations_[i] += vec_rotations_[i];
            vec_current_rotations_[i].Value( x, y );
            ndk_helper::Mat4 mat_rotation = ndk_helper::Mat4::RotationX( x )
                    * ndk_helper::Mat4::RotationY( y );
            // Feed Projection and Model View matrices to the shaders
            ndk_helper::Mat4 mat_v = mat_view_ * vec_mat_models_[i] * mat_rotation;
            ndk_helper::Mat4 mat_vp = mat_projection_ * mat_v;
            glUniformMatrix4fv( shader_param_.matrix_projection_, 1, GL_FALSE, mat_vp.Ptr() );
            glUniformMatrix4fv( shader_param_.matrix_view_, 1, GL_FALSE, mat_v.Ptr() );
            glDrawElements( GL_TRIANGLES, num_indices_, GL_UNSIGNED_SHORT, BUFFER_OFFSET(0) );
        }
    }
    // Unbind buffers to avoid leaking state into other render passes.
    glBindBuffer( GL_ARRAY_BUFFER, 0 );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, 0 );
}
//--------------------------------------------------------------------------------
// LoadShaders
//--------------------------------------------------------------------------------
bool MoreTeapotsRenderer::LoadShaders( SHADER_PARAMS* params,
        const char* strVsh,
        const char* strFsh )
{
    //
    // Shader load for GLES2.
    // In GLES2.0, shader attribute locations need to be explicitly specified
    // before linking.
    //
    // params  - receives the linked program handle and uniform locations
    // strVsh  - asset path of the vertex shader source
    // strFsh  - asset path of the fragment shader source
    // Returns true on success; on failure all partially-created GL objects
    // are released.
    GLuint program;
    GLuint vertShader, fragShader;

    // Create shader program
    program = glCreateProgram();
    LOGI( "Created Shader %d", program );

    // Create and compile vertex shader
    if( !ndk_helper::shader::CompileShader( &vertShader, GL_VERTEX_SHADER, strVsh ) )
    {
        LOGI( "Failed to compile vertex shader" );
        glDeleteProgram( program );
        return false;
    }

    // Create and compile fragment shader
    if( !ndk_helper::shader::CompileShader( &fragShader, GL_FRAGMENT_SHADER, strFsh ) )
    {
        LOGI( "Failed to compile fragment shader" );
        // BUGFIX: the already-compiled vertex shader was leaked on this path.
        glDeleteShader( vertShader );
        glDeleteProgram( program );
        return false;
    }

    // Attach vertex shader to program
    glAttachShader( program, vertShader );
    // Attach fragment shader to program
    glAttachShader( program, fragShader );

    // Bind attribute locations
    // this needs to be done prior to linking
    glBindAttribLocation( program, ATTRIB_VERTEX, "myVertex" );
    glBindAttribLocation( program, ATTRIB_NORMAL, "myNormal" );

    // Link program
    if( !ndk_helper::shader::LinkProgram( program ) )
    {
        LOGI( "Failed to link program: %d", program );
        if( vertShader )
        {
            glDeleteShader( vertShader );
            vertShader = 0;
        }
        if( fragShader )
        {
            glDeleteShader( fragShader );
            fragShader = 0;
        }
        if( program )
        {
            glDeleteProgram( program );
        }
        return false;
    }

    // Get uniform locations
    params->matrix_projection_ = glGetUniformLocation( program, "uPMatrix" );
    params->matrix_view_ = glGetUniformLocation( program, "uMVMatrix" );
    params->light0_ = glGetUniformLocation( program, "vLight0" );
    params->material_diffuse_ = glGetUniformLocation( program, "vMaterialDiffuse" );
    params->material_ambient_ = glGetUniformLocation( program, "vMaterialAmbient" );
    params->material_specular_ = glGetUniformLocation( program, "vMaterialSpecular" );

    // Release vertex and fragment shaders; the linked program keeps its own copy
    if( vertShader )
        glDeleteShader( vertShader );
    if( fragShader )
        glDeleteShader( fragShader );

    params->program_ = program;
    return true;
}
bool MoreTeapotsRenderer::LoadShadersES3( SHADER_PARAMS* params,
        const char* strVsh,
        const char* strFsh,
        std::map<std::string, std::string>&shaderParams )
{
    //
    // Shader load for GLES3.
    // In GLES3.0, the shader attribute index can be described in the shader
    // code directly with a layout() attribute, so no glBindAttribLocation is
    // needed. The shaderParams dictionary is patched into the shader source by
    // CompileShader (e.g. %NUM_TEAPOT%, %ARB% placeholders set up in Init()).
    //
    // Returns true on success; on failure all partially-created GL objects
    // are released.
    GLuint program;
    GLuint vertShader, fragShader;

    // Create shader program
    program = glCreateProgram();
    LOGI( "Created Shader %d", program );

    // Create and compile vertex shader
    if( !ndk_helper::shader::CompileShader( &vertShader, GL_VERTEX_SHADER, strVsh, shaderParams ) )
    {
        LOGI( "Failed to compile vertex shader" );
        glDeleteProgram( program );
        return false;
    }

    // Create and compile fragment shader
    if( !ndk_helper::shader::CompileShader( &fragShader, GL_FRAGMENT_SHADER, strFsh,
            shaderParams ) )
    {
        LOGI( "Failed to compile fragment shader" );
        // BUGFIX: the already-compiled vertex shader was leaked on this path.
        glDeleteShader( vertShader );
        glDeleteProgram( program );
        return false;
    }

    // Attach vertex shader to program
    glAttachShader( program, vertShader );
    // Attach fragment shader to program
    glAttachShader( program, fragShader );

    // Link program
    if( !ndk_helper::shader::LinkProgram( program ) )
    {
        LOGI( "Failed to link program: %d", program );
        if( vertShader )
        {
            glDeleteShader( vertShader );
            vertShader = 0;
        }
        if( fragShader )
        {
            glDeleteShader( fragShader );
            fragShader = 0;
        }
        if( program )
        {
            glDeleteProgram( program );
        }
        return false;
    }

    // Get uniform locations. The matrices and diffuse color are delivered via
    // the uniform block ("ParamBlock") on this path, so only the remaining
    // plain uniforms are queried here.
    params->light0_ = glGetUniformLocation( program, "vLight0" );
    params->material_ambient_ = glGetUniformLocation( program, "vMaterialAmbient" );
    params->material_specular_ = glGetUniformLocation( program, "vMaterialSpecular" );

    // Release vertex and fragment shaders; the linked program keeps its own copy
    if( vertShader )
        glDeleteShader( vertShader );
    if( fragShader )
        glDeleteShader( fragShader );

    params->program_ = program;
    return true;
}
//--------------------------------------------------------------------------------
// Bind
//--------------------------------------------------------------------------------
bool MoreTeapotsRenderer::Bind( ndk_helper::TapCamera* camera )
{
    // Remember the camera; Update() folds its transform into the view matrix.
    // Always reports success.
    camera_ = camera;
    return true;
}
//--------------------------------------------------------------------------------
// Helper functions
//--------------------------------------------------------------------------------
std::string MoreTeapotsRenderer::ToString( const int32_t i )
{
    // Decimal-format the integer through a fixed-size stack buffer.
    char buffer[64];
    snprintf( buffer, sizeof(buffer), "%d", i );
    return std::string( buffer );
}

View File

@@ -0,0 +1,126 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//--------------------------------------------------------------------------------
// MoreTeapotsRenderer.h
// Renderer for teapots
//--------------------------------------------------------------------------------
#ifndef _MoreTeapotsRenderer_H
#define _MoreTeapotsRenderer_H
//--------------------------------------------------------------------------------
// Include files
//--------------------------------------------------------------------------------
#include <jni.h>
#include <errno.h>
#include <vector>
#include <EGL/egl.h>
#include <GLES/gl.h>
#include <android/sensor.h>
#include <android/log.h>
#include <android_native_app_glue.h>
#include <android/native_window_jni.h>
#include <cpu-features.h>
#define CLASS_NAME "android/app/NativeActivity"
#define APPLICATION_CLASS_NAME "com/sample/moreteapotss/MoreTeapotsApplication"
#include "NDKHelper.h"
#define BUFFER_OFFSET(i) ((char *)NULL + (i))
// Interleaved vertex layout of the teapot mesh: position followed by normal.
struct TEAPOT_VERTEX
{
    float pos[3];    // object-space position (x, y, z)
    float normal[3]; // vertex normal used for lighting
};
// Fixed vertex-attribute indices. The ES2 path binds them explicitly with
// glBindAttribLocation; the ES3 path patches them into the shader source.
// Only ATTRIB_VERTEX and ATTRIB_NORMAL are used by this renderer.
enum SHADER_ATTRIBUTES
{
    ATTRIB_VERTEX, ATTRIB_NORMAL, ATTRIB_COLOR, ATTRIB_UV
};
// Handle of a linked GL program together with its queried uniform locations.
struct SHADER_PARAMS
{
    GLuint program_;           // linked shader program
    GLuint light0_;            // uniform "vLight0" (light position)
    GLuint material_diffuse_;  // uniform "vMaterialDiffuse" (ES2 path only)
    GLuint material_ambient_;  // uniform "vMaterialAmbient"
    GLuint material_specular_; // uniform "vMaterialSpecular"
    GLuint matrix_projection_; // uniform "uPMatrix" (ES2 path only)
    GLuint matrix_view_;       // uniform "uMVMatrix" (ES2 path only)
};
// Material constants shared by every teapot (diffuse color is per-instance).
struct TEAPOT_MATERIALS
{
    float specular_color[4]; // specular RGB + 4th component (Render() sets 10.f;
                             // presumably the specular power — confirm vs shader)
    float ambient_color[3];  // ambient RGB
};
// Renders a 3-D grid of independently rotating teapots, using GLES3 geometry
// instancing with a uniform buffer when available and falling back to a
// per-teapot GLES2 pass otherwise.
class MoreTeapotsRenderer
{
    // Mesh sizes filled in at initialization.
    int32_t num_indices_;
    int32_t num_vertices_;
    // GL buffer objects: index, vertex and (ES3 path only) uniform buffer.
    GLuint ibo_;
    GLuint vbo_;
    GLuint ubo_;
    // Active program and its uniform locations.
    SHADER_PARAMS shader_param_;
    // ES2 loader: binds attribute locations explicitly before linking.
    bool LoadShaders( SHADER_PARAMS* params,
            const char* strVsh,
            const char* strFsh );
    // ES3 loader: patches the parameter dictionary into the shader source.
    bool LoadShadersES3( SHADER_PARAMS* params,
            const char* strVsh,
            const char* strFsh,
            std::map<std::string, std::string>&shaderParameters );
    ndk_helper::Mat4 mat_projection_; // projection matrix (UpdateViewport())
    ndk_helper::Mat4 mat_view_;       // view matrix (Update())
    // Per-teapot state, one entry per instance:
    std::vector<ndk_helper::Mat4> vec_mat_models_;        // model matrices
    std::vector<ndk_helper::Vec3> vec_colors_;            // diffuse colors
    std::vector<ndk_helper::Vec2> vec_rotations_;         // per-frame rotation deltas
    std::vector<ndk_helper::Vec2> vec_current_rotations_; // accumulated rotations
    ndk_helper::TapCamera* camera_; // optional camera attached via Bind()
    // Grid dimensions: numX * numY * numZ teapots.
    int32_t teapot_x_;
    int32_t teapot_y_;
    int32_t teapot_z_;
    // Uniform-block strides in floats, queried from the driver at init.
    int32_t ubo_matrix_stride_;
    int32_t ubo_vector_stride_;
    bool geometry_instancing_support_; // true when the ES3 instancing path is active
    bool arb_support_;                 // selects the "ARB"-suffixed shader variant
                                       // via the %ARB% placeholder — see Init()
    // Decimal-formats an integer (used to build the shader parameter map).
    std::string ToString( const int32_t i );
public:
    MoreTeapotsRenderer();
    virtual ~MoreTeapotsRenderer();
    void Init( const int32_t numX,
            const int32_t numY,
            const int32_t numZ );
    void Render();
    void Update( float dTime );
    bool Bind( ndk_helper::TapCamera* camera );
    void Unload();
    void UpdateViewport();
};
#endif

3
MoreTeapots/lint.xml Normal file
View File

@@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<lint>
</lint>

View File

@@ -0,0 +1,14 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=Google Inc.:Google APIs:19

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -9,8 +9,8 @@
android:id="@+id/textViewFPS"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="end"
android:text="@string/fps"
android:gravity="right"
android:text="0.0 FPS"
android:textAppearance="?android:attr/textAppearanceMedium"
android:textColor="@android:color/white" />

View File

@@ -1,6 +1,5 @@
<resources>
<string name="app_name">More Teapots</string>
<string name="fps">0.0 FPS</string>
</resources>

View File

@@ -0,0 +1,202 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample.helper;
import java.io.File;
import java.io.FileInputStream;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.opengl.GLUtils;
import android.util.Log;
/**
 * Helper class called from native code (see HELPER_CLASS_NAME in the C++
 * sources). Provides bitmap decoding/texture upload and a few system queries
 * (native library path, audio parameters) that are easier from Java.
 */
public class NDKHelper
{
    // Application context; must be injected via setContext() before any of the
    // instance methods that decode assets are used.
    private static Context context;

    public static void setContext(Context c)
    {
        Log.i("NDKHelper", "setContext:" + c);
        context = c;
    }

    //
    // Load Bitmap
    // Java helper is useful decoding PNG, TIFF etc rather than linking libPng
    // etc separately
    //

    // Returns the smallest power of two >= i (1 for i <= 1).
    private int nextPOT(int i)
    {
        int pot = 1;
        while (pot < i)
            pot <<= 1;
        return pot;
    }

    // Returns a new bitmap resized to newWidth x newHeight; null passes through.
    private Bitmap scaleBitmap(Bitmap bitmapToScale, float newWidth, float newHeight)
    {
        if (bitmapToScale == null)
            return null;
        // get the original width and height
        int width = bitmapToScale.getWidth();
        int height = bitmapToScale.getHeight();
        // create a matrix for the manipulation
        Matrix matrix = new Matrix();
        // resize the bit map
        matrix.postScale(newWidth / width, newHeight / height);
        // recreate the new Bitmap and set it back
        return Bitmap.createBitmap(bitmapToScale, 0, 0, bitmapToScale.getWidth(),
                bitmapToScale.getHeight(), matrix, true);
    }

    /**
     * Decodes the image at {@code path} — preferring the app's external files
     * directory, falling back to the APK assets — and uploads it to the
     * currently bound GL_TEXTURE_2D via GLUtils.texImage2D.
     * Returns false only when decoding threw an exception.
     * NOTE(review): a decode that returns null without throwing still reports
     * true (and uploads nothing) — confirm this is intended.
     */
    public boolean loadTexture(String path)
    {
        Bitmap bitmap = null;
        try
        {
            String str = path;
            if (!path.startsWith("/"))
            {
                str = "/" + path;
            }
            File file = new File(context.getExternalFilesDir(null), str);
            if (file.canRead())
            {
                bitmap = BitmapFactory.decodeStream(new FileInputStream(file));
            } else
            {
                bitmap = BitmapFactory.decodeStream(context.getResources().getAssets()
                        .open(path));
            }
            // Matrix matrix = new Matrix();
            // // resize the bit map
            // matrix.postScale(-1F, 1F);
            //
            // // recreate the new Bitmap and set it back
            // bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
            // bitmap.getHeight(), matrix, true);
        } catch (Exception e)
        {
            Log.w("NDKHelper", "Coundn't load a file:" + path);
            return false;
        }
        if (bitmap != null)
        {
            GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
        }
        return true;
    }

    /**
     * Decodes a bitmap from the APK assets; when iScalePOT is true the result
     * is scaled up to power-of-two dimensions. Returns null on failure.
     */
    public Bitmap openBitmap(String path, boolean iScalePOT)
    {
        Bitmap bitmap = null;
        try
        {
            bitmap = BitmapFactory.decodeStream(context.getResources().getAssets()
                    .open(path));
            if (iScalePOT)
            {
                int originalWidth = getBitmapWidth(bitmap);
                int originalHeight = getBitmapHeight(bitmap);
                int width = nextPOT(originalWidth);
                int height = nextPOT(originalHeight);
                if (originalWidth != width || originalHeight != height)
                {
                    // Scale it
                    bitmap = scaleBitmap(bitmap, width, height);
                }
            }
        } catch (Exception e)
        {
            Log.w("NDKHelper", "Coundn't load a file:" + path);
        }
        return bitmap;
    }

    /** Width of the bitmap in pixels. */
    public int getBitmapWidth(Bitmap bmp)
    {
        return bmp.getWidth();
    }

    /** Height of the bitmap in pixels. */
    public int getBitmapHeight(Bitmap bmp)
    {
        return bmp.getHeight();
    }

    /** Copies the bitmap's pixels into {@code pixels} (must hold width*height ints). */
    public void getBitmapPixels(Bitmap bmp, int[] pixels)
    {
        int w = bmp.getWidth();
        int h = bmp.getHeight();
        bmp.getPixels(pixels, 0, w, 0, 0, w, h);
    }

    /** Frees the bitmap's pixel memory. */
    public void closeBitmap(Bitmap bmp)
    {
        bmp.recycle();
    }

    /**
     * Directory that holds the app's native libraries; updated or non-system
     * apps use their own nativeLibraryDir, otherwise /system/lib/.
     * NOTE(review): the appContext parameter is ignored — the static 'context'
     * set by setContext() is used instead; confirm callers always inject it first.
     */
    public static String getNativeLibraryDirectory(Context appContext)
    {
        ApplicationInfo ai = context.getApplicationInfo();
        Log.w("NDKHelper", "ai.nativeLibraryDir:" + ai.nativeLibraryDir);
        if ((ai.flags & ApplicationInfo.FLAG_UPDATED_SYSTEM_APP) != 0
                || (ai.flags & ApplicationInfo.FLAG_SYSTEM) == 0)
        {
            return ai.nativeLibraryDir;
        }
        return "/system/lib/";
    }

    /** Device-preferred audio buffer size in frames (API 17+), or 0 when unavailable. */
    public int getNativeAudioBufferSize()
    {
        int SDK_INT = android.os.Build.VERSION.SDK_INT;
        if (SDK_INT >= 17)
        {
            AudioManager am = (AudioManager) context
                    .getSystemService(Context.AUDIO_SERVICE);
            String framesPerBuffer = am
                    .getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
            return Integer.parseInt(framesPerBuffer);
        } else
        {
            return 0;
        }
    }

    /** Native output sample rate of the system audio stream, in Hz. */
    public int getNativeAudioSampleRate()
    {
        return AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);
    }
}

View File

@@ -35,9 +35,8 @@ import android.widget.Toast;
public class MoreTeapotsApplication extends Application {
private static Context context;
public void onCreate(){
super.onCreate();
context=getApplicationContext();
NDKHelper.setContext(context);
Log.w("native-activity", "onCreate");
final PackageManager pm = getApplicationContext().getPackageManager();

View File

@@ -16,8 +16,6 @@
package com.sample.moreteapots;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.NativeActivity;
import android.content.res.Configuration;
import android.os.Bundle;
@@ -53,7 +51,6 @@ public class MoreTeapotsNativeActivity extends NativeActivity {
}
@TargetApi(19)
protected void onResume() {
super.onResume();
@@ -76,10 +73,14 @@ public class MoreTeapotsNativeActivity extends NativeActivity {
protected void onPause()
{
super.onPause();
if (_popupWindow != null) {
_popupWindow.dismiss();
_popupWindow = null;
}
}
// Our popup window, you will call it from your C/C++ code later
@TargetApi(19)
void setImmersiveSticky() {
View decorView = getWindow().getDecorView();
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_FULLSCREEN
@@ -94,7 +95,6 @@ public class MoreTeapotsNativeActivity extends NativeActivity {
PopupWindow _popupWindow;
TextView _label;
@SuppressLint("InflateParams")
public void showUI()
{
if( _popupWindow != null )
@@ -120,7 +120,7 @@ public class MoreTeapotsNativeActivity extends NativeActivity {
_activity.setContentView(mainLayout, params);
// Show our UI over NativeActivity window
_popupWindow.showAtLocation(mainLayout, Gravity.TOP | Gravity.START, 10, 10);
_popupWindow.showAtLocation(mainLayout, Gravity.TOP | Gravity.LEFT, 10, 10);
_popupWindow.update();
_label = (TextView)popupView.findViewById(R.id.textViewFPS);

View File

@@ -1,50 +1,35 @@
NDK Samples [![build](https://github.com/android/ndk-samples/workflows/build/badge.svg)](https://github.com/android/ndk-samples/actions)
NDK Samples
===========
This repository contains [Android NDK][0] samples with Android Studio [C++ integration](https://www.youtube.com/watch?v=f7ihSQ44WO0&feature=youtu.be).
This repository contains samples for [Android NDK][0].
These samples use the new [CMake Android plugin](https://developer.android.com/studio/projects/add-native-code.html) with C++ support.
Pre-requisites
--------------
Samples could also be built with other build systems:
- for ndk-build with Android Studio, refer to directory [other-builds/ndkbuild](https://github.com/googlesamples/android-ndk/tree/master/other-builds/ndkbuild)
- for gradle-experimental plugin, refer to directory other-builds/experimental. Note that gradle-experimental does not work with unified headers yet: use NDK version up to r15 and Android Studio up to version 2.3. When starting new project, please use CMake or ndk-build plugin.
- [Android NDK][0]
Additional Android Studio samples:
- [Google Play Game Samples with Android Studio](https://github.com/playgameservices/cpp-android-basic-samples)
- [Google Android Vulkan Tutorials](https://github.com/googlesamples/android-vulkan-tutorials)
- [Android Vulkan API Basic Samples](https://github.com/googlesamples/vulkan-basic-samples)
- [Android High Performance Audio](https://github.com/googlesamples/android-audio-high-performance)
Getting Started
---------------
Documentation
- [Add Native Code to Your Project](https://developer.android.com/studio/projects/add-native-code.html)
- [Configure NDK for Android Studio/Gradle Plugin](https://github.com/android/ndk-samples/wiki/Configure-NDK-Path)
- [CMake for NDK](https://developer.android.com/ndk/guides/cmake.html)
Known Issues
- For Studio related issues, refer to [Android Studio known issues](http://tools.android.com/knownissues) page
- For NDK issues, refer to [ndk issues](https://github.com/android/ndk/issues)
For samples using `Android.mk` build system with `ndk-build` see the [android-mk](https://github.com/googlesamples/android-ndk/tree/android-mk) branch.
Build Steps
----------
- With Android Studio: "Open An Existing Android Studio Project" or "File" > "Open", then navigate to & select project's build.gradle file.
- On Command Line: set up ANDROID_HOME and ANDROID_NDK_HOME to your SDK and NDK path, cd to individual sample dir, and do "gradlew assembleDebug"
These samples use the NDK build system, you can build them by
following the instructions in the
[NDK documentation](https://developer.android.com/tools/sdk/ndk/index.html#Samples).
Support
-------
For any issues you found in these samples, please
- submit patches with pull requests, see [CONTRIBUTING.md](CONTRIBUTING.md) for more details, or
- [create bugs](https://github.com/googlesamples/android-ndk/issues/new) here.
- [Google+ Community](https://plus.google.com/communities/105153134372062985968)
- [Stack Overflow](http://stackoverflow.com/questions/tagged/android)
For Android NDK generic questions, please ask on [Stack Overflow](https://stackoverflow.com/questions/tagged/android), Android teams are periodically monitoring questions there.
If you've found an error in this sample, please [file an issue](https://github.com/googlesamples/android-ndk/issues/new).
Patches are encouraged, and may be submitted by [forking this project](https://github.com/googlesamples/android-ndk/fork) and
submitting a pull request through GitHub. Please see [CONTRIBUTING.md](CONTRIBUTING.md) for more details.
License
-------
Copyright 2018 The Android Open Source Project, Inc.
Copyright 2015 The Android Open Source Project, Inc.
Licensed to the Apache Software Foundation (ASF) under one or more contributor
license agreements. See the NOTICE file distributed with this work for
@@ -53,7 +38,7 @@ file to you under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
https://www.apache.org/licenses/LICENSE-2.0
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
@@ -63,4 +48,4 @@ the License.
[LICENSE](LICENSE)
[0]: https://developer.android.com/ndk
[0]: https://developer.android.com/tools/sdk/ndk/

View File

@@ -1,11 +0,0 @@
Android Studio/Gradle DSL References
| Name |Function | Type | Options | Default |
|---------------|-----------|:-------------:|----------|---------|
| debuggable | Debugging Java code | bool | true / false ||
| ndk.debuggable| Debugging JNI code | bool | true / false ||
Notation:
&nbsp;&nbsp;&nbsp;&nbsp; dot(".") notation is same as closure ("{}") notation

View File

@@ -0,0 +1,34 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.sample.teapot"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="11"
android:targetSdkVersion="19" />
<uses-feature android:glEsVersion="0x00020000"></uses-feature>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"></uses-permission>
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme"
android:hasCode="true"
android:name="com.sample.teapot.TeapotApplication"
>
<!-- Our activity is the built-in NativeActivity framework class.
This will take care of integrating with our NDK code. -->
<activity android:name="com.sample.teapot.TeapotNativeActivity"
android:label="@string/app_name"
android:configChanges="orientation|keyboardHidden">
<!-- Tell NativeActivity the name of or .so -->
<meta-data android:name="android.app.lib_name"
android:value="TeapotNativeActivity" />
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View File

@@ -19,7 +19,7 @@
#define USE_PHONG (1)
uniform lowp vec3 vMaterialAmbient;
uniform lowp vec4 vMaterialSpecular;
uniform mediump vec4 vMaterialSpecular;
varying lowp vec4 colorDiffuse;

20
Teapot/jni/Android.mk Normal file
View File

@@ -0,0 +1,20 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := TeapotNativeActivity
LOCAL_SRC_FILES := TeapotNativeActivity.cpp \
TeapotRenderer.cpp \
LOCAL_C_INCLUDES :=
LOCAL_CFLAGS :=
LOCAL_LDLIBS := -llog -landroid -lEGL -lGLESv2
LOCAL_STATIC_LIBRARIES := cpufeatures android_native_app_glue ndk_helper
include $(BUILD_SHARED_LIBRARY)
$(call import-module,android/ndk_helper)
$(call import-module,android/native_app_glue)
$(call import-module,android/cpufeatures)

View File

@@ -0,0 +1,4 @@
APP_PLATFORM := android-9
APP_ABI := all
APP_STL := stlport_static

View File

@@ -0,0 +1,488 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//--------------------------------------------------------------------------------
// Include files
//--------------------------------------------------------------------------------
#include <jni.h>
#include <errno.h>
#include <android/sensor.h>
#include <android/log.h>
#include <android_native_app_glue.h>
#include <android/native_window_jni.h>
#include <cpu-features.h>
#include "TeapotRenderer.h"
#include "NDKHelper.h"
//-------------------------------------------------------------------------
//Preprocessor
//-------------------------------------------------------------------------
#define HELPER_CLASS_NAME "com/sample/helper/NDKHelper" //Class name of helper function
//-------------------------------------------------------------------------
//Shared state for our app.
//-------------------------------------------------------------------------
struct android_app;
// Application engine for the native-activity sample: owns the GL context,
// the teapot renderer, gesture detectors, FPS monitor and sensor plumbing.
class Engine
{
    TeapotRenderer renderer_;           // draws the teapot
    ndk_helper::GLContext* gl_context_; // EGL/GL context (helper singleton)
    bool initialized_resources_;        // true once LoadResources() has run
    bool has_focus_;                    // window focus flag (managed outside this chunk)
    // Touch gesture detectors.
    ndk_helper::DoubletapDetector doubletap_detector_;
    ndk_helper::PinchDetector pinch_detector_;
    ndk_helper::DragDetector drag_detector_;
    ndk_helper::PerfMonitor monitor_;   // frame-time / FPS measurement
    ndk_helper::TapCamera tap_camera_;  // camera driven by the gestures above
    android_app* app_;                  // native_app_glue application state
    // Accelerometer plumbing.
    ASensorManager* sensor_manager_;
    const ASensor* accelerometer_sensor_;
    ASensorEventQueue* sensor_event_queue_;
    void UpdateFPS( float fFPS );
    void ShowUI();
    void TransformPosition( ndk_helper::Vec2& vec );
public:
    // native_app_glue callbacks (app->userData holds the Engine*).
    static void HandleCmd( struct android_app* app,
            int32_t cmd );
    static int32_t HandleInput( android_app* app,
            AInputEvent* event );
    Engine();
    ~Engine();
    void SetState( android_app* state );
    int InitDisplay();
    void LoadResources();
    void UnloadResources();
    void DrawFrame();
    void TermDisplay();
    void TrimMemory();
    bool IsReady();
    void UpdatePosition( AInputEvent* event,
            int32_t iIndex,
            float& fX,
            float& fY );
    void InitSensors();
    void ProcessSensors( int32_t id );
    void SuspendSensors();
    void ResumeSensors();
};
//-------------------------------------------------------------------------
//Ctor
//-------------------------------------------------------------------------
// Construct with all handles null/false; the GL context comes from the
// ndk_helper singleton and is shared process-wide.
Engine::Engine() :
        initialized_resources_( false ),
        has_focus_( false ),
        app_( NULL ),
        sensor_manager_( NULL ),
        accelerometer_sensor_( NULL ),
        sensor_event_queue_( NULL )
{
    gl_context_ = ndk_helper::GLContext::GetInstance();
}
//-------------------------------------------------------------------------
//Dtor
//-------------------------------------------------------------------------
// Nothing to release here; GL/EGL teardown is handled by TermDisplay().
Engine::~Engine()
{
}
/**
* Load resources
*/
void Engine::LoadResources()
{
renderer_.Init();
renderer_.Bind( &tap_camera_ );
}
/**
* Unload resources
*/
void Engine::UnloadResources()
{
    // Release the renderer's GL objects (counterpart of LoadResources()).
    renderer_.Unload();
}
/**
* Initialize an EGL context for the current display.
*/
int Engine::InitDisplay()
{
    // First call: create the GL context and load resources. Later calls try to
    // resume the existing EGL context and only reload resources when that fails.
    if( !initialized_resources_ )
    {
        gl_context_->Init( app_->window );
        LoadResources();
        initialized_resources_ = true;
    }
    else
    {
        // initialize OpenGL ES and EGL
        if( EGL_SUCCESS != gl_context_->Resume( app_->window ) )
        {
            UnloadResources();
            LoadResources();
        }
    }
    ShowUI();
    // Initialize GL state.
    glEnable( GL_CULL_FACE );
    glEnable( GL_DEPTH_TEST );
    glDepthFunc( GL_LEQUAL );
    //Note that screen size might have been changed
    glViewport( 0, 0, gl_context_->GetScreenWidth(), gl_context_->GetScreenHeight() );
    renderer_.UpdateViewport();
    // Configure how gestures map onto camera motion.
    tap_camera_.SetFlip( 1.f, -1.f, -1.f );
    tap_camera_.SetPinchTransformFactor( 2.f, 2.f, 8.f );
    return 0; // always reports success
}
/**
 * Draw the current frame: update the FPS counter, advance the renderer,
 * clear, render the teapot and swap buffers.  If the swap reports a lost
 * EGL context, all GL resources are recreated.
 */
void Engine::DrawFrame()
{
    float fFPS;
    // PerfMonitor reports a fresh FPS value only periodically; fFPS is
    // valid only when Update() returns true.
    if( monitor_.Update( fFPS ) )
    {
        UpdateFPS( fFPS );
    }
    renderer_.Update( monitor_.GetCurrentTime() );

    // Just fill the screen with a color.
    glClearColor( 0.5f, 0.5f, 0.5f, 1.f );
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
    renderer_.Render();

    // Swap
    if( EGL_SUCCESS != gl_context_->Swap() )
    {
        // Context lost during swap: rebuild GL resources.
        UnloadResources();
        LoadResources();
    }
}
/**
 * Tear down the EGL context currently associated with the display.
 * The context itself is kept by GLContext so it can be resumed later.
 */
void Engine::TermDisplay()
{
    gl_context_->Suspend();
}
// Respond to APP_CMD_LOW_MEMORY: drop the GL context so the system can
// reclaim graphics memory. It will be recreated on the next InitDisplay().
void Engine::TrimMemory()
{
    LOGI( "Trimming memory" );
    gl_context_->Invalidate();
}
/**
 * Process the next input event.
 *
 * Static callback installed on android_app::onInputEvent; app->userData
 * carries the Engine instance.  Only motion (touch) events are handled:
 * a double tap resets the camera, drag and pinch gestures drive it.
 * Returns 1 when the event was consumed, 0 otherwise.
 */
int32_t Engine::HandleInput( android_app* app,
        AInputEvent* event )
{
    Engine* eng = (Engine*) app->userData;
    if( AInputEvent_getType( event ) == AINPUT_EVENT_TYPE_MOTION )
    {
        // Feed the event to all gesture detectors; each reports its state.
        ndk_helper::GESTURE_STATE doubleTapState = eng->doubletap_detector_.Detect( event );
        ndk_helper::GESTURE_STATE dragState = eng->drag_detector_.Detect( event );
        ndk_helper::GESTURE_STATE pinchState = eng->pinch_detector_.Detect( event );

        //Double tap detector has a priority over other detectors
        if( doubleTapState == ndk_helper::GESTURE_STATE_ACTION )
        {
            //Detect double tap
            eng->tap_camera_.Reset( true );
        }
        else
        {
            //Handle drag state
            if( dragState & ndk_helper::GESTURE_STATE_START )
            {
                //Otherwise, start dragging
                ndk_helper::Vec2 v;
                eng->drag_detector_.GetPointer( v );
                // Pointer positions are converted to normalized [-1,1] space
                // before being handed to the camera (see TransformPosition).
                eng->TransformPosition( v );
                eng->tap_camera_.BeginDrag( v );
            }
            else if( dragState & ndk_helper::GESTURE_STATE_MOVE )
            {
                ndk_helper::Vec2 v;
                eng->drag_detector_.GetPointer( v );
                eng->TransformPosition( v );
                eng->tap_camera_.Drag( v );
            }
            else if( dragState & ndk_helper::GESTURE_STATE_END )
            {
                eng->tap_camera_.EndDrag();
            }

            //Handle pinch state
            if( pinchState & ndk_helper::GESTURE_STATE_START )
            {
                //Start new pinch
                ndk_helper::Vec2 v1;
                ndk_helper::Vec2 v2;
                eng->pinch_detector_.GetPointers( v1, v2 );
                eng->TransformPosition( v1 );
                eng->TransformPosition( v2 );
                eng->tap_camera_.BeginPinch( v1, v2 );
            }
            else if( pinchState & ndk_helper::GESTURE_STATE_MOVE )
            {
                //Multi touch
                //Start new pinch
                ndk_helper::Vec2 v1;
                ndk_helper::Vec2 v2;
                eng->pinch_detector_.GetPointers( v1, v2 );
                eng->TransformPosition( v1 );
                eng->TransformPosition( v2 );
                eng->tap_camera_.Pinch( v1, v2 );
            }
        }
        return 1;
    }
    return 0;
}
/**
 * Process the next main command.
 *
 * Static callback installed on android_app::onAppCmd; app->userData carries
 * the Engine instance.  Drives display setup/teardown and sensor
 * suspend/resume across the activity lifecycle.
 */
void Engine::HandleCmd( struct android_app* app,
        int32_t cmd )
{
    Engine* eng = (Engine*) app->userData;
    switch( cmd )
    {
    case APP_CMD_SAVE_STATE:
        break;
    case APP_CMD_INIT_WINDOW:
        // The window is being shown, get it ready.
        if( app->window != NULL )
        {
            eng->InitDisplay();
            eng->DrawFrame();
        }
        break;
    case APP_CMD_TERM_WINDOW:
        // The window is being hidden or closed, clean it up.
        eng->TermDisplay();
        eng->has_focus_ = false;
        break;
    case APP_CMD_STOP:
        break;
    case APP_CMD_GAINED_FOCUS:
        eng->ResumeSensors();
        //Start animation
        eng->has_focus_ = true;
        break;
    case APP_CMD_LOST_FOCUS:
        eng->SuspendSensors();
        // Also stop animating.
        eng->has_focus_ = false;
        // Draw one final frame so the last rendered state stays on screen.
        eng->DrawFrame();
        break;
    case APP_CMD_LOW_MEMORY:
        //Free up GL resources
        eng->TrimMemory();
        break;
    }
}
//-------------------------------------------------------------------------
//Sensor handlers
//-------------------------------------------------------------------------
// Grab the default accelerometer and attach an event queue to the app's
// looper under LOOPER_ID_USER (no callback; events are drained manually in
// ProcessSensors()). accelerometer_sensor_ stays NULL if the device has none.
void Engine::InitSensors()
{
    sensor_manager_ = ASensorManager_getInstance();
    accelerometer_sensor_ = ASensorManager_getDefaultSensor( sensor_manager_,
            ASENSOR_TYPE_ACCELEROMETER );
    sensor_event_queue_ = ASensorManager_createEventQueue( sensor_manager_, app_->looper,
            LOOPER_ID_USER, NULL, NULL );
}
// Drain pending accelerometer events when the looper reported activity on
// our queue (id == LOOPER_ID_USER).  This sample reads and discards the
// events; the empty loop simply keeps the queue from backing up.
void Engine::ProcessSensors( int32_t id )
{
    // If a sensor has data, process it now.
    if( id == LOOPER_ID_USER )
    {
        if( accelerometer_sensor_ != NULL )
        {
            ASensorEvent event;
            while( ASensorEventQueue_getEvents( sensor_event_queue_, &event, 1 ) > 0 )
            {
            }
        }
    }
}
// Re-enable accelerometer delivery when the app gains focus.
void Engine::ResumeSensors()
{
    // When our app gains focus, we start monitoring the accelerometer.
    if( accelerometer_sensor_ != NULL )
    {
        ASensorEventQueue_enableSensor( sensor_event_queue_, accelerometer_sensor_ );
        // We'd like to get 60 events per second (in us).
        // (1000 ms / 60) truncates to 16 ms -> 16000 us, i.e. ~62 Hz.
        ASensorEventQueue_setEventRate( sensor_event_queue_, accelerometer_sensor_,
                (1000L / 60) * 1000 );
    }
}
// Disable accelerometer delivery when the app loses focus.
void Engine::SuspendSensors()
{
    // When our app loses focus, we stop monitoring the accelerometer.
    // This is to avoid consuming battery while not being used.
    if( accelerometer_sensor_ != NULL )
    {
        ASensorEventQueue_disableSensor( sensor_event_queue_, accelerometer_sensor_ );
    }
}
//-------------------------------------------------------------------------
//Misc
//-------------------------------------------------------------------------
// Attach the android_app state and propagate its AConfiguration to the
// gesture detectors (they use it for touch-slop / timing thresholds).
// Must be called before any callback fires; see android_main().
void Engine::SetState( android_app* state )
{
    app_ = state;
    doubletap_detector_.SetConfiguration( app_->config );
    drag_detector_.SetConfiguration( app_->config );
    pinch_detector_.SetConfiguration( app_->config );
}
// The engine is ready to animate exactly when it holds input focus.
bool Engine::IsReady()
{
    return has_focus_;
}
// Map a screen-space pointer position (pixels, origin top-left) to
// normalized [-1, 1] coordinates, in place: v' = 2*v/screen - 1.
void Engine::TransformPosition( ndk_helper::Vec2& vec )
{
    vec = ndk_helper::Vec2( 2.0f, 2.0f ) * vec
            / ndk_helper::Vec2( gl_context_->GetScreenWidth(), gl_context_->GetScreenHeight() )
            - ndk_helper::Vec2( 1.f, 1.f );
}
// Call the Java method TeapotNativeActivity.showUI() ("()V") via JNI to
// display the popup FPS overlay.  Attaches this native thread to the VM
// for the duration of the call and detaches afterwards.
void Engine::ShowUI()
{
    JNIEnv *jni;
    app_->activity->vm->AttachCurrentThread( &jni, NULL );

    //Default class retrieval
    jclass clazz = jni->GetObjectClass( app_->activity->clazz );
    jmethodID methodID = jni->GetMethodID( clazz, "showUI", "()V" );
    jni->CallVoidMethod( app_->activity->clazz, methodID );

    app_->activity->vm->DetachCurrentThread();
    return;
}
// Call the Java method TeapotNativeActivity.updateFPS(float) ("(F)V") via
// JNI to refresh the on-screen FPS label.  Attaches/detaches the native
// thread around the call, same pattern as ShowUI().
void Engine::UpdateFPS( float fFPS )
{
    JNIEnv *jni;
    app_->activity->vm->AttachCurrentThread( &jni, NULL );

    //Default class retrieval
    jclass clazz = jni->GetObjectClass( app_->activity->clazz );
    jmethodID methodID = jni->GetMethodID( clazz, "updateFPS", "(F)V" );
    jni->CallVoidMethod( app_->activity->clazz, methodID, fFPS );

    app_->activity->vm->DetachCurrentThread();
    return;
}
// Single global Engine instance shared by android_main() and the static
// android_app callbacks (Engine::HandleCmd / Engine::HandleInput).
Engine g_engine;
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main( android_app* state )
{
    // Historically required so the linker keeps the glue code;
    // a deprecated no-op in newer NDKs — TODO confirm against the glue version used.
    app_dummy();

    g_engine.SetState( state );

    //Init helper functions
    ndk_helper::JNIHelper::Init( state->activity, HELPER_CLASS_NAME );

    // Route glue callbacks back to the global engine instance.
    state->userData = &g_engine;
    state->onAppCmd = Engine::HandleCmd;
    state->onInputEvent = Engine::HandleInput;

#ifdef USE_NDK_PROFILER
    monstartup("libTeapotNativeActivity.so");
#endif

    // Prepare to monitor accelerometer
    g_engine.InitSensors();

    // loop waiting for stuff to do.
    while( 1 )
    {
        // Read all pending events.
        int id;
        int events;
        android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while( (id = ALooper_pollAll( g_engine.IsReady() ? 0 : -1, NULL, &events, (void**) &source ))
                >= 0 )
        {
            // Process this event.
            if( source != NULL )
                source->process( state, source );

            // Drain sensor events delivered under LOOPER_ID_USER.
            g_engine.ProcessSensors( id );

            // Check if we are exiting.
            if( state->destroyRequested != 0 )
            {
                g_engine.TermDisplay();
                return;
            }
        }

        if( g_engine.IsReady() )
        {
            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            g_engine.DrawFrame();
        }
    }
}

View File

@@ -0,0 +1,286 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//--------------------------------------------------------------------------------
// TeapotRenderer.cpp
// Render a teapot
//--------------------------------------------------------------------------------
//--------------------------------------------------------------------------------
// Include files
//--------------------------------------------------------------------------------
#include "TeapotRenderer.h"
//--------------------------------------------------------------------------------
// Teapot model data
//--------------------------------------------------------------------------------
#include "teapot.inl"
//--------------------------------------------------------------------------------
// Ctor
//--------------------------------------------------------------------------------
// Initialize every handle/pointer to a safe "empty" state.
// BUGFIX: previously no member was initialized, so ~TeapotRenderer() ->
// Unload() read uninitialized vbo_/ibo_/shader_param_.program_ (undefined
// behavior) if Init() was never called, and Update() tested an
// uninitialized camera_ pointer before Bind() ran.
TeapotRenderer::TeapotRenderer()
    : num_indices_( 0 ),
      num_vertices_( 0 ),
      ibo_( 0 ),
      vbo_( 0 ),
      camera_( NULL )
{
    // SHADER_PARAMS is a plain struct; clear the program handle explicitly
    // so Unload() never calls glDeleteProgram() on a garbage value.
    shader_param_.program_ = 0;
}
//--------------------------------------------------------------------------------
// Dtor
//--------------------------------------------------------------------------------
TeapotRenderer::~TeapotRenderer()
{
    // Release the GL objects (VBO/IBO/shader program) owned by this renderer.
    Unload();
}
/**
 * One-time GL setup: compile/link the shaders, build the index buffer and an
 * interleaved position+normal vertex buffer from the arrays in teapot.inl,
 * and set the initial model transform.  Requires a current GL context.
 */
void TeapotRenderer::Init()
{
    //Settings
    glFrontFace( GL_CCW );

    //Load shader
    LoadShaders( &shader_param_, "Shaders/VS_ShaderPlain.vsh",
            "Shaders/ShaderPlain.fsh" );

    //Create Index buffer
    num_indices_ = sizeof(teapotIndices) / sizeof(teapotIndices[0]);
    glGenBuffers( 1, &ibo_ );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo_ );
    glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof(teapotIndices), teapotIndices,
            GL_STATIC_DRAW );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, 0 );

    //Create VBO
    // teapotPositions stores 3 floats per vertex.
    num_vertices_ = sizeof(teapotPositions) / sizeof(teapotPositions[0]) / 3;
    int32_t iStride = sizeof(TEAPOT_VERTEX);
    int32_t iIndex = 0;
    // Interleave positions and normals into a temporary staging array.
    TEAPOT_VERTEX* p = new TEAPOT_VERTEX[num_vertices_];
    for( int32_t i = 0; i < num_vertices_; ++i )
    {
        p[i].pos[0] = teapotPositions[iIndex];
        p[i].pos[1] = teapotPositions[iIndex + 1];
        p[i].pos[2] = teapotPositions[iIndex + 2];

        p[i].normal[0] = teapotNormals[iIndex];
        p[i].normal[1] = teapotNormals[iIndex + 1];
        p[i].normal[2] = teapotNormals[iIndex + 2];
        iIndex += 3;
    }
    glGenBuffers( 1, &vbo_ );
    glBindBuffer( GL_ARRAY_BUFFER, vbo_ );
    glBufferData( GL_ARRAY_BUFFER, iStride * num_vertices_, p, GL_STATIC_DRAW );
    glBindBuffer( GL_ARRAY_BUFFER, 0 );
    // glBufferData copied the data into GL; the staging array can go.
    delete[] p;

    UpdateViewport();

    // Initial model transform: push the teapot back and tilt it around X.
    mat_model_ = ndk_helper::Mat4::Translation( 0, 0, -15.f );
    ndk_helper::Mat4 mat = ndk_helper::Mat4::RotationX( M_PI / 3 );
    mat_model_ = mat * mat_model_;
}
// Recompute the projection matrix from the current GL viewport.  Called
// from Init() and whenever the surface size may have changed (see
// Engine::InitDisplay(), which calls glViewport() right before this).
void TeapotRenderer::UpdateViewport()
{
    //Init Projection matrices
    int32_t viewport[4];
    glGetIntegerv( GL_VIEWPORT, viewport );
    // viewport[2] / viewport[3] are width / height in pixels.
    float fAspect = (float) viewport[2] / (float) viewport[3];

    const float CAM_NEAR = 5.f;
    const float CAM_FAR = 10000.f;
    // (Removed the unused local 'bRotate' present in the original.)
    mat_projection_ = ndk_helper::Mat4::Perspective( fAspect, 1.f, CAM_NEAR, CAM_FAR );
}
// Release the GL objects owned by this renderer.  Each handle is deleted
// only if it was actually created, then zeroed so a repeated Unload()
// (e.g. context loss followed by the destructor) is a harmless no-op.
void TeapotRenderer::Unload()
{
    if( vbo_ != 0 )
    {
        glDeleteBuffers( 1, &vbo_ );
        vbo_ = 0;
    }

    if( ibo_ != 0 )
    {
        glDeleteBuffers( 1, &ibo_ );
        ibo_ = 0;
    }

    if( shader_param_.program_ != 0 )
    {
        glDeleteProgram( shader_param_.program_ );
        shader_param_.program_ = 0;
    }
}
/**
 * Advance per-frame view state.
 *
 * Rebuilds the view matrix from a fixed camera position; when a TapCamera
 * has been Bind()'d, its gesture-driven translation and rotation are folded
 * into the resulting matrix together with the model transform.
 *
 * @param fTime current time in seconds — NOTE(review): currently unused;
 *        animation is driven entirely by the TapCamera state.
 */
void TeapotRenderer::Update( float fTime )
{
    const float CAM_X = 0.f;
    const float CAM_Y = 0.f;
    const float CAM_Z = 700.f;

    mat_view_ = ndk_helper::Mat4::LookAt( ndk_helper::Vec3( CAM_X, CAM_Y, CAM_Z ),
            ndk_helper::Vec3( 0.f, 0.f, 0.f ), ndk_helper::Vec3( 0.f, 1.f, 0.f ) );

    if( camera_ )
    {
        camera_->Update();
        mat_view_ = camera_->GetTransformMatrix() * mat_view_
                * camera_->GetRotationMatrix() * mat_model_;
    }
    else
    {
        mat_view_ = mat_view_ * mat_model_;
    }
}
/**
 * Draw the teapot with the current view/projection matrices: bind the
 * interleaved VBO and the IBO, set material/light/matrix uniforms, and
 * issue a single indexed draw call, unbinding the buffers afterwards.
 */
void TeapotRenderer::Render()
{
    //
    // Feed Projection and Model View matrices to the shaders
    ndk_helper::Mat4 mat_vp = mat_projection_ * mat_view_;

    // Bind the VBO
    glBindBuffer( GL_ARRAY_BUFFER, vbo_ );

    int32_t iStride = sizeof(TEAPOT_VERTEX);
    // Pass the vertex data: position at offset 0, normal 3 floats later
    // (matches the TEAPOT_VERTEX interleaved layout built in Init()).
    glVertexAttribPointer( ATTRIB_VERTEX, 3, GL_FLOAT, GL_FALSE, iStride,
            BUFFER_OFFSET( 0 ) );
    glEnableVertexAttribArray( ATTRIB_VERTEX );

    glVertexAttribPointer( ATTRIB_NORMAL, 3, GL_FLOAT, GL_FALSE, iStride,
            BUFFER_OFFSET( 3 * sizeof(GLfloat) ) );
    glEnableVertexAttribArray( ATTRIB_NORMAL );

    // Bind the IB
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, ibo_ );

    glUseProgram( shader_param_.program_ );

    // Fixed material: reddish diffuse, white specular (shininess 10), dim ambient.
    TEAPOT_MATERIALS material = { { 1.0f, 0.5f, 0.5f }, { 1.0f, 1.0f, 1.0f, 10.f }, {
            0.1f, 0.1f, 0.1f }, };

    //Update uniforms
    glUniform4f( shader_param_.material_diffuse_, material.diffuse_color[0],
            material.diffuse_color[1], material.diffuse_color[2], 1.f );

    glUniform4f( shader_param_.material_specular_, material.specular_color[0],
            material.specular_color[1], material.specular_color[2],
            material.specular_color[3] );
    //
    //using glUniform3fv here was troublesome
    //
    glUniform3f( shader_param_.material_ambient_, material.ambient_color[0],
            material.ambient_color[1], material.ambient_color[2] );

    glUniformMatrix4fv( shader_param_.matrix_projection_, 1, GL_FALSE, mat_vp.Ptr() );
    glUniformMatrix4fv( shader_param_.matrix_view_, 1, GL_FALSE, mat_view_.Ptr() );
    glUniform3f( shader_param_.light0_, 100.f, -200.f, -600.f );

    glDrawElements( GL_TRIANGLES, num_indices_, GL_UNSIGNED_SHORT, BUFFER_OFFSET(0) );

    glBindBuffer( GL_ARRAY_BUFFER, 0 );
    glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, 0 );
}
/**
 * Compile strVsh/strFsh, link them into a program and fill *params with the
 * program handle and uniform locations.  Returns true on success; on any
 * failure all partially-created GL objects are destroyed and *params is
 * left untouched.
 */
bool TeapotRenderer::LoadShaders( SHADER_PARAMS* params,
        const char* strVsh,
        const char* strFsh )
{
    GLuint program;
    GLuint vert_shader = 0;
    GLuint frag_shader = 0;
    // (Removed the unused 'vert_shader_pathname'/'frag_shader_pathname' locals.)

    // Create shader program
    program = glCreateProgram();
    LOGI( "Created Shader %d", program );

    // Create and compile vertex shader
    if( !ndk_helper::shader::CompileShader( &vert_shader, GL_VERTEX_SHADER, strVsh ) )
    {
        LOGI( "Failed to compile vertex shader" );
        glDeleteProgram( program );
        return false;
    }

    // Create and compile fragment shader
    if( !ndk_helper::shader::CompileShader( &frag_shader, GL_FRAGMENT_SHADER, strFsh ) )
    {
        LOGI( "Failed to compile fragment shader" );
        // BUGFIX: the vertex shader compiled above must be released too,
        // otherwise it leaks on this error path.
        glDeleteShader( vert_shader );
        glDeleteProgram( program );
        return false;
    }

    // Attach vertex shader to program
    glAttachShader( program, vert_shader );

    // Attach fragment shader to program
    glAttachShader( program, frag_shader );

    // Bind attribute locations
    // this needs to be done prior to linking
    glBindAttribLocation( program, ATTRIB_VERTEX, "myVertex" );
    glBindAttribLocation( program, ATTRIB_NORMAL, "myNormal" );
    glBindAttribLocation( program, ATTRIB_UV, "myUV" );

    // Link program
    if( !ndk_helper::shader::LinkProgram( program ) )
    {
        LOGI( "Failed to link program: %d", program );
        if( vert_shader )
        {
            glDeleteShader( vert_shader );
            vert_shader = 0;
        }
        if( frag_shader )
        {
            glDeleteShader( frag_shader );
            frag_shader = 0;
        }
        if( program )
        {
            glDeleteProgram( program );
        }
        return false;
    }

    // Get uniform locations
    params->matrix_projection_ = glGetUniformLocation( program, "uPMatrix" );
    params->matrix_view_ = glGetUniformLocation( program, "uMVMatrix" );
    params->light0_ = glGetUniformLocation( program, "vLight0" );
    params->material_diffuse_ = glGetUniformLocation( program, "vMaterialDiffuse" );
    params->material_ambient_ = glGetUniformLocation( program, "vMaterialAmbient" );
    params->material_specular_ = glGetUniformLocation( program, "vMaterialSpecular" );

    // Release vertex and fragment shaders: the linked program keeps the
    // binaries, so the shader objects are no longer needed.
    if( vert_shader )
        glDeleteShader( vert_shader );
    if( frag_shader )
        glDeleteShader( frag_shader );

    params->program_ = program;
    return true;
}
// Attach the TapCamera whose gesture state Update() folds into the view
// matrix; pass NULL to detach.  Always reports success.
bool TeapotRenderer::Bind( ndk_helper::TapCamera* camera )
{
    camera_ = camera;
    return true;
}

View File

@@ -36,67 +36,70 @@
#include <android/log.h>
#include <android_native_app_glue.h>
#include <android/native_window_jni.h>
#include <cpu-features.h>
#define CLASS_NAME "android/app/NativeActivity"
#define APPLICATION_CLASS_NAME "com/sample/teapot/TeapotApplication"
#include "NDKHelper.h"
#define BUFFER_OFFSET(i) ((char*)NULL + (i))
#define BUFFER_OFFSET(i) ((char *)NULL + (i))
struct TEAPOT_VERTEX {
float pos[3];
float normal[3];
struct TEAPOT_VERTEX
{
float pos[3];
float normal[3];
};
enum SHADER_ATTRIBUTES {
ATTRIB_VERTEX,
ATTRIB_NORMAL,
ATTRIB_UV,
enum SHADER_ATTRIBUTES
{
ATTRIB_VERTEX, ATTRIB_NORMAL, ATTRIB_UV,
};
struct SHADER_PARAMS {
GLuint program_;
GLuint light0_;
GLuint material_diffuse_;
GLuint material_ambient_;
GLuint material_specular_;
struct SHADER_PARAMS
{
GLuint program_;
GLuint light0_;
GLuint material_diffuse_;
GLuint material_ambient_;
GLuint material_specular_;
GLuint matrix_projection_;
GLuint matrix_view_;
GLuint matrix_projection_;
GLuint matrix_view_;
};
struct TEAPOT_MATERIALS {
float diffuse_color[3];
float specular_color[4];
float ambient_color[3];
struct TEAPOT_MATERIALS
{
float diffuse_color[3];
float specular_color[4];
float ambient_color[3];
};
class TeapotRenderer {
int32_t num_indices_;
int32_t num_vertices_;
GLuint ibo_;
GLuint vbo_;
class TeapotRenderer
{
int32_t num_indices_;
int32_t num_vertices_;
GLuint ibo_;
GLuint vbo_;
SHADER_PARAMS shader_param_;
bool LoadShaders(SHADER_PARAMS* params, const char* strVsh,
const char* strFsh);
SHADER_PARAMS shader_param_;
bool LoadShaders( SHADER_PARAMS* params, const char* strVsh, const char* strFsh );
ndk_helper::Mat4 mat_projection_;
ndk_helper::Mat4 mat_view_;
ndk_helper::Mat4 mat_model_;
ndk_helper::Mat4 mat_projection_;
ndk_helper::Mat4 mat_view_;
ndk_helper::Mat4 mat_model_;
ndk_helper::TapCamera* camera_;
public:
TeapotRenderer();
virtual ~TeapotRenderer();
void Init();
void Render();
void Update(float dTime);
bool Bind(ndk_helper::TapCamera* camera);
void Unload();
void UpdateViewport();
ndk_helper::TapCamera* camera_;
public:
TeapotRenderer();
virtual ~TeapotRenderer();
void Init();
void Render();
void Update( float dTime );
bool Bind( ndk_helper::TapCamera* camera );
void Unload();
void UpdateViewport();
};
#endif

3
Teapot/lint.xml Normal file
View File

@@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<lint>
</lint>

14
Teapot/project.properties Normal file
View File

@@ -0,0 +1,14 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=Google Inc.:Google APIs:19

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -9,9 +9,9 @@
android:id="@+id/textViewFPS"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="end"
android:text="@string/fps"
android:gravity="right"
android:text="0.0 FPS"
android:textAppearance="?android:attr/textAppearanceMedium"
android:textColor="@android:color/white" />
</LinearLayout>
</LinearLayout>

View File

@@ -1,6 +1,5 @@
<resources>
<string name="app_name">Teapot</string>
<string name="fps">0.0 FPS</string>
</resources>
</resources>

View File

@@ -0,0 +1,203 @@
/*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample.helper;
import java.io.File;
import java.io.FileInputStream;
import javax.microedition.khronos.opengles.GL10;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.opengl.GLUtils;
import android.util.Log;
/**
 * Java-side helpers invoked from native code (via JNI) for tasks that are
 * easier through the Android framework: bitmap decoding/uploading, audio
 * property queries, and locating the native library directory.
 *
 * NOTE(review): most instance methods dereference the static context set by
 * setContext() without a null check; callers must invoke setContext() first.
 */
public class NDKHelper
{
    private static Context context;

    /** Store the context used by the other helpers. Must be called before any of them. */
    public static void setContext(Context c)
    {
        Log.i("NDKHelper", "setContext:" + c);
        context = c;
    }

    //
    // Load Bitmap
    // Java helper is useful decoding PNG, TIFF etc rather than linking libPng
    // etc separately
    //

    /** Smallest power of two >= i (returns 1 for i <= 1). */
    private int nextPOT(int i)
    {
        int pot = 1;
        while (pot < i)
            pot <<= 1;
        return pot;
    }

    /** Scale a bitmap to the given dimensions; returns null if the input is null. */
    private Bitmap scaleBitmap(Bitmap bitmapToScale, float newWidth, float newHeight)
    {
        if (bitmapToScale == null)
            return null;
        // get the original width and height
        int width = bitmapToScale.getWidth();
        int height = bitmapToScale.getHeight();
        // create a matrix for the manipulation
        Matrix matrix = new Matrix();

        // resize the bit map
        matrix.postScale(newWidth / width, newHeight / height);

        // recreate the new Bitmap and set it back
        return Bitmap.createBitmap(bitmapToScale, 0, 0, bitmapToScale.getWidth(),
                bitmapToScale.getHeight(), matrix, true);
    }

    /**
     * Decode an image (from the app's external files dir if present, else
     * from assets) and upload it to the currently bound GL_TEXTURE_2D.
     *
     * @param path image path relative to the external files dir / assets.
     * @return true if the bitmap was decoded and uploaded, false otherwise.
     */
    public boolean loadTexture(String path)
    {
        Bitmap bitmap = null;
        try
        {
            String str = path;
            if (!path.startsWith("/"))
            {
                str = "/" + path;
            }

            File file = new File(context.getExternalFilesDir(null), str);
            if (file.canRead())
            {
                bitmap = BitmapFactory.decodeStream(new FileInputStream(file));
            } else
            {
                bitmap = BitmapFactory.decodeStream(context.getResources().getAssets()
                        .open(path));
            }
        } catch (Exception e)
        {
            Log.w("NDKHelper", "Couldn't load a file:" + path);
            return false;
        }

        // BUGFIX: decodeStream() can return null without throwing; the
        // original reported success even though no texture was uploaded.
        if (bitmap == null)
        {
            Log.w("NDKHelper", "Couldn't load a file:" + path);
            return false;
        }

        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);
        return true;
    }

    /**
     * Decode an image from assets, optionally scaling it up to the next
     * power-of-two dimensions. Returns null on failure.
     */
    public Bitmap openBitmap(String path, boolean iScalePOT)
    {
        Bitmap bitmap = null;
        try
        {
            bitmap = BitmapFactory.decodeStream(context.getResources().getAssets()
                    .open(path));
            if (iScalePOT)
            {
                int originalWidth = getBitmapWidth(bitmap);
                int originalHeight = getBitmapHeight(bitmap);
                int width = nextPOT(originalWidth);
                int height = nextPOT(originalHeight);
                if (originalWidth != width || originalHeight != height)
                {
                    // Scale it
                    bitmap = scaleBitmap(bitmap, width, height);
                }
            }
        } catch (Exception e)
        {
            Log.w("NDKHelper", "Couldn't load a file:" + path);
        }
        return bitmap;
    }

    /** Width of the bitmap in pixels (JNI accessor). */
    public int getBitmapWidth(Bitmap bmp)
    {
        return bmp.getWidth();
    }

    /** Height of the bitmap in pixels (JNI accessor). */
    public int getBitmapHeight(Bitmap bmp)
    {
        return bmp.getHeight();
    }

    /** Copy the bitmap's ARGB pixels into the caller-provided array (JNI accessor). */
    public void getBitmapPixels(Bitmap bmp, int[] pixels)
    {
        int w = bmp.getWidth();
        int h = bmp.getHeight();
        bmp.getPixels(pixels, 0, w, 0, 0, w, h);
    }

    /** Free the bitmap's pixel memory (JNI accessor). */
    public void closeBitmap(Bitmap bmp)
    {
        bmp.recycle();
    }

    /**
     * Return the directory containing the app's native libraries, or
     * "/system/lib/" for non-updated system apps.
     */
    public static String getNativeLibraryDirectory(Context appContext)
    {
        // BUGFIX: use the supplied context; the parameter was previously
        // ignored in favor of the static field (NPE if setContext() was
        // never called). Falls back to the static context for old callers.
        Context c = (appContext != null) ? appContext : context;
        ApplicationInfo ai = c.getApplicationInfo();

        Log.w("NDKHelper", "ai.nativeLibraryDir:" + ai.nativeLibraryDir);

        if ((ai.flags & ApplicationInfo.FLAG_UPDATED_SYSTEM_APP) != 0
                || (ai.flags & ApplicationInfo.FLAG_SYSTEM) == 0)
        {
            return ai.nativeLibraryDir;
        }
        return "/system/lib/";
    }

    /**
     * Native (fast-path) output buffer size in frames, or 0 when unknown /
     * unsupported (API < 17).
     */
    public int getNativeAudioBufferSize()
    {
        int SDK_INT = android.os.Build.VERSION.SDK_INT;
        if (SDK_INT >= 17)
        {
            AudioManager am = (AudioManager) context
                    .getSystemService(Context.AUDIO_SERVICE);
            String framesPerBuffer = am
                    .getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
            // BUGFIX: getProperty() may return null on devices that do not
            // report this property; treat that as "unknown" instead of NPE.
            if (framesPerBuffer == null)
            {
                return 0;
            }
            return Integer.parseInt(framesPerBuffer);
        } else
        {
            return 0;
        }
    }

    /** Native output sample rate in Hz for the system audio stream. */
    public int getNativeAudioSampleRate()
    {
        return AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);
    }
}

View File

@@ -32,7 +32,6 @@ import android.widget.Toast;
public class TeapotApplication extends Application {
public void onCreate(){
super.onCreate();
Log.w("native-activity", "onCreate");
final PackageManager pm = getApplicationContext().getPackageManager();

View File

@@ -16,8 +16,6 @@
package com.sample.teapot;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.NativeActivity;
import android.os.Bundle;
import android.view.Gravity;
@@ -51,7 +49,6 @@ public class TeapotNativeActivity extends NativeActivity {
}
@TargetApi(19)
protected void onResume() {
super.onResume();
@@ -73,7 +70,6 @@ public class TeapotNativeActivity extends NativeActivity {
}
// Our popup window, you will call it from your C/C++ code later
@TargetApi(19)
void setImmersiveSticky() {
View decorView = getWindow().getDecorView();
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_FULLSCREEN
@@ -88,7 +84,6 @@ public class TeapotNativeActivity extends NativeActivity {
PopupWindow _popupWindow;
TextView _label;
@SuppressLint("InflateParams")
public void showUI()
{
if( _popupWindow != null )
@@ -114,7 +109,7 @@ public class TeapotNativeActivity extends NativeActivity {
_activity.setContentView(mainLayout, params);
// Show our UI over NativeActivity window
_popupWindow.showAtLocation(mainLayout, Gravity.TOP | Gravity.START, 10, 10);
_popupWindow.showAtLocation(mainLayout, Gravity.TOP | Gravity.LEFT, 10, 10);
_popupWindow.update();
_label = (TextView)popupView.findViewById(R.id.textViewFPS);
@@ -125,6 +120,10 @@ public class TeapotNativeActivity extends NativeActivity {
protected void onPause()
{
super.onPause();
if (_popupWindow != null) {
_popupWindow.dismiss();
_popupWindow = null;
}
}
public void updateFPS(final float fFPS)

View File

@@ -1,7 +0,0 @@
status: PUBLISHED
technologies: [Android, NDK]
categories: [NDK]
languages: [C++, Java]
solutions: [Mobile]
github: googlesamples/android-ndk
license: apache2

View File

@@ -1,93 +0,0 @@
Audio-Echo
==========
This sample demonstrates how to use OpenSL ES to create a player and a recorder on the Android fast audio path and connect them to loop back audio. On most Android devices there is an optimized audio path that is tuned for low latency. The sample creates a player and a recorder that work in this highly optimized audio path (sometimes called the native audio path, [low latency path](http://stackoverflow.com/questions/14842803/low-latency-audio-playback-on-android?rq=1), or fast audio path). The application is validated against the following configurations:
* Android L AndroidOne
* Android M Nexus 5, Nexus 9
This sample uses the new Android Studio with CMake support, and shows how to use shared stl lib with android studio version 2.2.0, see CMakeLists.txt for details
***Note that OpenSL ES is [deprecated from Android 11](https://developer.android.com/preview/features#deprecate-opensl), developers are recommended to use [Oboe](https://github.com/google/oboe) library instead.***
Pre-requisites
--------------
- Android Studio 2.2+ with [NDK](https://developer.android.com/ndk/) bundle.
Getting Started
---------------
1. [Download Android Studio](http://developer.android.com/sdk/index.html)
1. Launch Android Studio.
1. Open the sample directory.
1. Open *File/Project Structure...*
- Click *Download* or *Select NDK location*.
1. Click *Tools/Android/Sync Project with Gradle Files*.
1. Click *Run/Run 'app'*.
Usage
-----
The app captures audio from the Android device and plays it back on the same device; the playback on the speaker is captured immediately and played back again...! To verify this, it is recommended to "mute" the playback audio with an earphone/earbud so it does not get looped back. Some devices, like the Nexus 9, stop using the onboard microphone AND speaker once you plug in an external headphone/headset — in this case you need to turn on the microphone that comes with your headphone. Also, when switching between the external headphone and the internal one, the volume is sometimes very low or muted; it is recommended to increase the playback volume with the volume buttons on the phone/tablet after plugging in the external headphone.
Low Latency Verification
------------------------
1. execute "adb shell dumpsys media.audio_flinger". Find a list of the running processes
Name Active Client Type Fmt Chn mask Session fCount S F SRate L dB R dB Server Main buf Aux Buf Flags UndFrmCnt
F 2 no 704 1 00000001 00000003 562 13248 S 1 48000 -inf -inf 000033C0 0xabab8480 0x0 0x600 0
F 6 yes 9345 3 00000001 00000001 576 128 A 1 48000 0 0 0376AA00 0xabab8480 0x0 0x400 256
1. execute adb shell ps | grep echo
* find the sample app pid
* check with result on step 1.
if there is one "F" in the front of your echo pid, **player** is on fast audio path
Fast audio capture is a totally different story: if you do **NOT** see
com.example.nativeaudio W/AudioRecord﹕ AUDIO_INPUT_FLAG_FAST denied by client
in your logcat output when you are creating audio recorder, you could "assume" you are on the fast path.
If your system image was built with muted ALOGW, you will not be able to see the above warning message.
Tune-ups
--------
A couple of knobs in the code for lower latency purpose:
* audio buffer size
* number of audio buffers cached before kicking start player
The lower you go with them, the lower latency you get and also the lower budget for audio processing. All audio processing has to be completed in the time period they are captured / played back, plus extra time needed for:
* audio driver
* audio flinger framework,
* bufferqueue callbacks etc
Besides those, the irregularity of the buffer-queue player/capture callback timing is another factor. The callbacks from OpenSL may not be as regular as you assume; the more irregular they are, the more likely you are to get choppy audio. To fight that, more buffering is needed, which defeats the low-latency purpose! The low latency path is highly tuned, so you have a better chance of getting more regular callbacks. You may experiment with your platform to find the best parameters for low latency and a continuous audio playback experience.
The app captures and plays back on the same device [most of the time the same chip], so the capture and playback clocks are assumed to be naturally synchronized [and clock drift is not dealt with].
Credits
-------
* The sample is greatly inspired by native-audio sample
* Don Turner @ Google for the helping of low latency path
* Ian Ni-Lewis @ Google for producer/consumer queue and many others
Support
-------
If you've found an error in these samples, please [file an issue](https://github.com/googlesamples/android-ndk/issues/new).
Patches are encouraged, and may be submitted by [forking this project](https://github.com/googlesamples/android-ndk/fork) and
submitting a pull request through GitHub. Please see [CONTRIBUTING.md](../CONTRIBUTING.md) for more details.
- [Stack Overflow](http://stackoverflow.com/questions/tagged/android-ndk)
- [Android Tools Feedbacks](http://tools.android.com/feedback)
License
-------
Copyright 2015 Google, Inc.
Licensed to the Apache Software Foundation (ASF) under one or more contributor
license agreements. See the NOTICE file distributed with this work for
additional information regarding copyright ownership. The ASF licenses this
file to you under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.

View File

@@ -1,42 +0,0 @@
// Gradle build script for the audio-echo sample app module.
apply plugin: 'com.android.application'
android {
    compileSdkVersion 29
    // Pin the NDK release so builds are reproducible across machines.
    ndkVersion '21.2.6472646'
    defaultConfig {
        applicationId 'com.google.sample.echo'
        /*
         * To run on an Android version earlier than android-21:
         *  - set this minSdkVersion and cmake's ANDROID_PLATFORM to your version
         *  - set ANDROID_STL to c++_static for some very early Android versions
         */
        minSdkVersion 23
        targetSdkVersion 28
        versionCode 1
        versionName '1.0'
        externalNativeBuild {
            cmake {
                // Statically link the C++ runtime into the native library.
                arguments '-DANDROID_STL=c++_static'
            }
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'),
                    'proguard-rules.pro'
        }
    }
    externalNativeBuild {
        cmake {
            // CMake version used to drive the native build below.
            version '3.18.1'
            path 'src/main/cpp/CMakeLists.txt'
        }
    }
}
dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.appcompat:appcompat:1.0.2'
}

View File

@@ -1,17 +0,0 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /Users/gfan/dev/android-sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

View File

@@ -1,24 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Manifest for the audio-echo sample (com.google.sample.echo). -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.google.sample.echo" >
    <!-- Needed to capture audio from the microphone. -->
    <uses-permission android:name="android.permission.RECORD_AUDIO"></uses-permission>
    <!-- Needed to adjust audio settings for the echo path. -->
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"></uses-permission>
    <application
        android:allowBackup="false"
        android:fullBackupContent="false"
        android:supportsRtl="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <!-- Single launcher activity, locked to portrait. -->
        <activity
            android:name=".MainActivity"
            android:label="@string/app_name"
            android:screenOrientation="portrait">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>

View File

@@ -1,23 +0,0 @@
# Build script for the native "echo" shared library.
cmake_minimum_required(VERSION 3.4.1)
project(echo LANGUAGES C CXX)

# All native sources for the echo library.
add_library(echo
  SHARED
    audio_main.cpp
    audio_player.cpp
    audio_recorder.cpp
    audio_effect.cpp
    audio_common.cpp
    debug_utils.cpp)

# include libraries needed for echo lib: OpenSL ES audio, Android/logging
# support, and atomics used by the lock-free buffer queue.
target_link_libraries(echo
  PRIVATE
    OpenSLES
    android
    log
    atomic)

# Treat all warnings as errors to keep the sample clean.
target_compile_options(echo
  PRIVATE
    -Wall -Werror)

View File

@@ -1,47 +0,0 @@
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
// Thin logcat wrappers (LOGV..LOGF) tagged with MODULE_NAME.
#ifndef NATIVE_AUDIO_ANDROID_DEBUG_H_H
#define NATIVE_AUDIO_ANDROID_DEBUG_H_H
#include <android/log.h>
// Flip this to 0 to compile out all logging macros below.
#if 1
#define MODULE_NAME "AUDIO-ECHO"
#define LOGV(...) \
  __android_log_print(ANDROID_LOG_VERBOSE, MODULE_NAME, __VA_ARGS__)
#define LOGD(...) \
  __android_log_print(ANDROID_LOG_DEBUG, MODULE_NAME, __VA_ARGS__)
#define LOGI(...) \
  __android_log_print(ANDROID_LOG_INFO, MODULE_NAME, __VA_ARGS__)
#define LOGW(...) \
  __android_log_print(ANDROID_LOG_WARN, MODULE_NAME, __VA_ARGS__)
#define LOGE(...) \
  __android_log_print(ANDROID_LOG_ERROR, MODULE_NAME, __VA_ARGS__)
#define LOGF(...) \
  __android_log_print(ANDROID_LOG_FATAL, MODULE_NAME, __VA_ARGS__)
#else
// Logging disabled: macros expand to nothing.
#define LOGV(...)
#define LOGD(...)
#define LOGI(...)
#define LOGW(...)
#define LOGE(...)
#define LOGF(...)
#endif
#endif  // NATIVE_AUDIO_ANDROID_DEBUG_H_H

View File

@@ -1,66 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "audio_common.h"
// Translate our SampleFormat description into the OpenSL ES PCM (EX) format
// struct used when creating players/recorders. Mono or stereo only.
void ConvertToSLSampleFormat(SLAndroidDataFormat_PCM_EX* pFormat,
                             SampleFormat* pSampleInfo_) {
  assert(pFormat);
  memset(pFormat, 0, sizeof(*pFormat));
  pFormat->formatType = SL_DATAFORMAT_PCM;
  // Only support 2 channels
  // For channelMask, refer to wilhelm/src/android/channels.c for details
  if (pSampleInfo_->channels_ <= 1) {
    pFormat->numChannels = 1;
    pFormat->channelMask = SL_SPEAKER_FRONT_LEFT;
  } else {
    pFormat->numChannels = 2;
    pFormat->channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
  }
  // NOTE(review): sampleRate_ is copied through unchanged; callers appear to
  // pass milliHertz (see SLmilliHertz usage in audio_main.cpp) — confirm.
  pFormat->sampleRate = pSampleInfo_->sampleRate_;
  pFormat->endianness = SL_BYTEORDER_LITTLEENDIAN;
  // Default: container size equals the requested PCM sample size.
  pFormat->bitsPerSample = pSampleInfo_->pcmFormat_;
  pFormat->containerSize = pSampleInfo_->pcmFormat_;
  /*
   * fixup for android extended representations...
   * When a representation is requested, switch to the Android PCM_EX format
   * and force the matching fixed sample/container sizes.
   */
  pFormat->representation = pSampleInfo_->representation_;
  switch (pFormat->representation) {
    case SL_ANDROID_PCM_REPRESENTATION_UNSIGNED_INT:
      pFormat->bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_8;
      pFormat->containerSize = SL_PCMSAMPLEFORMAT_FIXED_8;
      pFormat->formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
      break;
    case SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT:
      pFormat->bitsPerSample =
          SL_PCMSAMPLEFORMAT_FIXED_16;  // supports 16, 24, and 32
      pFormat->containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
      pFormat->formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
      break;
    case SL_ANDROID_PCM_REPRESENTATION_FLOAT:
      pFormat->bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_32;
      pFormat->containerSize = SL_PCMSAMPLEFORMAT_FIXED_32;
      pFormat->formatType = SL_ANDROID_DATAFORMAT_PCM_EX;
      break;
    case 0:
      // No extended representation requested: keep plain SL_DATAFORMAT_PCM.
      break;
    default:
      assert(0);
  }
}

View File

@@ -1,79 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVE_AUDIO_AUDIO_COMMON_H
#define NATIVE_AUDIO_AUDIO_COMMON_H
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include "android_debug.h"
#include "debug_utils.h"
#include "buf_manager.h"
/*
 * Audio Sample Controls...
 */
#define AUDIO_SAMPLE_CHANNELS 1
/*
 * Sample Buffer Controls...
 */
// NOTE(review): RECORD_DEVICE_KICKSTART_BUF_COUNT is not referenced in the
// visible sources — confirm whether it is still used.
#define RECORD_DEVICE_KICKSTART_BUF_COUNT 2
// Buffers accumulated before real playback starts (see audio_player.cpp).
#define PLAY_KICKSTART_BUFFER_COUNT 3
// Depth of the queue shadowing buffers currently owned by the OpenSL device.
#define DEVICE_SHADOW_BUFFER_QUEUE_LEN 4
// Total sample buffers shared between recorder and player.
#define BUF_COUNT 16
// One PCM stream configuration, shared by player and recorder.
struct SampleFormat {
  uint32_t sampleRate_;     // NOTE(review): treated as milliHertz by callers — confirm
  uint32_t framesPerBuf_;   // frames per device buffer
  uint16_t channels_;       // channel count (1 or 2)
  uint16_t pcmFormat_;      // 8 bit, 16 bit, 24 bit ...
  uint32_t representation_; // android extensions
};
extern void ConvertToSLSampleFormat(SLAndroidDataFormat_PCM_EX* pFormat,
                                    SampleFormat* format);
/*
 * GetSystemTicks(void): return the time in micro sec
 */
__inline__ uint64_t GetSystemTicks(void) {
  struct timeval Time;
  gettimeofday(&Time, NULL);
  return (static_cast<uint64_t>(1000000) * Time.tv_sec + Time.tv_usec);
}
// Assert that an OpenSL ES call returned SL_RESULT_SUCCESS.
#define SLASSERT(x)                   \
  do {                                \
    assert(SL_RESULT_SUCCESS == (x)); \
    (void)(x);                        \
  } while (0)
/*
 * Interface for player and recorder to communicate with engine
 */
#define ENGINE_SERVICE_MSG_KICKSTART_PLAYER 1
#define ENGINE_SERVICE_MSG_RETRIEVE_DUMP_BUFS 2
#define ENGINE_SERVICE_MSG_RECORDED_AUDIO_AVAILABLE 3
// Callback from player/recorder into the engine; returns true when handled.
typedef bool (*ENGINE_CALLBACK)(void* pCTX, uint32_t msg, void* pData);
/*
 * flag to enable file dumping
 */
// #define ENABLE_LOG 1
#endif  // NATIVE_AUDIO_AUDIO_COMMON_H

View File

@@ -1,170 +0,0 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "audio_effect.h"
#include "audio_common.h"
#include <climits>
#include <cstring>
/*
* Mixing Audio in integer domain to avoid FP calculation
* (FG * ( MixFactor * 16 ) + BG * ( (1.0f-MixFactor) * 16 )) / 16
*/
static const int32_t kFloatToIntMapFactor = 128;
static const uint32_t kMsPerSec = 1000;
/**
 * Constructor for AudioDelay.
 * Converts the float decay weight into an integer feedback factor (scaled by
 * kFloatToIntMapFactor) so process() can mix without floating point, then
 * allocates the delay-line buffer.
 * @param sampleRate    stream sample rate (same unit as AudioFormat's)
 * @param channelCount  number of channels
 * @param format        PCM sample format (SLuint32, bits per sample)
 * @param delayTimeInMs initial delay-line length in milliseconds
 * @param decayWeight   echo feedback weight in (0.0, 1.0)
 */
AudioDelay::AudioDelay(int32_t sampleRate, int32_t channelCount,
                       SLuint32 format, size_t delayTimeInMs,
                       float decayWeight)
    : AudioFormat(sampleRate, channelCount, format),
      delayTime_(delayTimeInMs),
      decayWeight_(decayWeight) {
  // Integer mixing factors: feedback + live always sum to kFloatToIntMapFactor.
  feedbackFactor_ = static_cast<int32_t>(decayWeight_ * kFloatToIntMapFactor);
  liveAudioFactor_ = kFloatToIntMapFactor - feedbackFactor_;
  allocateBuffer();
}
/**
 * Destructor: release the delay-line buffer.
 */
AudioDelay::~AudioDelay() {
  // buffer_ is allocated with new uint8_t[] in allocateBuffer(), so it must be
  // released with delete[]; plain delete on a new[] pointer is undefined
  // behavior.
  if (buffer_) delete[] static_cast<uint8_t*>(buffer_);
}
/**
 * Configure the delay time (in milliseconds), dynamically adjustable.
 * Reallocates the delay-line buffer for the new length under the lock so the
 * audio thread's process() never sees a half-built buffer.
 * @param delayTimeInMS new delay in milliseconds
 * @return true if the delay time was set successfully
 */
bool AudioDelay::setDelayTime(size_t delayTimeInMS) {
  if (delayTimeInMS == delayTime_) return true;
  std::lock_guard<std::mutex> lock(lock_);
  if (buffer_) {
    // Allocated with new uint8_t[] in allocateBuffer(): must use delete[]
    // (plain delete on a new[] pointer is undefined behavior).
    delete[] static_cast<uint8_t*>(buffer_);
    buffer_ = nullptr;
  }
  delayTime_ = delayTimeInMS;
  allocateBuffer();
  return buffer_ != nullptr;
}
/**
 * Internal helper function to allocate buffer for the delay
 * - calculate the buffer size for the delay time
 * - allocate and zero out buffer (0 means silent audio)
 * - configure bufSize_ to be size of audioFrames
 */
void AudioDelay::allocateBuffer(void) {
  // delayTime_ (ms) -> seconds.
  float floatDelayTime = (float)delayTime_ / kMsPerSec;
  // NOTE(review): the extra /kMsPerSec implies sampleRate_ is in milliHertz
  // (Hz * 1000, matching SLmilliHertz in audio_main.cpp) — confirm.
  float fNumFrames = floatDelayTime * (float)sampleRate_ / kMsPerSec;
  // Round to nearest whole frame, then expand to samples.
  size_t sampleCount = static_cast<uint32_t>(fNumFrames + 0.5f) * channelCount_;
  uint32_t bytePerSample = format_ / 8;
  assert(bytePerSample <= 4 && bytePerSample);
  uint32_t bytePerFrame = channelCount_ * bytePerSample;
  // get bufCapacity in bytes, rounded up to a whole number of frames.
  bufCapacity_ = sampleCount * bytePerSample;
  bufCapacity_ =
      ((bufCapacity_ + bytePerFrame - 1) / bytePerFrame) * bytePerFrame;
  buffer_ = new uint8_t[bufCapacity_];
  assert(buffer_);
  // Zero-fill: silence until the delay line warms up.
  memset(buffer_, 0, bufCapacity_);
  curPos_ = 0;
  // bufSize_ is in Frames ( not samples, not bytes )
  bufSize_ = bufCapacity_ / bytePerFrame;
}
/**
 * @return the currently configured delay time, in milliseconds.
 */
size_t AudioDelay::getDelayTime(void) const {
  return delayTime_;
}
/**
 * setDecayWeight(): set the decay factor
 * ratio: value of 0.0 -- 1.0f; values outside the open interval are ignored.
 *
 * the calculation is in integer ( not in float )
 * for performance purpose
 */
void AudioDelay::setDecayWeight(float weight) {
  if (weight > 0.0f && weight < 1.0f) {
    // Fix: also record the accepted weight so getDecayWeight() reflects the
    // latest setting instead of the constructor-time value.
    decayWeight_ = weight;
    float feedback = (weight * kFloatToIntMapFactor + 0.5f);
    feedbackFactor_ = static_cast<int32_t>(feedback);
    liveAudioFactor_ = kFloatToIntMapFactor - feedbackFactor_;
  }
}
/** @return the most recently accepted decay weight. */
float AudioDelay::getDecayWeight(void) const { return decayWeight_; }
/**
 * process() filter live audio with "echo" effect:
 * delay time is run-time adjustable
 * decay time could also be adjustable, but not used
 * in this sample, hardcoded to .5
 *
 * Runs on the audio callback path: it never blocks. If the lock is held
 * (e.g. setDelayTime() is reallocating), the block is passed through
 * unprocessed rather than stalling the callback.
 *
 * @param liveAudio is recorded audio stream
 * @param numFrames is length of liveAudio in Frames ( not in byte )
 */
void AudioDelay::process(int16_t* liveAudio, int32_t numFrames) {
  // Nothing to do when echo is disabled or the delay line is too short.
  if (feedbackFactor_ == 0 || bufSize_ < numFrames) {
    return;
  }
  // Non-blocking: skip this block if a reconfiguration holds the lock.
  if (!lock_.try_lock()) {
    return;
  }
  // Wrap the write cursor when the block would run off the end.
  if (numFrames + curPos_ > bufSize_) {
    curPos_ = 0;
  }
  // process every sample
  int32_t sampleCount = channelCount_ * numFrames;
  int16_t* samples = &static_cast<int16_t*>(buffer_)[curPos_ * channelCount_];
  for (size_t idx = 0; idx < sampleCount; idx++) {
#if 1
    // Integer mix: delayed * feedback + live * (1-feedback), both scaled by
    // kFloatToIntMapFactor, then clamped to the int16 range.
    int32_t curSample =
        (samples[idx] * feedbackFactor_ + liveAudio[idx] * liveAudioFactor_) /
        kFloatToIntMapFactor;
    if (curSample > SHRT_MAX)
      curSample = SHRT_MAX;
    else if (curSample < SHRT_MIN)
      curSample = SHRT_MIN;
    // Output the delayed sample; store the mixed sample back into the line.
    liveAudio[idx] = samples[idx];
    samples[idx] = static_cast<int16_t>(curSample);
#else
    // Pure delay
    int16_t tmp = liveAudio[idx];
    liveAudio[idx] = samples[idx];
    samples[idx] = tmp;
#endif
  }
  curPos_ += numFrames;
  lock_.unlock();
}

View File

@@ -1,66 +0,0 @@
/*
 * Copyright 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef EFFECT_PROCESSOR_H
#define EFFECT_PROCESSOR_H
#include <SLES/OpenSLES_Android.h>
#include <cstdint>
#include <atomic>
#include <mutex>
// Base class carrying the PCM stream description shared by effects.
class AudioFormat {
 protected:
  int32_t sampleRate_ = SL_SAMPLINGRATE_48;             // stream sample rate
  int32_t channelCount_ = 2;                            // channels per frame
  SLuint32 format_ = SL_PCMSAMPLEFORMAT_FIXED_16;       // bits per sample
  AudioFormat(int32_t sampleRate, int32_t channelCount, SLuint32 format)
      : sampleRate_(sampleRate), channelCount_(channelCount), format_(format){};
  virtual ~AudioFormat() {}
};
/**
 * An audio delay effect:
 *   - decay is for feedback(echo)weight
 *   - delay time is adjustable
 */
class AudioDelay : public AudioFormat {
 public:
  ~AudioDelay();
  explicit AudioDelay(int32_t sampleRate, int32_t channelCount, SLuint32 format,
                      size_t delayTimeInMs, float Weight);
  // Reconfigure the delay-line length; returns true on success.
  bool setDelayTime(size_t delayTimeInMiliSec);
  size_t getDelayTime(void) const;
  // Set/get the echo feedback weight, valid range (0.0, 1.0).
  void setDecayWeight(float weight);
  float getDecayWeight(void) const;
  // In-place echo processing; numFrames is in frames, not bytes.
  void process(int16_t *liveAudio, int32_t numFrames);
 private:
  size_t delayTime_ = 0;        // delay in milliseconds
  float decayWeight_ = 0.5;     // feedback weight
  void *buffer_ = nullptr;      // delay-line storage (new uint8_t[])
  size_t bufCapacity_ = 0;      // capacity in bytes
  size_t bufSize_ = 0;          // capacity in frames
  size_t curPos_ = 0;           // write cursor in frames
  std::mutex lock_;             // guards buffer_ reallocation vs. process()
  int32_t feedbackFactor_;      // integer-scaled feedback weight
  int32_t liveAudioFactor_;     // integer-scaled live-audio weight
  void allocateBuffer(void);
};
#endif  // EFFECT_PROCESSOR_H

View File

@@ -1,259 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "jni_interface.h"
#include "audio_recorder.h"
#include "audio_player.h"
#include "audio_effect.h"
#include "audio_common.h"
#include <jni.h>
#include <SLES/OpenSLES_Android.h>
#include <sys/types.h>
#include <cassert>
#include <cstring>
// Global state tying together the OpenSL engine, recorder, player, the
// shared buffer queues, and the echo effect.
struct EchoAudioEngine {
  SLmilliHertz fastPathSampleRate_;   // sample rate in milliHertz (Hz * 1000)
  uint32_t fastPathFramesPerBuf_;     // frames per fast-path buffer
  uint16_t sampleChannels_;           // channel count
  uint16_t bitsPerSample_;            // PCM sample size in bits
  SLObjectItf slEngineObj_;
  SLEngineItf slEngineItf_;
  AudioRecorder *recorder_;
  AudioPlayer *player_;
  AudioQueue *freeBufQueue_;  // Owner of the queue
  AudioQueue *recBufQueue_;   // Owner of the queue
  sample_buf *bufs_;          // backing storage for both queues
  uint32_t bufCount_;
  uint32_t frameCount_;
  int64_t echoDelay_;         // echo delay in ms
  float echoDecay_;           // echo feedback weight
  AudioDelay *delayEffect_;
};
static EchoAudioEngine engine;
// Message handler shared by player and recorder (defined below).
bool EngineService(void *ctx, uint32_t msg, void *data);
// One-time setup: create/realize the OpenSL engine, allocate the shared
// sample buffers and queues, and build the echo (delay) effect.
// sampleRate arrives in Hz and is stored as SLmilliHertz (x1000).
JNIEXPORT void JNICALL Java_com_google_sample_echo_MainActivity_createSLEngine(
    JNIEnv *env, jclass type, jint sampleRate, jint framesPerBuf,
    jlong delayInMs, jfloat decay) {
  SLresult result;
  memset(&engine, 0, sizeof(engine));
  engine.fastPathSampleRate_ = static_cast<SLmilliHertz>(sampleRate) * 1000;
  engine.fastPathFramesPerBuf_ = static_cast<uint32_t>(framesPerBuf);
  engine.sampleChannels_ = AUDIO_SAMPLE_CHANNELS;
  engine.bitsPerSample_ = SL_PCMSAMPLEFORMAT_FIXED_16;
  result = slCreateEngine(&engine.slEngineObj_, 0, NULL, 0, NULL, NULL);
  SLASSERT(result);
  result =
      (*engine.slEngineObj_)->Realize(engine.slEngineObj_, SL_BOOLEAN_FALSE);
  SLASSERT(result);
  result = (*engine.slEngineObj_)
               ->GetInterface(engine.slEngineObj_, SL_IID_ENGINE,
                              &engine.slEngineItf_);
  SLASSERT(result);
  // compute the RECOMMENDED fast audio buffer size:
  //   the lower latency required
  //     *) the smaller the buffer should be (adjust it here) AND
  //     *) the less buffering should be before starting player AFTER
  //        receiving the recorder buffer
  //   Adjust the bufSize here to fit your bill [before it busts]
  uint32_t bufSize = engine.fastPathFramesPerBuf_ * engine.sampleChannels_ *
                     engine.bitsPerSample_;
  bufSize = (bufSize + 7) >> 3;  // bits --> byte
  engine.bufCount_ = BUF_COUNT;
  engine.bufs_ = allocateSampleBufs(engine.bufCount_, bufSize);
  assert(engine.bufs_);
  engine.freeBufQueue_ = new AudioQueue(engine.bufCount_);
  engine.recBufQueue_ = new AudioQueue(engine.bufCount_);
  assert(engine.freeBufQueue_ && engine.recBufQueue_);
  // All buffers start on the free queue; the recorder will pull from here.
  for (uint32_t i = 0; i < engine.bufCount_; i++) {
    engine.freeBufQueue_->push(&engine.bufs_[i]);
  }
  engine.echoDelay_ = delayInMs;
  engine.echoDecay_ = decay;
  engine.delayEffect_ = new AudioDelay(
      engine.fastPathSampleRate_, engine.sampleChannels_, engine.bitsPerSample_,
      engine.echoDelay_, engine.echoDecay_);
  assert(engine.delayEffect_);
}
// Update echo delay/decay at runtime.
// NOTE(review): always returns JNI_FALSE, even on success — the jboolean
// return appears unused by the Java side; confirm before relying on it.
JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_configureEcho(JNIEnv *env, jclass type,
                                                       jint delayInMs,
                                                       jfloat decay) {
  engine.echoDelay_ = delayInMs;
  engine.echoDecay_ = decay;
  engine.delayEffect_->setDelayTime(delayInMs);
  engine.delayEffect_->setDecayWeight(decay);
  return JNI_FALSE;
}
// Create the buffer-queue audio player using the engine's cached fast-path
// format, wire it to the shared queues, and register the engine callback.
// Returns JNI_TRUE on success.
JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_createSLBufferQueueAudioPlayer(
    JNIEnv *env, jclass type) {
  SampleFormat sampleFormat;
  memset(&sampleFormat, 0, sizeof(sampleFormat));
  sampleFormat.pcmFormat_ = (uint16_t)engine.bitsPerSample_;
  sampleFormat.framesPerBuf_ = engine.fastPathFramesPerBuf_;
  // SampleFormat.representation_ = SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
  sampleFormat.channels_ = (uint16_t)engine.sampleChannels_;
  sampleFormat.sampleRate_ = engine.fastPathSampleRate_;
  engine.player_ = new AudioPlayer(&sampleFormat, engine.slEngineItf_);
  assert(engine.player_);
  if (engine.player_ == nullptr) return JNI_FALSE;
  // Player consumes from the record queue and recycles into the free queue.
  engine.player_->SetBufQueue(engine.recBufQueue_, engine.freeBufQueue_);
  engine.player_->RegisterCallback(EngineService, (void *)&engine);
  return JNI_TRUE;
}
// Tear down the audio player; safe to call more than once.
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteSLBufferQueueAudioPlayer(
    JNIEnv *env, jclass type) {
  if (engine.player_ == nullptr) {
    return;  // already deleted (or never created)
  }
  delete engine.player_;
  engine.player_ = nullptr;
}
// Create the audio recorder with the same fast-path format as the player,
// wire it to the shared queues, and register the engine callback.
// Returns JNI_TRUE on success.
JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_createAudioRecorder(JNIEnv *env,
                                                             jclass type) {
  SampleFormat sampleFormat;
  memset(&sampleFormat, 0, sizeof(sampleFormat));
  sampleFormat.pcmFormat_ = static_cast<uint16_t>(engine.bitsPerSample_);
  // SampleFormat.representation_ = SL_ANDROID_PCM_REPRESENTATION_SIGNED_INT;
  sampleFormat.channels_ = engine.sampleChannels_;
  sampleFormat.sampleRate_ = engine.fastPathSampleRate_;
  sampleFormat.framesPerBuf_ = engine.fastPathFramesPerBuf_;
  engine.recorder_ = new AudioRecorder(&sampleFormat, engine.slEngineItf_);
  if (!engine.recorder_) {
    return JNI_FALSE;
  }
  // Recorder pulls empty buffers from the free queue, fills them, and pushes
  // them onto the record queue for the player.
  engine.recorder_->SetBufQueues(engine.freeBufQueue_, engine.recBufQueue_);
  engine.recorder_->RegisterCallback(EngineService, (void *)&engine);
  return JNI_TRUE;
}
// Tear down the audio recorder; safe to call more than once.
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteAudioRecorder(JNIEnv *env,
                                                             jclass type) {
  AudioRecorder *doomed = engine.recorder_;
  engine.recorder_ = nullptr;
  if (doomed) {
    delete doomed;
  }
}
// Start the echo loop: player first (so it is waiting for data), then the
// recorder which begins feeding the record queue.
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_startPlay(JNIEnv *env, jclass type) {
  engine.frameCount_ = 0;
  /*
   * start player: make it into waitForData state
   */
  if (SL_BOOLEAN_FALSE == engine.player_->Start()) {
    LOGE("====%s failed", __FUNCTION__);
    return;
  }
  engine.recorder_->Start();
}
// Stop the echo loop and destroy both endpoints. Recorder is stopped first so
// no new buffers arrive while the player drains.
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_stopPlay(JNIEnv *env, jclass type) {
  engine.recorder_->Stop();
  engine.player_->Stop();
  delete engine.recorder_;
  delete engine.player_;
  engine.recorder_ = NULL;
  engine.player_ = NULL;
}
// Final cleanup: release queues, sample buffers, the OpenSL engine object,
// and the delay effect.
JNIEXPORT void JNICALL Java_com_google_sample_echo_MainActivity_deleteSLEngine(
    JNIEnv *env, jclass type) {
  delete engine.recBufQueue_;
  delete engine.freeBufQueue_;
  releaseSampleBufs(engine.bufs_, engine.bufCount_);
  if (engine.slEngineObj_ != NULL) {
    // Destroying the engine object invalidates all interfaces derived from it.
    (*engine.slEngineObj_)->Destroy(engine.slEngineObj_);
    engine.slEngineObj_ = NULL;
    engine.slEngineItf_ = NULL;
  }
  if (engine.delayEffect_) {
    delete engine.delayEffect_;
    engine.delayEffect_ = nullptr;
  }
}
// Debug helper: count buffers across all queues (player device, recorder
// device, free, record). The total should always equal engine.bufCount_;
// a mismatch means buffers were leaked somewhere in the pipeline.
uint32_t dbgEngineGetBufCount(void) {
  uint32_t count = engine.player_->dbgGetDevBufCount();
  count += engine.recorder_->dbgGetDevBufCount();
  count += engine.freeBufQueue_->size();
  count += engine.recBufQueue_->size();
  LOGE(
      "Buf Disrtibutions: PlayerDev=%d, RecDev=%d, FreeQ=%d, "
      "RecQ=%d",
      engine.player_->dbgGetDevBufCount(),
      engine.recorder_->dbgGetDevBufCount(), engine.freeBufQueue_->size(),
      engine.recBufQueue_->size());
  if (count != engine.bufCount_) {
    LOGE("====Lost Bufs among the queue(supposed = %d, found = %d)", BUF_COUNT,
         count);
  }
  return count;
}
/*
 * simple message passing for player/recorder to communicate with engine.
 * Runs on the audio callback threads — keep handlers short and non-blocking.
 */
bool EngineService(void *ctx, uint32_t msg, void *data) {
  assert(ctx == &engine);
  switch (msg) {
    case ENGINE_SERVICE_MSG_RETRIEVE_DUMP_BUFS: {
      // Caller asks for the current total buffer count (debug aid).
      *(static_cast<uint32_t *>(data)) = dbgEngineGetBufCount();
      break;
    }
    case ENGINE_SERVICE_MSG_RECORDED_AUDIO_AVAILABLE: {
      // adding audio delay effect: data is the freshly recorded sample_buf,
      // processed in place before the player consumes it.
      sample_buf *buf = static_cast<sample_buf *>(data);
      assert(engine.fastPathFramesPerBuf_ ==
             buf->size_ / engine.sampleChannels_ / (engine.bitsPerSample_ / 8));
      engine.delayEffect_->process(reinterpret_cast<int16_t *>(buf->buf_),
                                   engine.fastPathFramesPerBuf_);
      break;
    }
    default:
      assert(false);
      return false;
  }
  return true;
}

View File

@@ -1,260 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstdlib>
#include "audio_player.h"
/*
 * Called by OpenSL SimpleBufferQueue for every audio buffer played
 * directly pass thru to our handler.
 * The regularity of this callback from openSL/Android System affects
 * playback continuity. If it does not callback in the regular time
 * slot, you are under big pressure for audio processing[here we do
 * not do any filtering/mixing]. Callback from fast audio path are
 * much more regular than other audio paths by my observation. If it
 * very regular, you could buffer much less audio samples between
 * recorder and player, hence lower latency.
 */
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *ctx) {
  // ctx is the AudioPlayer registered in the constructor.
  (static_cast<AudioPlayer *>(ctx))->ProcessSLCallback(bq);
}
// Buffer-queue callback body: recycle the just-played buffer, enqueue the
// next recorded one. While only the silent buffer is circulating, playback of
// real audio is deferred until PLAY_KICKSTART_BUFFER_COUNT buffers are ready.
void AudioPlayer::ProcessSLCallback(SLAndroidSimpleBufferQueueItf bq) {
#ifdef ENABLE_LOG
  logFile_->logTime();
#endif
  // Serialize against Stop()/destructor touching the queues.
  std::lock_guard<std::mutex> lock(stopMutex_);
  // retrieve the finished device buf and put onto the free queue
  // so recorder could re-use it
  sample_buf *buf;
  if (!devShadowQueue_->front(&buf)) {
    /*
     * This should not happen: we got a callback,
     * but we have no buffer in deviceShadowedQueue
     * we lost buffers this way...(ERROR)
     */
    if (callback_) {
      uint32_t count;
      callback_(ctx_, ENGINE_SERVICE_MSG_RETRIEVE_DUMP_BUFS, &count);
    }
    return;
  }
  devShadowQueue_->pop();
  if (buf != &silentBuf_) {
    // Normal steady state: recycle the played buffer, enqueue the next one.
    buf->size_ = 0;
    freeQueue_->push(buf);
    if (!playQueue_->front(&buf)) {
#ifdef ENABLE_LOG
      logFile_->log("%s", "====Warning: running out of the Audio buffers");
#endif
      return;
    }
    devShadowQueue_->push(buf);
    (*bq)->Enqueue(bq, buf->buf_, buf->size_);
    playQueue_->pop();
    return;
  }
  // The silent buffer just finished: keep looping silence until enough
  // recorded buffers have accumulated to kickstart real playback.
  if (playQueue_->size() < PLAY_KICKSTART_BUFFER_COUNT) {
    (*bq)->Enqueue(bq, buf->buf_, buf->size_);
    devShadowQueue_->push(&silentBuf_);
    return;
  }
  assert(PLAY_KICKSTART_BUFFER_COUNT <=
         (DEVICE_SHADOW_BUFFER_QUEUE_LEN - devShadowQueue_->size()));
  // Kickstart: enqueue a burst of recorded buffers in one go.
  for (int32_t idx = 0; idx < PLAY_KICKSTART_BUFFER_COUNT; idx++) {
    playQueue_->front(&buf);
    playQueue_->pop();
    devShadowQueue_->push(buf);
    (*bq)->Enqueue(bq, buf->buf_, buf->size_);
  }
}
// Build the OpenSL output mix and buffer-queue player for the given format,
// register the buffer-queue callback, and prepare the shadow queue plus a
// zero-filled "silent" buffer used to keep the device fed before real audio
// is available.
AudioPlayer::AudioPlayer(SampleFormat *sampleFormat, SLEngineItf slEngine)
    : freeQueue_(nullptr),
      playQueue_(nullptr),
      devShadowQueue_(nullptr),
      callback_(nullptr) {
  SLresult result;
  assert(sampleFormat);
  sampleInfo_ = *sampleFormat;
  result = (*slEngine)
               ->CreateOutputMix(slEngine, &outputMixObjectItf_, 0, NULL, NULL);
  SLASSERT(result);
  // realize the output mix
  result =
      (*outputMixObjectItf_)->Realize(outputMixObjectItf_, SL_BOOLEAN_FALSE);
  SLASSERT(result);
  // configure audio source
  SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {
      SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, DEVICE_SHADOW_BUFFER_QUEUE_LEN};
  SLAndroidDataFormat_PCM_EX format_pcm;
  ConvertToSLSampleFormat(&format_pcm, &sampleInfo_);
  SLDataSource audioSrc = {&loc_bufq, &format_pcm};
  // configure audio sink
  SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX,
                                        outputMixObjectItf_};
  SLDataSink audioSnk = {&loc_outmix, NULL};
  /*
   * create fast path audio player: SL_IID_BUFFERQUEUE and SL_IID_VOLUME
   * and other non-signal processing interfaces are ok.
   */
  SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
  SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
  result = (*slEngine)->CreateAudioPlayer(
      slEngine, &playerObjectItf_, &audioSrc, &audioSnk,
      sizeof(ids) / sizeof(ids[0]), ids, req);
  SLASSERT(result);
  // realize the player
  result = (*playerObjectItf_)->Realize(playerObjectItf_, SL_BOOLEAN_FALSE);
  SLASSERT(result);
  // get the play interface
  result = (*playerObjectItf_)
               ->GetInterface(playerObjectItf_, SL_IID_PLAY, &playItf_);
  SLASSERT(result);
  // get the buffer queue interface
  result = (*playerObjectItf_)
               ->GetInterface(playerObjectItf_, SL_IID_BUFFERQUEUE,
                              &playBufferQueueItf_);
  SLASSERT(result);
  // register callback on the buffer queue
  result = (*playBufferQueueItf_)
               ->RegisterCallback(playBufferQueueItf_, bqPlayerCallback, this);
  SLASSERT(result);
  result = (*playItf_)->SetPlayState(playItf_, SL_PLAYSTATE_STOPPED);
  SLASSERT(result);
  // create an empty queue to track deviceQueue
  devShadowQueue_ = new AudioQueue(DEVICE_SHADOW_BUFFER_QUEUE_LEN);
  assert(devShadowQueue_);
  // Allocate one buffer's worth of silence (containerSize is in bits).
  silentBuf_.cap_ = (format_pcm.containerSize >> 3) * format_pcm.numChannels *
                    sampleInfo_.framesPerBuf_;
  silentBuf_.buf_ = new uint8_t[silentBuf_.cap_];
  memset(silentBuf_.buf_, 0, silentBuf_.cap_);
  silentBuf_.size_ = silentBuf_.cap_;
#ifdef ENABLE_LOG
  std::string name = "play";
  logFile_ = new AndroidLog(name);
#endif
}
// Destroy the player and output mix, returning every in-flight buffer
// (except the internally owned silent buffer) to the free queue.
AudioPlayer::~AudioPlayer() {
  // Block a concurrent ProcessSLCallback() while tearing down.
  std::lock_guard<std::mutex> lock(stopMutex_);
  // destroy buffer queue audio player object, and invalidate all associated
  // interfaces
  if (playerObjectItf_ != NULL) {
    (*playerObjectItf_)->Destroy(playerObjectItf_);
  }
  // Consume all non-completed audio buffers
  sample_buf *buf = NULL;
  while (devShadowQueue_->front(&buf)) {
    buf->size_ = 0;
    devShadowQueue_->pop();
    if (buf != &silentBuf_) {
      freeQueue_->push(buf);
    }
  }
  delete devShadowQueue_;
  while (playQueue_->front(&buf)) {
    buf->size_ = 0;
    playQueue_->pop();
    freeQueue_->push(buf);
  }
  // destroy output mix object, and invalidate all associated interfaces
  if (outputMixObjectItf_) {
    (*outputMixObjectItf_)->Destroy(outputMixObjectItf_);
  }
  delete[] silentBuf_.buf_;
}
// Attach the externally owned queues: playQ supplies recorded buffers to
// play, freeQ receives played buffers for re-use by the recorder.
void AudioPlayer::SetBufQueue(AudioQueue *playQ, AudioQueue *freeQ) {
  freeQueue_ = freeQ;
  playQueue_ = playQ;
}
// Start playback: prime the device queue with the silent buffer and switch
// to SL_PLAYSTATE_PLAYING; real audio is enqueued later by the callback.
// NOTE(review): declared SLresult but returns SL_BOOLEAN_TRUE/FALSE — callers
// (startPlay in audio_main.cpp) compare against SL_BOOLEAN_FALSE; confirm
// before changing the return type.
SLresult AudioPlayer::Start(void) {
  SLuint32 state;
  SLresult result = (*playItf_)->GetPlayState(playItf_, &state);
  if (result != SL_RESULT_SUCCESS) {
    return SL_BOOLEAN_FALSE;
  }
  if (state == SL_PLAYSTATE_PLAYING) {
    return SL_BOOLEAN_TRUE;
  }
  result = (*playItf_)->SetPlayState(playItf_, SL_PLAYSTATE_STOPPED);
  SLASSERT(result);
  // Prime with silence so the device has something to play immediately.
  result =
      (*playBufferQueueItf_)
          ->Enqueue(playBufferQueueItf_, silentBuf_.buf_, silentBuf_.size_);
  SLASSERT(result);
  devShadowQueue_->push(&silentBuf_);
  result = (*playItf_)->SetPlayState(playItf_, SL_PLAYSTATE_PLAYING);
  SLASSERT(result);
  return SL_BOOLEAN_TRUE;
}
// Stop playback and clear the device buffer queue. No-op when already
// stopped. The mutex excludes a concurrently running callback.
void AudioPlayer::Stop(void) {
  SLuint32 state;
  SLresult result = (*playItf_)->GetPlayState(playItf_, &state);
  SLASSERT(result);
  if (state == SL_PLAYSTATE_STOPPED) return;
  std::lock_guard<std::mutex> lock(stopMutex_);
  result = (*playItf_)->SetPlayState(playItf_, SL_PLAYSTATE_STOPPED);
  SLASSERT(result);
  // Drop any buffers still queued on the device.
  (*playBufferQueueItf_)->Clear(playBufferQueueItf_);
#ifdef ENABLE_LOG
  if (logFile_) {
    delete logFile_;
    logFile_ = nullptr;
  }
#endif
}
// Register the engine-service callback invoked from the audio thread,
// together with its opaque context pointer.
void AudioPlayer::RegisterCallback(ENGINE_CALLBACK cb, void *ctx) {
  ctx_ = ctx;
  callback_ = cb;
}
// Debug: number of buffers currently shadowing the device queue.
uint32_t AudioPlayer::dbgGetDevBufCount(void) {
  return (devShadowQueue_->size());
}

View File

@@ -1,55 +0,0 @@
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef NATIVE_AUDIO_AUDIO_PLAYER_H
#define NATIVE_AUDIO_AUDIO_PLAYER_H
#include <sys/types.h>
#include "audio_common.h"
#include "buf_manager.h"
#include "debug_utils.h"
// OpenSL ES buffer-queue playback endpoint. Consumes recorded buffers from
// playQueue_ and recycles them to freeQueue_ (both externally owned).
class AudioPlayer {
  // buffer queue player interfaces
  SLObjectItf outputMixObjectItf_;
  SLObjectItf playerObjectItf_;
  SLPlayItf playItf_;
  SLAndroidSimpleBufferQueueItf playBufferQueueItf_;
  SampleFormat sampleInfo_;
  AudioQueue *freeQueue_;       // user
  AudioQueue *playQueue_;       // user
  AudioQueue *devShadowQueue_;  // owner
  ENGINE_CALLBACK callback_;    // engine-service callback
  void *ctx_;                   // opaque context for callback_
  sample_buf silentBuf_;        // zero-filled buffer used before kickstart
#ifdef ENABLE_LOG
  AndroidLog *logFile_;
#endif
  std::mutex stopMutex_;        // serializes Stop()/dtor vs. the callback
 public:
  explicit AudioPlayer(SampleFormat *sampleFormat, SLEngineItf engine);
  ~AudioPlayer();
  void SetBufQueue(AudioQueue *playQ, AudioQueue *freeQ);
  // NOTE(review): returns SL_BOOLEAN_TRUE/FALSE despite the SLresult type.
  SLresult Start(void);
  void Stop(void);
  // Buffer-queue callback body; invoked on the OpenSL audio thread.
  void ProcessSLCallback(SLAndroidSimpleBufferQueueItf bq);
  uint32_t dbgGetDevBufCount(void);
  void RegisterCallback(ENGINE_CALLBACK cb, void *ctx);
};
#endif  // NATIVE_AUDIO_AUDIO_PLAYER_H

View File

@@ -1,213 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstring>
#include <cstdlib>
#include "audio_recorder.h"
/*
 * bqRecorderCallback(): invoked by OpenSL ES each time a device buffer has
 *                       been filled; forwards the notification to the
 *                       owning AudioRecorder instance.
 */
// Trampoline from the C-style OpenSL ES callback into the C++ object.
void bqRecorderCallback(SLAndroidSimpleBufferQueueItf bq, void *rec) {
  auto *recorder = static_cast<AudioRecorder *>(rec);
  recorder->ProcessSLCallback(bq);
}
/*
 * Buffer-queue callback handler (runs on the OpenSL ES callback thread):
 *  - retire the buffer the device just filled (tracked in devShadowQueue_),
 *  - hand it to the engine via callback_ and publish it on recQueue_,
 *  - refill the device queue from freeQueue_,
 *  - stop the recorder when no buffers are left for the device.
 */
void AudioRecorder::ProcessSLCallback(SLAndroidSimpleBufferQueueItf bq) {
#ifdef ENABLE_LOG
  recLog_->logTime();
#endif
  assert(bq == recBufQueueItf_);
  sample_buf *dataBuf = NULL;
  if (!devShadowQueue_->front(&dataBuf)) {
    /*
     * BUGFIX: front() fails when the shadow queue is empty (e.g. a late
     * callback racing with Stop(), which clears the device queue). The
     * original code ignored the return value and dereferenced a NULL
     * dataBuf below.
     */
    LOGE("=====OutOfDeviceShadowBuffers @ %s", __FUNCTION__);
    return;
  }
  devShadowQueue_->pop();
  dataBuf->size_ = dataBuf->cap_;  // device only calls us when it is really
                                   // full
  callback_(ctx_, ENGINE_SERVICE_MSG_RECORDED_AUDIO_AVAILABLE, dataBuf);
  recQueue_->push(dataBuf);

  // Keep the device fed: move as many free buffers as fit onto the device
  // queue (mirrored in devShadowQueue_).
  sample_buf *freeBuf;
  while (freeQueue_->front(&freeBuf) && devShadowQueue_->push(freeBuf)) {
    freeQueue_->pop();
    SLresult result = (*bq)->Enqueue(bq, freeBuf->buf_, freeBuf->cap_);
    SLASSERT(result);
  }

  ++audioBufCount;

  // should leave the device to sleep to save power if no buffers
  if (devShadowQueue_->size() == 0) {
    (*recItf_)->SetRecordState(recItf_, SL_RECORDSTATE_STOPPED);
  }
}
/*
 * Create an OpenSL ES audio recorder (requires the RECORD_AUDIO
 * permission):
 *   source = the default audio input device,
 *   sink   = an Android simple buffer queue in the requested PCM format.
 * The voice-recognition preset is requested (best effort) before Realize()
 * to minimize input signal processing. freeQueue_/recQueue_ stay nullptr
 * until SetBufQueues() is called; Start() checks for that.
 */
AudioRecorder::AudioRecorder(SampleFormat *sampleFormat, SLEngineItf slEngine)
    : freeQueue_(nullptr),
      recQueue_(nullptr),
      devShadowQueue_(nullptr),
      callback_(nullptr) {
  SLresult result;
  sampleInfo_ = *sampleFormat;
  SLAndroidDataFormat_PCM_EX format_pcm;
  ConvertToSLSampleFormat(&format_pcm, &sampleInfo_);

  // configure audio source
  SLDataLocator_IODevice loc_dev = {SL_DATALOCATOR_IODEVICE,
                                    SL_IODEVICE_AUDIOINPUT,
                                    SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
  SLDataSource audioSrc = {&loc_dev, NULL};

  // configure audio sink
  SLDataLocator_AndroidSimpleBufferQueue loc_bq = {
      SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, DEVICE_SHADOW_BUFFER_QUEUE_LEN};
  SLDataSink audioSnk = {&loc_bq, &format_pcm};

  // create audio recorder
  // (requires the RECORD_AUDIO permission)
  const SLInterfaceID id[2] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
                               SL_IID_ANDROIDCONFIGURATION};
  const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
  result = (*slEngine)->CreateAudioRecorder(
      slEngine, &recObjectItf_, &audioSrc, &audioSnk,
      sizeof(id) / sizeof(id[0]), id, req);
  SLASSERT(result);

  // Configure the voice recognition preset which has no
  // signal processing for lower latency.
  SLAndroidConfigurationItf inputConfig;
  result = (*recObjectItf_)
               ->GetInterface(recObjectItf_, SL_IID_ANDROIDCONFIGURATION,
                              &inputConfig);
  if (SL_RESULT_SUCCESS == result) {
    SLuint32 presetValue = SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION;
    (*inputConfig)
        ->SetConfiguration(inputConfig, SL_ANDROID_KEY_RECORDING_PRESET,
                           &presetValue, sizeof(SLuint32));
  }
  // Realize() must precede the GetInterface() calls below.
  result = (*recObjectItf_)->Realize(recObjectItf_, SL_BOOLEAN_FALSE);
  SLASSERT(result);
  result =
      (*recObjectItf_)->GetInterface(recObjectItf_, SL_IID_RECORD, &recItf_);
  SLASSERT(result);
  result = (*recObjectItf_)
               ->GetInterface(recObjectItf_, SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
                              &recBufQueueItf_);
  SLASSERT(result);
  result = (*recBufQueueItf_)
               ->RegisterCallback(recBufQueueItf_, bqRecorderCallback, this);
  SLASSERT(result);

  // Shadow queue mirrors the buffers currently owned by the device.
  devShadowQueue_ = new AudioQueue(DEVICE_SHADOW_BUFFER_QUEUE_LEN);
  assert(devShadowQueue_);
#ifdef ENABLE_LOG
  std::string name = "rec";
  recLog_ = new AndroidLog(name);
#endif
}
/*
 * Start (or restart) capturing:
 *  - requires SetBufQueues() to have been called first,
 *  - forces the device to STOPPED and clears its buffer queue,
 *  - primes the device with up to RECORD_DEVICE_KICKSTART_BUF_COUNT free
 *    buffers, then switches to RECORDING.
 * Returns SL_BOOLEAN_TRUE on success.
 */
SLboolean AudioRecorder::Start(void) {
  if (!freeQueue_ || !recQueue_ || !devShadowQueue_) {
    LOGE("====NULL poiter to Start(%p, %p, %p)", freeQueue_, recQueue_,
         devShadowQueue_);
    return SL_BOOLEAN_FALSE;
  }
  audioBufCount = 0;  // reset debug counter (incremented per SL callback)

  SLresult result;
  // in case already recording, stop recording and clear buffer queue
  result = (*recItf_)->SetRecordState(recItf_, SL_RECORDSTATE_STOPPED);
  SLASSERT(result);
  result = (*recBufQueueItf_)->Clear(recBufQueueItf_);
  SLASSERT(result);

  for (int i = 0; i < RECORD_DEVICE_KICKSTART_BUF_COUNT; i++) {
    sample_buf *buf = NULL;
    if (!freeQueue_->front(&buf)) {
      // Not fatal: record proceeds with however many buffers were primed.
      LOGE("=====OutOfFreeBuffers @ startingRecording @ (%d)", i);
      break;
    }
    freeQueue_->pop();
    // Every primed buffer must be empty and backed by real storage.
    assert(buf->buf_ && buf->cap_ && !buf->size_);

    result = (*recBufQueueItf_)->Enqueue(recBufQueueItf_, buf->buf_, buf->cap_);
    SLASSERT(result);
    devShadowQueue_->push(buf);  // mirror what the device now owns
  }

  result = (*recItf_)->SetRecordState(recItf_, SL_RECORDSTATE_RECORDING);
  SLASSERT(result);

  return (result == SL_RESULT_SUCCESS ? SL_BOOLEAN_TRUE : SL_BOOLEAN_FALSE);
}
/*
 * Stop capturing. No-op when the device is already stopped; otherwise the
 * device is set to STOPPED and anything still queued on it is discarded.
 * Buffers left in devShadowQueue_ are returned to freeQueue_ by the
 * destructor, not here.
 */
SLboolean AudioRecorder::Stop(void) {
  // in case already recording, stop recording and clear buffer queue
  SLuint32 curState;

  SLresult result = (*recItf_)->GetRecordState(recItf_, &curState);
  SLASSERT(result);
  if (curState == SL_RECORDSTATE_STOPPED) {
    return SL_BOOLEAN_TRUE;
  }
  result = (*recItf_)->SetRecordState(recItf_, SL_RECORDSTATE_STOPPED);
  SLASSERT(result);
  result = (*recBufQueueItf_)->Clear(recBufQueueItf_);
  SLASSERT(result);

#ifdef ENABLE_LOG
  recLog_->flush();
#endif

  return SL_BOOLEAN_TRUE;
}
/*
 * Tear down the recorder: Destroy() invalidates the SL object and all its
 * interfaces; buffers still tracked in devShadowQueue_ are handed back to
 * the caller-owned freeQueue_ before the shadow queue itself is deleted.
 */
AudioRecorder::~AudioRecorder() {
  // destroy audio recorder object, and invalidate all associated interfaces
  if (recObjectItf_ != NULL) {
    (*recObjectItf_)->Destroy(recObjectItf_);
  }

  if (devShadowQueue_) {
    sample_buf *buf = NULL;
    // If devShadowQueue_ is non-empty, Start() ran, so freeQueue_ was set.
    while (devShadowQueue_->front(&buf)) {
      devShadowQueue_->pop();
      freeQueue_->push(buf);
    }
    delete (devShadowQueue_);
  }
#ifdef ENABLE_LOG
  if (recLog_) {
    delete recLog_;
  }
#endif
}
/*
 * Attach caller-owned buffer queues. The recorder only borrows them; it
 * never allocates or frees these queues.
 */
void AudioRecorder::SetBufQueues(AudioQueue *freeQ, AudioQueue *recQ) {
  assert(freeQ && recQ);
  recQueue_ = recQ;
  freeQueue_ = freeQ;
}
// Register the engine callback fired when a recorded buffer is available.
void AudioRecorder::RegisterCallback(ENGINE_CALLBACK cb, void *ctx) {
  ctx_ = ctx;
  callback_ = cb;
}
// Debug helper: number of buffers currently shadow-tracked for the device.
int32_t AudioRecorder::dbgGetDevBufCount(void) {
  return static_cast<int32_t>(devShadowQueue_->size());
}

View File

@@ -1,55 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVE_AUDIO_AUDIO_RECORDER_H
#define NATIVE_AUDIO_AUDIO_RECORDER_H
#include <sys/types.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include "audio_common.h"
#include "buf_manager.h"
#include "debug_utils.h"
/*
 * AudioRecorder: capture half of the audio-echo engine, built on an
 * OpenSL ES buffer-queue recorder. Empty buffers flow in from freeQueue_,
 * filled ones are published on recQueue_; devShadowQueue_ mirrors what is
 * currently enqueued on the device.
 */
class AudioRecorder {
  SLObjectItf recObjectItf_;  // recorder object
  SLRecordItf recItf_;        // record-state control interface
  SLAndroidSimpleBufferQueueItf recBufQueueItf_;  // buffer-queue interface

  SampleFormat sampleInfo_;     // PCM capture format
  AudioQueue *freeQueue_;       // user
  AudioQueue *recQueue_;        // user
  AudioQueue *devShadowQueue_;  // owner
  uint32_t audioBufCount;       // buffers processed since Start() (debug)
  ENGINE_CALLBACK callback_;    // notified when recorded audio is available
  void *ctx_;                   // opaque context handed back to callback_

 public:
  explicit AudioRecorder(SampleFormat *, SLEngineItf engineEngine);
  ~AudioRecorder();
  SLboolean Start(void);
  SLboolean Stop(void);
  // Attach caller-owned queues; must be called before Start().
  void SetBufQueues(AudioQueue *freeQ, AudioQueue *recQ);
  // Runs on the OpenSL ES callback thread.
  void ProcessSLCallback(SLAndroidSimpleBufferQueueItf bq);
  void RegisterCallback(ENGINE_CALLBACK cb, void *ctx);
  int32_t dbgGetDevBufCount(void);

#ifdef ENABLE_LOG
  AndroidLog *recLog_;  // per-callback timing log
#endif
};
#endif // NATIVE_AUDIO_AUDIO_RECORDER_H

View File

@@ -1,202 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVE_AUDIO_BUF_MANAGER_H
#define NATIVE_AUDIO_BUF_MANAGER_H
#include <sys/types.h>
#include <SLES/OpenSLES.h>
#include <atomic>
#include <cassert>
#include <memory>
#include <limits>
#ifndef CACHE_ALIGN
#define CACHE_ALIGN 64
#endif
/*
 * ProducerConsumerQueue, borrowed from Ian NiLewis
 *
 * Fixed-capacity single-producer/single-consumer ring buffer. read_ and
 * write_ are free-running counters: their (twos-complement) difference is
 * the element count and `counter % size_` is the slot index.
 * NOTE(review): once a counter wraps past INT_MAX, `counter % size_` turns
 * negative for non-power-of-two sizes; unreachable within this sample's
 * lifetimes, but worth knowing before reuse.
 */
template <typename T>
class ProducerConsumerQueue {
 public:
  explicit ProducerConsumerQueue(int size)
      : ProducerConsumerQueue(size, new T[size]) {}

  explicit ProducerConsumerQueue(int size, T* buffer)
      : size_(size), buffer_(buffer) {
    // This is necessary because we depend on twos-complement wraparound
    // to take care of overflow conditions.
    assert(size < std::numeric_limits<int>::max());
  }

  // Copy `item` into the queue; returns false when the queue is full.
  bool push(const T& item) {
    return push([&](T* ptr) -> bool {
      *ptr = item;
      return true;
    });
  }

  // get() is idempotent between calls to commit().
  T* getWriteablePtr() {
    T* result = nullptr;
    bool check __attribute__((unused));  //= false;
    check = push([&](T* head) -> bool {
      result = head;
      return false;  // don't increment
    });
    // if there's no space, result should not have been set, and vice versa
    assert(check == (result != nullptr));
    return result;
  }

  bool commitWriteablePtr(T* ptr) {
    bool result = push([&](T* head) -> bool {
      // this writer func does nothing, because we assume that the caller
      // has already written to *ptr after acquiring it from a call to
      // get(). So just double-check that ptr is actually at the write
      // head, and return true to indicate that it's safe to advance.
      //
      // if this isn't the same pointer we got from a call to get(), then
      // something has gone terribly wrong. Either there was an intervening
      // call to push() or commit(), or the pointer is spurious.
      assert(ptr == head);
      return true;
    });
    return result;
  }

  // writer() can return false, which indicates that the caller
  // of push() changed its mind while writing (e.g. ran out of bytes)
  template <typename F>
  bool push(const F& writer) {
    bool result = false;
    int readptr = read_.load(std::memory_order_acquire);
    int writeptr = write_.load(std::memory_order_relaxed);

    // note that while readptr and writeptr will eventually
    // wrap around, taking their difference is still valid as
    // long as size_ < MAXINT.
    int space = size_ - (int)(writeptr - readptr);
    if (space >= 1) {
      result = true;
      // writer
      if (writer(buffer_.get() + (writeptr % size_))) {
        ++writeptr;
        write_.store(writeptr, std::memory_order_release);
      }
    }
    return result;
  }

  // front out the queue, but not pop-out
  bool front(T* out_item) {
    return front([&](T* ptr) -> bool {
      *out_item = *ptr;
      return true;
    });
  }

  // Drop the front element. Caller must guarantee the queue is non-empty
  // (e.g. via a preceding successful front()).
  void pop(void) {
    int readptr = read_.load(std::memory_order_relaxed);
    ++readptr;
    read_.store(readptr, std::memory_order_release);
  }

  template <typename F>
  bool front(const F& reader) {
    bool result = false;

    int writeptr = write_.load(std::memory_order_acquire);
    int readptr = read_.load(std::memory_order_relaxed);

    // As above, wraparound is ok
    int available = (int)(writeptr - readptr);
    if (available >= 1) {
      result = true;
      reader(buffer_.get() + (readptr % size_));
    }
    return result;
  }

  // Number of elements currently in the queue.
  uint32_t size(void) {
    int writeptr = write_.load(std::memory_order_acquire);
    int readptr = read_.load(std::memory_order_relaxed);

    return (uint32_t)(writeptr - readptr);
  }

 private:
  int size_;
  // BUGFIX: the storage is allocated with `new T[size]`, so the owning
  // smart pointer must be the array specialization (releases with
  // delete[]). The original `std::unique_ptr<T>` invoked scalar delete on
  // an array — undefined behavior.
  std::unique_ptr<T[]> buffer_;

  // forcing cache line alignment to eliminate false sharing of the
  // frequently-updated read and write pointers. The object is to never
  // let these get into the "shared" state where they'd cause a cache miss
  // for every write.
  alignas(CACHE_ALIGN) std::atomic<int> read_{0};
  alignas(CACHE_ALIGN) std::atomic<int> write_{0};
};
// One audio buffer plus its bookkeeping.
struct sample_buf {
  uint8_t* buf_;   // audio sample container
  uint32_t cap_;   // buffer capacity in byte
  uint32_t size_;  // audio sample size (n buf) in byte
};
using AudioQueue = ProducerConsumerQueue<sample_buf*>;

/*
 * Free `count` sample buffers (payloads and the array itself) previously
 * created by allocateSampleBufs(). Fixes vs. the original:
 *  - the array is freed even when count == 0 (the old early-return leaked
 *    a non-null `bufs` in that case);
 *  - `count` (taken by reference for exactly this purpose) is reset to 0
 *    so the caller's bookkeeping cannot dangle after release.
 */
__inline__ void releaseSampleBufs(sample_buf* bufs, uint32_t& count) {
  if (!bufs) {
    count = 0;
    return;
  }
  for (uint32_t i = 0; i < count; i++) {
    if (bufs[i].buf_) delete[] bufs[i].buf_;
  }
  delete[] bufs;
  count = 0;
}
/*
 * Allocate `count` zero-initialized sample buffers of `sizeInByte` bytes
 * each (payload allocations rounded up to 4-byte multiples). Returns the
 * array, or nullptr when fewer than 2 payloads could be allocated.
 * NOTE(review): `count` is passed by value, so the trailing `count = i`
 * never reaches the caller — a partial allocation is not reported back.
 * Left untouched to keep the signature stable; confirm callers can rely on
 * getting the full amount.
 */
__inline__ sample_buf* allocateSampleBufs(uint32_t count, uint32_t sizeInByte) {
  // both parameters are unsigned, so the original "<= 0" only ever
  // meant "== 0"
  if (count == 0 || sizeInByte == 0) {
    return nullptr;
  }
  sample_buf* bufs = new sample_buf[count];
  assert(bufs);
  memset(bufs, 0, sizeof(sample_buf) * count);

  uint32_t allocSize = (sizeInByte + 3) & ~3;  // padding to 4 bytes aligned
  uint32_t i;
  for (i = 0; i < count; i++) {
    // NOTE(review): with exceptions enabled, operator new throws instead
    // of returning nullptr, so this branch only fires under
    // -fno-exceptions builds.
    bufs[i].buf_ = new uint8_t[allocSize];
    if (bufs[i].buf_ == nullptr) {
      LOGW("====Requesting %d buffers, allocated %d in %s", count, i,
           __FUNCTION__);
      break;
    }
    bufs[i].cap_ = sizeInByte;  // callers see the requested size, not allocSize
    bufs[i].size_ = 0;          // 0 data in it
  }
  if (i < 2) {
    releaseSampleBufs(bufs, i);
    bufs = nullptr;
  }
  count = i;
  return bufs;
}
#endif // NATIVE_AUDIO_BUF_MANAGER_H

View File

@@ -1,104 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstdio>
#include <sys/stat.h>
#include "debug_utils.h"
#include "android_debug.h"
#include <inttypes.h>
// All log files are created under this path; the directory must already
// exist on the device.
static const char* FILE_PREFIX = "/sdcard/data/audio";
// Monotonically increasing suffix so successive logs get distinct names.
volatile uint32_t AndroidLog::fileIdx_ = 0;
// Default log: writes to FILE_PREFIX_<idx>; the file is opened eagerly.
AndroidLog::AndroidLog() : fp_(NULL), prevTick_(static_cast<uint64_t>(0)) {
  fileName_ = FILE_PREFIX;
  openFile();
}

// Named log: writes to FILE_PREFIX_<file_name>_<idx>.
AndroidLog::AndroidLog(std::string& file_name)
    : fp_(NULL), prevTick_(static_cast<uint64_t>(0)) {
  fileName_ = std::string(FILE_PREFIX) + std::string("_") + file_name;
  openFile();
}
AndroidLog::~AndroidLog() { flush(); }

// Flush and close the current file; a later log call reopens a fresh file
// (with a new index via openFile()). Also resets the logTime() baseline.
void AndroidLog::flush() {
  if (fp_) {
    fflush(fp_);
    fclose(fp_);
    fp_ = NULL;
  }
  prevTick_ = static_cast<uint64_t>(0);
}
// Append `size` raw bytes to the log file. Best effort: the record is
// silently dropped when the file cannot be opened.
void AndroidLog::log(void* buf, uint32_t size) {
  Lock fileLock(&mutex_);
  if (!buf || !size) return;
  if (fp_ || openFile()) {
    fwrite(buf, size, 1, fp_);
  }
}

// printf-style text record appended to the same file (best effort).
void AndroidLog::log(const char* fmt, ...) {
  Lock fileLock(&mutex_);
  if (!fmt) {
    return;
  }
  if (fp_ || openFile()) {
    va_list vp;
    va_start(vp, fmt);
    vfprintf(fp_, fmt, vp);
    va_end(vp);
  }
}
/*
 * Lazily open the current log file ("<fileName_>_<idx>"); returns the open
 * handle, or NULL on failure. Idempotent while a file is already open.
 * The recursive mutex allows the log() methods (which already hold it) to
 * call in here safely.
 */
FILE* AndroidLog::openFile() {
  Lock fileLock(&mutex_);
  if (fp_) {
    return fp_;
  }
  char fileName[64];
  // BUGFIX: fileName_ is caller-supplied, so unbounded sprintf could
  // overflow the fixed 64-byte stack buffer; snprintf truncates instead.
  snprintf(fileName, sizeof(fileName), "%s_%d", fileName_.c_str(),
           AndroidLog::fileIdx_++);
  fp_ = fopen(fileName, "wb");
  if (fp_ == NULL) {
    LOGE("====failed to open file %s", fileName);
  }
  return fp_;
}
// Write "<tick> <delta-since-previous>\n" to the log. The very first call
// only seeds the baseline and writes nothing.
void AndroidLog::logTime() {
  if (prevTick_ == static_cast<uint64_t>(0)) {
    /*
     * init counter, bypass the first one
     */
    prevTick_ = getCurrentTicks();
    return;
  }
  uint64_t curTick = getCurrentTicks();
  uint64_t delta = curTick - prevTick_;
  log("%" PRIu64 " %" PRIu64 "\n", curTick, delta);

  prevTick_ = curTick;
}
// Wall-clock time in microseconds since the Unix epoch.
uint64_t AndroidLog::getCurrentTicks() {
  struct timeval now;
  gettimeofday(&now, NULL);
  uint64_t micros = static_cast<uint64_t>(1000000) * now.tv_sec;
  return micros + now.tv_usec;
}

View File

@@ -1,61 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NATIVE_AUDIO_DEBUG_UTILS_H
#define NATIVE_AUDIO_DEBUG_UTILS_H
#include <cstdio>
#include <mutex>
#include <string>
/*
* debug_write_file()
* Write given data to a file as binary file. File name is
* "/sdcard/data/audio_%d", file_index++
* requirement: must have /sdcard/data already created on android device
*/
/*
 * Scoped guard (RAII): acquires the recursive mutex on construction and
 * releases it on destruction. Fix vs. original: copy construction and
 * assignment are deleted — a copied guard would unlock the mutex twice.
 */
class Lock {
 public:
  explicit Lock(std::recursive_mutex* mtx) : mutex_(mtx) { mutex_->lock(); }
  ~Lock() { mutex_->unlock(); }
  Lock(const Lock&) = delete;
  Lock& operator=(const Lock&) = delete;

 private:
  std::recursive_mutex* mutex_;  // not owned; must outlive the guard
};
/*
 * AndroidLog: simple file-backed trace logger. Each instance writes to
 * "/sdcard/data/audio[_name]_<idx>" (see FILE_PREFIX in debug_utils.cpp);
 * all operations are serialized by a recursive mutex.
 */
class AndroidLog {
 public:
  AndroidLog();
  AndroidLog(std::string& fileName);
  ~AndroidLog();
  // Append raw bytes / formatted text to the log file (best effort).
  void log(void* buf, uint32_t size);
  void log(const char* fmt, ...);
  // Record current tick and delta since the previous logTime() call.
  void logTime();
  void flush();
  static volatile uint32_t fileIdx_;  // shared suffix counter for file names

 private:
  uint64_t getCurrentTicks();
  FILE* fp_;
  FILE* openFile();
  uint64_t prevTick_;  // tick in microseconds (gettimeofday-based;
                       // the original "milisecond" comment was wrong)
  std::recursive_mutex mutex_;
  std::string fileName_;
};
// Dump a binary blob to "/sdcard/data/audio_%d" (directory must exist).
void debug_write_file(void* buf, uint32_t size);
#endif // NATIVE_AUDIO_DEBUG_UTILS_H

View File

@@ -1,54 +0,0 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef JNI_INTERFACE_H
#define JNI_INTERFACE_H

#include <jni.h>

/*
 * JNI entry points exposed by libecho; each must match a `native` method
 * declared on com.google.sample.echo.MainActivity (signatures must stay in
 * sync with the Java side).
 */
#ifdef __cplusplus
extern "C" {
#endif
// Engine lifecycle: created with the device-preferred sample rate and
// frames-per-buffer plus initial echo delay (ms) and decay factor.
JNIEXPORT void JNICALL Java_com_google_sample_echo_MainActivity_createSLEngine(
    JNIEnv *env, jclass, jint, jint, jlong delayInMs, jfloat decay);
JNIEXPORT void JNICALL Java_com_google_sample_echo_MainActivity_deleteSLEngine(
    JNIEnv *env, jclass type);

// Player lifecycle; create returns JNI_FALSE on failure.
JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_createSLBufferQueueAudioPlayer(
    JNIEnv *env, jclass);
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteSLBufferQueueAudioPlayer(
    JNIEnv *env, jclass type);

// Recorder lifecycle; needs the RECORD_AUDIO runtime permission.
JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_createAudioRecorder(JNIEnv *env,
                                                             jclass type);
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_deleteAudioRecorder(JNIEnv *env,
                                                             jclass type);

// Start/stop the echo loop (playback side drives the recording side).
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_startPlay(JNIEnv *env, jclass type);
JNIEXPORT void JNICALL
Java_com_google_sample_echo_MainActivity_stopPlay(JNIEnv *env, jclass type);

// Adjust echo delay/decay on the fly.
JNIEXPORT jboolean JNICALL
Java_com_google_sample_echo_MainActivity_configureEcho(JNIEnv *env, jclass type,
                                                       jint delayInMs,
                                                       jfloat decay);
#ifdef __cplusplus
}
#endif
#endif  // JNI_INTERFACE_H

View File

@@ -1,304 +0,0 @@
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.sample.echo;
import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
/**
 * UI front end of the audio-echo sample: two SeekBars control the echo
 * delay (seconds) and decay factor, and a single button starts/stops the
 * native OpenSL ES echo engine loaded from libecho.so. Recording requires
 * the RECORD_AUDIO runtime permission, requested on first button press.
 */
public class MainActivity extends Activity
        implements ActivityCompat.OnRequestPermissionsResultCallback {
    private static final int AUDIO_ECHO_REQUEST = 0;  // permission request id

    private Button controlButton;          // start/stop echo
    private TextView statusView;           // status line at screen bottom
    private String nativeSampleRate;       // device-preferred sample rate
    private String nativeSampleBufSize;    // device-preferred frames/buffer
    private SeekBar delaySeekBar;
    private TextView curDelayTV;           // floating label over delay thumb
    private int echoDelayProgress;         // delay in milliseconds
    private SeekBar decaySeekBar;
    private TextView curDecayTV;           // floating label over decay thumb
    private float echoDecayProgress;       // decay in [0, 1]
    private boolean supportRecording;      // set by queryNativeAudioParameters()
    private Boolean isPlaying = false;     // echo loop currently running
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        controlButton = (Button)findViewById((R.id.capture_control_button));
        statusView = (TextView)findViewById(R.id.statusView);
        queryNativeAudioParameters();

        // Delay slider: progress maps to milliseconds (max == 1000 ms).
        delaySeekBar = (SeekBar)findViewById(R.id.delaySeekBar);
        curDelayTV = (TextView)findViewById(R.id.curDelay);
        echoDelayProgress = delaySeekBar.getProgress() * 1000 / delaySeekBar.getMax();
        delaySeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                float curVal = (float)progress / delaySeekBar.getMax();
                curDelayTV.setText(String.format("%s", curVal));
                setSeekBarPromptPosition(delaySeekBar, curDelayTV);
                // Only user gestures re-configure the native engine.
                if (!fromUser) return;

                echoDelayProgress = progress * 1000 / delaySeekBar.getMax();
                configureEcho(echoDelayProgress, echoDecayProgress);
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {}
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {}
        });
        // Position the floating label once the view has been laid out.
        delaySeekBar.post(new Runnable() {
            @Override
            public void run() {
                setSeekBarPromptPosition(delaySeekBar, curDelayTV);
            }
        });

        // Decay slider: progress maps to a [0, 1] decay factor.
        decaySeekBar = (SeekBar)findViewById(R.id.decaySeekBar);
        curDecayTV = (TextView)findViewById(R.id.curDecay);
        echoDecayProgress = (float)decaySeekBar.getProgress() / decaySeekBar.getMax();
        decaySeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                float curVal = (float)progress / seekBar.getMax();
                curDecayTV.setText(String.format("%s", curVal));
                setSeekBarPromptPosition(decaySeekBar, curDecayTV);
                // Only user gestures re-configure the native engine.
                if (!fromUser)
                    return;

                echoDecayProgress = curVal;
                configureEcho(echoDelayProgress, echoDecayProgress);
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {}

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {}
        });
        decaySeekBar.post(new Runnable() {
            @Override
            public void run() {
                setSeekBarPromptPosition(decaySeekBar, curDecayTV);
            }
        });

        // initialize native audio system
        updateNativeAudioUI();

        if (supportRecording) {
            createSLEngine(
                    Integer.parseInt(nativeSampleRate),
                    Integer.parseInt(nativeSampleBufSize),
                    echoDelayProgress,
                    echoDecayProgress);
        }
    }

    /** Center the given label horizontally over the SeekBar's thumb. */
    private void setSeekBarPromptPosition(SeekBar seekBar, TextView label) {
        float thumbX = (float)seekBar.getProgress()/ seekBar.getMax() *
                seekBar.getWidth() + seekBar.getX();
        label.setX(thumbX - label.getWidth()/2.0f);
    }

    @Override
    protected void onDestroy() {
        // Stop the echo loop (if running) before tearing down the engine.
        if (supportRecording) {
            if (isPlaying) {
                stopPlay();
            }
            deleteSLEngine();
            isPlaying = false;
        }
        super.onDestroy();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Toggle the echo loop: on start, create player then recorder (rolling
     * back the player if the recorder fails); on stop, tear both down.
     * Updates the button label and status text accordingly.
     */
    private void startEcho() {
        if(!supportRecording){
            return;
        }
        if (!isPlaying) {
            if(!createSLBufferQueueAudioPlayer()) {
                statusView.setText(getString(R.string.player_error_msg));
                return;
            }
            if(!createAudioRecorder()) {
                deleteSLBufferQueueAudioPlayer();
                statusView.setText(getString(R.string.recorder_error_msg));
                return;
            }
            startPlay();   // startPlay() triggers startRecording()
            statusView.setText(getString(R.string.echoing_status_msg));
        } else {
            stopPlay();  // stopPlay() triggers stopRecording()
            updateNativeAudioUI();
            deleteAudioRecorder();
            deleteSLBufferQueueAudioPlayer();
        }
        isPlaying = !isPlaying;
        controlButton.setText(getString(isPlaying ?
                R.string.cmd_stop_echo: R.string.cmd_start_echo));
    }

    /** Button handler: request RECORD_AUDIO if needed, else toggle echo. */
    public void onEchoClick(View view) {
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) !=
                PackageManager.PERMISSION_GRANTED) {
            statusView.setText(getString(R.string.request_permission_status_msg));
            ActivityCompat.requestPermissions(
                    this,
                    new String[] { Manifest.permission.RECORD_AUDIO },
                    AUDIO_ECHO_REQUEST);
            return;
        }
        startEcho();
    }

    /** Button handler: refresh the displayed fast-audio-path parameters. */
    public void getLowLatencyParameters(View view) {
        updateNativeAudioUI();
    }

    /**
     * Read the device-preferred sample rate / buffer size from AudioManager
     * and verify mono 16-bit PCM recording is supported; sets
     * supportRecording accordingly.
     */
    private void queryNativeAudioParameters() {
        supportRecording = true;
        AudioManager myAudioMgr = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
        if(myAudioMgr == null) {
            supportRecording = false;
            return;
        }
        nativeSampleRate = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        nativeSampleBufSize =myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);

        // hardcoded channel to mono: both sides -- C++ and Java sides
        int recBufSize = AudioRecord.getMinBufferSize(
                Integer.parseInt(nativeSampleRate),
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (recBufSize == AudioRecord.ERROR ||
                recBufSize == AudioRecord.ERROR_BAD_VALUE) {
            supportRecording = false;
        }
    }

    /** Show either the audio parameters or the "not supported" error. */
    private void updateNativeAudioUI() {
        if (!supportRecording) {
            statusView.setText(getString(R.string.mic_error_msg));
            controlButton.setEnabled(false);
            return;
        }

        statusView.setText(getString(R.string.fast_audio_info_msg,
                nativeSampleRate, nativeSampleBufSize));
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        /*
         * if any permission failed, the sample could not play
         */
        if (AUDIO_ECHO_REQUEST != requestCode) {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
            return;
        }

        if (grantResults.length != 1 ||
                grantResults[0] != PackageManager.PERMISSION_GRANTED) {
            /*
             * When user denied permission, throw a Toast to prompt that RECORD_AUDIO
             * is necessary; also display the status on UI
             * Then application goes back to the original state: it behaves as if the button
             * was not clicked. The assumption is that user will re-click the "start" button
             * (to retry), or shutdown the app in normal way.
             */
            statusView.setText(getString(R.string.permission_error_msg));
            Toast.makeText(getApplicationContext(),
                    getString(R.string.permission_prompt_msg),
                    Toast.LENGTH_SHORT).show();
            return;
        }

        /*
         * When permissions are granted, we prompt the user the status. User would
         * re-try the "start" button to perform the normal operation. This saves us the extra
         * logic in code for async processing of the button listener.
         */
        statusView.setText(getString(R.string.permission_granted_msg,getString(R.string.cmd_start_echo)));

        // The callback runs on app's thread, so we are safe to resume the action
        startEcho();
    }

    /*
     * Loading our lib
     */
    static {
        System.loadLibrary("echo");
    }

    /*
     * jni function declarations
     * (implemented in jni_interface.h / jni_interface_impl.cpp of libecho)
     */
    static native void createSLEngine(int rate, int framesPerBuf,
                                      long delayInMs, float decay);
    static native void deleteSLEngine();

    static native boolean configureEcho(int delayInMs, float decay);

    static native boolean createSLBufferQueueAudioPlayer();
    static native void deleteSLBufferQueueAudioPlayer();

    static native boolean createAudioRecorder();
    static native void deleteAudioRecorder();
    static native void startPlay();
    static native void stopPlay();
}

View File

@@ -1,109 +0,0 @@
<!-- Main screen of the audio-echo sample: delay/decay sliders with floating
     value labels, a start/stop button, a fast-path-info button, and a
     status line pinned to the bottom. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools" android:layout_width="match_parent"
    android:id="@+id/mainLayout"
    android:layout_height="match_parent" android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    android:paddingBottom="@dimen/activity_vertical_margin" tools:context=".MainActivity">

    <!-- Floating label tracking the delay SeekBar thumb
         (repositioned from MainActivity.setSeekBarPromptPosition). -->
    <TextView
        android:id="@+id/curDelay"
        android:layout_gravity="center"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentTop="true"
        android:layout_marginTop="192dp"
        android:layout_toRightOf="@+id/minDelayLabel"
        android:text="@string/init_delay_val_msg"
        android:visibility="visible" />

    <TextView
        android:id="@+id/minDelayLabel"
        android:layout_gravity="start"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentLeft="true"
        android:layout_below="@+id/curDelay"
        android:layout_marginTop="0dp"
        android:text="@string/min_delay_label_msg"
        android:visibility="visible" />

    <!-- Echo delay slider; progress is mapped to milliseconds in Java. -->
    <SeekBar
        android:id="@+id/delaySeekBar"
        android:layout_alignParentRight="true"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_alignTop="@+id/minDelayLabel"
        android:layout_centerHorizontal="true"
        android:layout_toRightOf="@+id/minDelayLabel"
        android:maxHeight="3dp"
        android:minHeight="3dp"
        android:max="10"
        android:progress="1" />

    <!-- Floating label tracking the decay SeekBar thumb. -->
    <TextView
        android:id="@+id/curDecay"
        android:layout_gravity="center"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_below="@id/minDelayLabel"
        android:layout_marginTop="20dp"
        android:layout_toRightOf="@+id/minDecayLabel"
        android:text="@string/init_decay_val_msg"
        android:visibility="visible" />

    <TextView
        android:id="@+id/minDecayLabel"
        android:layout_gravity="start"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_alignParentLeft="true"
        android:layout_below="@+id/curDecay"
        android:layout_marginTop="0dp"
        android:text="@string/min_decay_label_msg"
        android:visibility="visible" />

    <!-- Echo decay slider; progress is mapped to a [0, 1] factor in Java. -->
    <SeekBar
        android:id="@+id/decaySeekBar"
        android:layout_alignParentRight="true"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_alignTop="@+id/minDecayLabel"
        android:layout_centerHorizontal="true"
        android:layout_toRightOf="@+id/minDecayLabel"
        android:maxHeight="3dp"
        android:minHeight="3dp"
        android:max="10"
        android:progress="1" />

    <!-- Start/stop echo; handled by MainActivity.onEchoClick. -->
    <Button
        android:id="@+id/capture_control_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_below="@+id/decaySeekBar"
        android:layout_centerHorizontal="true"
        android:layout_marginTop="30dp"
        android:onClick="onEchoClick"
        android:text="@string/cmd_start_echo"
        android:textAllCaps="false" />

    <!-- Refresh the fast-audio-path parameters shown in statusView. -->
    <Button
        android:id="@+id/get_parameter_button"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_above="@+id/statusView"
        android:layout_alignParentStart="true"
        android:onClick="getLowLatencyParameters"
        android:text="@string/cmd_get_param"
        android:textAllCaps="false" />

    <!-- Status messages (errors, permission prompts, audio parameters). -->
    <TextView android:text="@string/init_status_msg"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:lines="3"
        android:id="@+id/statusView"
        android:layout_centerHorizontal="true"
        android:layout_alignParentBottom="true"/>
</RelativeLayout>

View File

@@ -1,5 +0,0 @@
<!-- Options menu: a single Settings entry, handled (as a no-op) in
     MainActivity.onOptionsItemSelected. -->
<menu xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools" tools:context=".MainActivity">
    <item android:id="@+id/action_settings" android:title="@string/action_settings"
        android:orderInCategory="100" android:showAsAction="never" />
</menu>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.5 KiB

View File

@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- API 21+ theme variant: Material Light (overrides the Holo base theme). -->
<resources>
    <style name="AppTheme" parent="android:Theme.Material.Light">
    </style>
</resources>

View File

@@ -1,6 +0,0 @@
<!-- Wide-screen (w820dp) overrides for res/values/dimens.xml. -->
<resources>
    <!-- Example customization of dimensions originally defined in res/values/dimens.xml
         (such as screen margins) for screens with more than 820dp of available width. This
         would include 7" and 10" devices in landscape (~960dp and ~1280dp respectively). -->
    <dimen name="activity_horizontal_margin">64dp</dimen>
</resources>

View File

@@ -1,5 +0,0 @@
<!-- Default screen margins used by activity_main.xml padding. -->
<resources>
    <!-- Default screen margins, per the Android Design guidelines. -->
    <dimen name="activity_horizontal_margin">16dp</dimen>
    <dimen name="activity_vertical_margin">16dp</dimen>
</resources>

View File

@@ -1,24 +0,0 @@
<resources>
<string name="app_name">audio-echo</string>
<string name="init_status_msg">Starting Up</string>
<string name="request_permission_status_msg">"Requesting RECORD_AUDIO Permission..."</string>
<string name="echoing_status_msg">Engine Echoing...</string>
<string name="action_settings">Settings</string>
<string name="cmd_start_echo">Start Echo</string>
<string name="cmd_stop_echo">Stop Echo</string>
<string name="cmd_get_param">FastPathInfo</string>
<string name="fast_audio_info_msg">nativeSampleRate = %1$s\nnativeSampleBufSize = %2$s\n</string>
<string name="player_error_msg">Failed to Create Audio Player</string>
<string name="recorder_error_msg">Failed to Create Audio Recorder</string>
<string name="mic_error_msg">"Audio recording is not supported"</string>
<string name="permission_prompt_msg">"This sample needs RECORD_AUDIO permission"</string>
<string name="permission_granted_msg">RECORD_AUDIO permission granted, touch %1$s to begin</string>
<string name="permission_error_msg">"Permission for RECORD_AUDIO was denied"</string>
<string name="min_delay_label_msg">delay(seconds)</string>
<string name="init_delay_val_msg">0.1</string>
<string name="min_decay_label_msg">decay(decimal)</string>
<string name="init_decay_val_msg">0.1</string>
</resources>

View File

@@ -1,8 +0,0 @@
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="android:Theme.Holo.Light.DarkActionBar">
<!-- Customize your theme here. -->
</style>
</resources>

View File

@@ -1,21 +0,0 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
// NOTE(review): jcenter() is deprecated (JCenter became read-only in 2021);
// consider migrating to mavenCentral() when next touching this file.
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:4.2.0'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
// NOTE(review): deprecated repository — mirror the buildscript block and
// prefer mavenCentral() when migrating.
jcenter()
}
}

View File

@@ -1,20 +0,0 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
# Default value: -Xmx1024m -XX:MaxPermSize=256m
# org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
android.enableJetifier=true
android.useAndroidX=true

Binary file not shown.

View File

@@ -1,5 +0,0 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

183
audio-echo/gradlew vendored
View File

@@ -1,183 +0,0 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
# APP_HOME becomes the physical (symlink-free, via pwd -P) directory that
# holds this script; the caller's working directory is restored afterwards.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# warn: print a diagnostic message (note: written to stdout, not stderr).
warn () {
echo "$*"
}
# die: print a message surrounded by blank lines and abort with status 1.
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
# The wrapper jar lives at a fixed path relative to the resolved APP_HOME.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
# Each positional arg that matches a root-dir path (and is not an option)
# is converted to Windows form; results are stored in args0..args9 via eval.
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
# Rebuild the positional parameters from the converted copies
# (supports at most 9 arguments on this code path).
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
# save: emit each argument single-quoted (with embedded quotes escaped) and a
# trailing backslash, so the whole list can pass safely through 'eval set --'.
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"

100
audio-echo/gradlew.bat vendored
View File

@@ -1,100 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem APP_HOME is the directory containing this script (%~dp0).
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem No JAVA_HOME: probe the java on PATH; success jumps straight to :init.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip any quotes from JAVA_HOME before building the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

View File

@@ -1 +0,0 @@
include ':app'

View File

@@ -1,7 +0,0 @@
status: PUBLISHED
technologies: [Android, NDK]
categories: [NDK]
languages: [C++, Java]
solutions: [Mobile]
github: googlesamples/android-ndk
license: apache2

View File

@@ -1,20 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.hellolibs">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity">
package="com.example.plasma"
android:versionCode="1"
android:versionName="1.0">
<application android:label="@string/app_name" android:debuggable="true">
<activity android:name=".Plasma"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
<uses-sdk android:minSdkVersion="8"/>
</manifest>

View File

@@ -1,52 +0,0 @@
Bitmap Plasma
=============
Bitmap Plasma is an Android sample that uses JNI to render a plasma effect in an Android [Bitmap](http://developer.android.com/reference/android/graphics/Bitmap.html) from C code.
This sample uses the new [Android Studio CMake plugin](http://tools.android.com/tech-docs/external-c-builds) with C++ support.
Pre-requisites
--------------
- Android Studio 2.2+ with [NDK](https://developer.android.com/ndk/) bundle.
Getting Started
---------------
1. [Download Android Studio](http://developer.android.com/sdk/index.html)
1. Launch Android Studio.
1. Open the sample directory.
1. Open *File/Project Structure...*
- Click *Download* or *Select NDK location*.
1. Click *Tools/Android/Sync Project with Gradle Files*.
1. Click *Run/Run 'app'*.
Screenshots
-----------
![screenshot](screenshot.png)
Support
-------
If you've found an error in these samples, please [file an issue](https://github.com/googlesamples/android-ndk/issues/new).
Patches are encouraged, and may be submitted by [forking this project](https://github.com/googlesamples/android-ndk/fork) and
submitting a pull request through GitHub. Please see [CONTRIBUTING.md](../CONTRIBUTING.md) for more details.
- [Stack Overflow](http://stackoverflow.com/questions/tagged/android-ndk)
- [Android Tools Feedback](http://tools.android.com/feedback)
License
-------
Copyright 2015 Google, Inc.
Licensed to the Apache Software Foundation (ASF) under one or more contributor
license agreements. See the NOTICE file distributed with this work for
additional information regarding copyright ownership. The ASF licenses this
file to you under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy of
the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.

Some files were not shown because too many files have changed in this diff Show More