Remove webrtc/examples/android/media_demo.
The JNI code for VoiceEngine is not maintained and VoiceEngine is being refactored. This is not a supported Java interface; use AppRTCDemo as a starting point instead.

Also renames webrtc/libjingle_examples.gyp to webrtc/webrtc_examples.gyp to replace the previous file (which only contained media_demo).

BUG=
R=henrika@webrtc.org, kjellander@webrtc.org

Review URL: https://codereview.webrtc.org/1439593002 .

Cr-Commit-Position: refs/heads/master@{#10599}
parent cbfabbf818
commit fa566d610f

.gitignore (vendored): 6 deletions
@@ -152,12 +152,6 @@
 /tools/win
 /tools/xdisplaycheck
 /tools/whitespace.txt
-/webrtc/examples/android/media_demo/bin
-/webrtc/examples/android/media_demo/gen
-/webrtc/examples/android/media_demo/libs
-/webrtc/examples/android/media_demo/local.properties
-/webrtc/examples/android/media_demo/obj
-/webrtc/examples/android/media_demo/proguard-project.txt
 /webrtc/examples/android/opensl_loopback/bin
 /webrtc/examples/android/opensl_loopback/gen
 /webrtc/examples/android/opensl_loopback/libs

all.gyp: 1 deletion
@@ -24,7 +24,6 @@
       'conditions': [
         ['include_examples==1', {
           'dependencies': [
-            'webrtc/libjingle_examples.gyp:*',
             'webrtc/webrtc_examples.gyp:*',
           ],
         }],

@@ -378,7 +378,7 @@
         '<(webrtc_root)/base/base_tests.gyp:rtc_base_tests_utils',
         '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
         '<(DEPTH)/third_party/ocmock/ocmock.gyp:ocmock',
-        '<(webrtc_root)/libjingle_examples.gyp:apprtc_signaling',
+        '<(webrtc_root)/webrtc_examples.gyp:apprtc_signaling',
       ],
       'sources': [
         'app/webrtc/objctests/mac/main.mm',
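The GYP hunks above show the consumer-side pattern for the rename. As a minimal sketch, a hypothetical downstream .gyp file (the target name below is made up; only the dependency string comes from this change) would be updated like so:

  # Hypothetical consumer target; only the dependency string is taken from this CL.
  {
    'targets': [
      {
        'target_name': 'example_consumer',  # made-up name for illustration
        'type': 'none',
        'dependencies': [
          # Before: 'webrtc/libjingle_examples.gyp:*',
          'webrtc/webrtc_examples.gyp:*',
        ],
      },
    ],
  }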
@@ -1,29 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    android:versionCode="1" package="org.webrtc.webrtcdemo" android:versionName="1.07">
  <application android:icon="@drawable/logo"
               android:label="@string/appName"
               android:debuggable="true">
    <activity android:name=".WebRTCDemo"
              android:theme="@android:style/Theme.Holo"
              android:label="@string/appName"
              android:screenOrientation="landscape"
              >
      <intent-filter>
        <action android:name="android.intent.action.MAIN" />
        <category android:name="android.intent.category.LAUNCHER" />
        <action android:name="android.intent.action.HEADSET_PLUG"/>
      </intent-filter>
    </activity>
  </application>

  <uses-sdk android:minSdkVersion="14" />
  <uses-permission android:name="android.permission.CAMERA"></uses-permission>
  <uses-feature android:name="android.hardware.camera" />
  <uses-feature android:name="android.hardware.camera.autofocus" />
  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
  <uses-permission android:name="android.permission.RECORD_AUDIO" />
  <uses-permission android:name="android.permission.INTERNET" />
  <uses-permission android:name="android.permission.WAKE_LOCK" />
  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
</manifest>
@@ -1,24 +0,0 @@
This directory contains a sample app for sending and receiving audio
on Android. It further lets you enable and disable some call quality
enhancements such as echo cancellation, noise suppression etc.

Prerequisites:
- Make sure gclient is checking out tools necessary to target Android: your
  .gclient file should contain a line like:
  target_os = ['android']
  Make sure to re-run gclient sync after adding this to download the tools.
- Env vars need to be set up to target Android; easiest way to do this is to run
  (from the libjingle trunk directory):
  . ./build/android/envsetup.sh
  Note that this clobbers any previously-set $GYP_DEFINES so it must be done
  before the next item.
- Set up webrtc-related GYP variables:
  export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>"
- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.

Example of building the app:
cd <path/to/repository>/trunk
ninja -C out/Debug WebRTCDemo

It can then be installed and run on the device:
adb install -r out/Debug/WebRTCDemo-debug.apk
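For reference, the removed README's setup and build steps boil down to roughly the following shell session; the JDK path and checkout location are placeholders taken verbatim from the README, and the layout reflects the old libjingle trunk checkout it assumed:

  # Assumes a gclient checkout that already has target_os = ['android'] synced.
  cd <path/to/repository>/trunk
  . ./build/android/envsetup.sh          # clobbers any previously-set $GYP_DEFINES
  export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>"
  gclient runhooks                       # regenerate Android-targeting .ninja files
  ninja -C out/Debug WebRTCDemo
  adb install -r out/Debug/WebRTCDemo-debug.apk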
@@ -1,92 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="WebRTCDemo" default="help">

    <!-- The local.properties file is created and updated by the 'android' tool.
         It contains the path to the SDK. It should *NOT* be checked into
         Version Control Systems. -->
    <property file="local.properties" />

    <!-- The ant.properties file can be created by you. It is only edited by the
         'android' tool to add properties to it.
         This is the place to change some Ant specific build properties.
         Here are some properties you may want to change/update:

         source.dir
             The name of the source directory. Default is 'src'.
         out.dir
             The name of the output directory. Default is 'bin'.

         For other overridable properties, look at the beginning of the rules
         files in the SDK, at tools/ant/build.xml

         Properties related to the SDK location or the project target should
         be updated using the 'android' tool with the 'update' action.

         This file is an integral part of the build system for your
         application and should be checked into Version Control Systems.

         -->
    <property file="ant.properties" />

    <!-- if sdk.dir was not set from one of the property file, then
         get it from the ANDROID_HOME env var.
         This must be done before we load project.properties since
         the proguard config can use sdk.dir -->
    <property environment="env" />
    <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
        <isset property="env.ANDROID_SDK_ROOT" />
    </condition>

    <!-- The project.properties file is created and updated by the 'android'
         tool, as well as ADT.

         This contains project specific properties such as project target, and library
         dependencies. Lower level build properties are stored in ant.properties
         (or in .classpath for Eclipse projects).

         This file is an integral part of the build system for your
         application and should be checked into Version Control Systems. -->
    <loadproperties srcFile="project.properties" />

    <!-- quick check on sdk.dir -->
    <fail
            message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
            unless="sdk.dir"
    />

    <!--
        Import per project custom build rules if present at the root of the project.
        This is the place to put custom intermediary targets such as:
            -pre-build
            -pre-compile
            -post-compile (This is typically used for code obfuscation.
                           Compiled code location: ${out.classes.absolute.dir}
                           If this is not done in place, override ${out.dex.input.absolute.dir})
            -post-package
            -post-build
            -pre-clean
    -->
    <import file="custom_rules.xml" optional="true" />

    <!-- Import the actual build file.

         To customize existing targets, there are two options:
         - Customize only one target:
             - copy/paste the target into this file, *before* the
               <import> task.
             - customize it to your needs.
         - Customize the whole content of build.xml
             - copy/paste the content of the rules files (minus the top node)
               into this file, replacing the <import> task.
             - customize to your needs.

         ***********************
         ****** IMPORTANT ******
         ***********************
         In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
         in order to avoid having your file be overridden by tools such as "android update project"
    -->
    <!-- version-tag: 1 -->
    <import file="${sdk.dir}/tools/ant/build.xml" />

</project>
@@ -1,77 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"

#include <limits>

jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
                      const char* signature) {
  jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
  CHECK_JNI_EXCEPTION(jni, "error during GetMethodID");
  return m;
}

jlong jlongFromPointer(void* ptr) {
  CHECK(sizeof(intptr_t) <= sizeof(jlong), "Time to rethink the use of jlongs");
  // Going through intptr_t to be obvious about the definedness of the
  // conversion from pointer to integral type. intptr_t to jlong is a standard
  // widening by the COMPILE_ASSERT above.
  jlong ret = reinterpret_cast<intptr_t>(ptr);
  CHECK(reinterpret_cast<void*>(ret) == ptr,
        "jlong does not convert back to pointer");
  return ret;
}

// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
  const char* chars = jni->GetStringUTFChars(j_string, NULL);
  CHECK_JNI_EXCEPTION(jni, "Error during GetStringUTFChars");
  std::string str(chars, jni->GetStringUTFLength(j_string));
  CHECK_JNI_EXCEPTION(jni, "Error during GetStringUTFLength");
  jni->ReleaseStringUTFChars(j_string, chars);
  CHECK_JNI_EXCEPTION(jni, "Error during ReleaseStringUTFChars");
  return str;
}

ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni, const char** classes,
                                           int size) {
  for (int i = 0; i < size; ++i) {
    LoadClass(jni, classes[i]);
  }
}
ClassReferenceHolder::~ClassReferenceHolder() {
  CHECK(classes_.empty(), "Must call FreeReferences() before dtor!");
}

void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
  for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
       it != classes_.end(); ++it) {
    jni->DeleteGlobalRef(it->second);
  }
  classes_.clear();
}

jclass ClassReferenceHolder::GetClass(const std::string& name) {
  std::map<std::string, jclass>::iterator it = classes_.find(name);
  CHECK(it != classes_.end(), "Could not find class");
  return it->second;
}

void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
  jclass localRef = jni->FindClass(name.c_str());
  CHECK_JNI_EXCEPTION(jni, "Could not load class");
  CHECK(localRef, name.c_str());
  jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
  CHECK_JNI_EXCEPTION(jni, "error during NewGlobalRef");
  CHECK(globalRef, name.c_str());
  bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
  CHECK(inserted, "Duplicate class name");
}
@@ -1,79 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_

// TODO(henrike): this file contains duplication with regards to
// talk/app/webrtc/java/jni/peerconnection_jni.cc. When/if code can be shared
// between trunk/talk and trunk/webrtc remove the duplication.

#include <android/log.h>
#include <jni.h>

#include <assert.h>
#include <map>
#include <string>

#define TAG "WEBRTC-NATIVE"

// Abort the process if |x| is false, emitting |msg| to logcat.
#define CHECK(x, msg)                                                   \
  if (x) {                                                              \
  } else {                                                              \
    __android_log_print(ANDROID_LOG_ERROR, TAG, "%s:%d: %s", __FILE__,  \
                        __LINE__, msg);                                 \
    assert(false);                                                      \
  }

// Abort the process if |jni| has a Java exception pending, emitting |msg| to
// logcat.
#define CHECK_JNI_EXCEPTION(jni, msg) \
  if (0) {                            \
  } else {                            \
    if (jni->ExceptionCheck()) {      \
      jni->ExceptionDescribe();       \
      jni->ExceptionClear();          \
      CHECK(0, msg);                  \
    }                                 \
  }

// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
// object/class/method/field is non-null.
jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
                      const char* signature);

// Return a |jlong| that will automatically convert back to |ptr| when assigned
// to a |uint64_t|
jlong jlongFromPointer(void* ptr);

// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);

// Android's FindClass() is trickier than usual because the app-specific
// ClassLoader is not consulted when there is no app-specific frame on the
// stack. Consequently, we only look up classes once in JNI_OnLoad.
// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
class ClassReferenceHolder {
 public:
  ClassReferenceHolder(JNIEnv* jni, const char** classes, int size);
  ~ClassReferenceHolder();

  void FreeReferences(JNIEnv* jni);

  jclass GetClass(const std::string& name);

 private:
  void LoadClass(JNIEnv* jni, const std::string& name);

  std::map<std::string, jclass> classes_;
};

#endif  // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
@@ -1,48 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <jni.h>

#include <assert.h>

#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
#include "webrtc/voice_engine/include/voe_base.h"

// Macro for native functions that can be found by way of jni-auto discovery.
// Note extern "C" is needed for "discovery" of native methods to work.
#define JOWW(rettype, name) \
  extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name

static JavaVM* g_vm = NULL;

extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
  // Only called once.
  CHECK(!g_vm, "OnLoad called more than once");
  g_vm = vm;
  return JNI_VERSION_1_4;
}

JOWW(void, NativeWebRtcContextRegistry_register)(
    JNIEnv* jni,
    jclass,
    jobject context) {
  webrtc_examples::SetVoeDeviceObjects(g_vm);
  CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, context) == 0,
        "Failed to register android objects to voice engine");
}

JOWW(void, NativeWebRtcContextRegistry_unRegister)(
    JNIEnv* jni,
    jclass) {
  CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL) == 0,
        "Failed to unregister android objects from voice engine");
  webrtc_examples::ClearVoeDeviceObjects();
}
@@ -1,423 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

// This file contains JNI for the voice engine interfaces.
// The native functions are found using jni's auto discovery.

#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"

#include <map>
#include <string>

#include "webrtc/base/arraysize.h"
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/modules/utility/include/helpers_android.h"
#include "webrtc/test/channel_transport/channel_transport.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_file.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/include/voe_network.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/include/voe_volume_control.h"

// Macro for native functions that can be found by way of jni-auto discovery.
// Note extern "C" is needed for "discovery" of native methods to work.
#define JOWW(rettype, name) \
  extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name

namespace {

static JavaVM* g_vm = NULL;
static ClassReferenceHolder* g_class_reference_holder = NULL;

jclass GetClass(JNIEnv* jni, const char* name) {
  CHECK(g_class_reference_holder, "Class reference holder NULL");
  return g_class_reference_holder->GetClass(name);
}

static const char* g_classes[] = {"org/webrtc/webrtcdemo/CodecInst"};

template<typename T>
void ReleaseSubApi(T instance) {
  CHECK(instance->Release() >= 0, "failed to release instance")
}

class VoiceEngineData {
 public:
  VoiceEngineData()
      : ve(webrtc::VoiceEngine::Create()),
        base(webrtc::VoEBase::GetInterface(ve)),
        codec(webrtc::VoECodec::GetInterface(ve)),
        file(webrtc::VoEFile::GetInterface(ve)),
        netw(webrtc::VoENetwork::GetInterface(ve)),
        apm(webrtc::VoEAudioProcessing::GetInterface(ve)),
        volume(webrtc::VoEVolumeControl::GetInterface(ve)),
        hardware(webrtc::VoEHardware::GetInterface(ve)),
        rtp(webrtc::VoERTP_RTCP::GetInterface(ve)) {
    CHECK(ve != NULL, "Voice engine instance failed to be created");
    CHECK(base != NULL, "Failed to acquire base interface");
    CHECK(codec != NULL, "Failed to acquire codec interface");
    CHECK(file != NULL, "Failed to acquire file interface");
    CHECK(netw != NULL, "Failed to acquire netw interface");
    CHECK(apm != NULL, "Failed to acquire apm interface");
    CHECK(volume != NULL, "Failed to acquire volume interface");
    CHECK(hardware != NULL, "Failed to acquire hardware interface");
    CHECK(rtp != NULL, "Failed to acquire rtp interface");
  }

  ~VoiceEngineData() {
    CHECK(channel_transports_.empty(),
          "VoE transports must be deleted before terminating");
    CHECK(base->Terminate() == 0, "VoE failed to terminate");
    ReleaseSubApi(base);
    ReleaseSubApi(codec);
    ReleaseSubApi(file);
    ReleaseSubApi(netw);
    ReleaseSubApi(apm);
    ReleaseSubApi(volume);
    ReleaseSubApi(hardware);
    ReleaseSubApi(rtp);
    webrtc::VoiceEngine* ve_instance = ve;
    CHECK(webrtc::VoiceEngine::Delete(ve_instance), "VoE failed to be deleted");
  }

  int CreateChannel() {
    int channel = base->CreateChannel();
    if (channel == -1) {
      return -1;
    }
    CreateTransport(channel);
    return channel;
  }

  int DeleteChannel(int channel) {
    if (base->DeleteChannel(channel) != 0) {
      return -1;
    }
    DeleteTransport(channel);
    return 0;
  }

  webrtc::test::VoiceChannelTransport* GetTransport(int channel) {
    ChannelTransports::iterator found = channel_transports_.find(channel);
    if (found == channel_transports_.end()) {
      return NULL;
    }
    return found->second;
  }

  webrtc::VoiceEngine* const ve;
  webrtc::VoEBase* const base;
  webrtc::VoECodec* const codec;
  webrtc::VoEFile* const file;
  webrtc::VoENetwork* const netw;
  webrtc::VoEAudioProcessing* const apm;
  webrtc::VoEVolumeControl* const volume;
  webrtc::VoEHardware* const hardware;
  webrtc::VoERTP_RTCP* const rtp;

 private:
  // Voice engine no longer provides a socket implementation. There is,
  // however, a socket implementation in webrtc::test.
  typedef std::map<int, webrtc::test::VoiceChannelTransport*>
      ChannelTransports;

  void CreateTransport(int channel) {
    CHECK(GetTransport(channel) == NULL,
          "Transport already created for VoE channel, inconsistent state");
    channel_transports_[channel] =
        new webrtc::test::VoiceChannelTransport(netw, channel);
  }
  void DeleteTransport(int channel) {
    CHECK(GetTransport(channel) != NULL,
          "VoE channel missing transport, inconsistent state");
    delete channel_transports_[channel];
    channel_transports_.erase(channel);
  }

  ChannelTransports channel_transports_;
};

webrtc::CodecInst* GetCodecInst(JNIEnv* jni, jobject j_codec) {
  jclass j_codec_class = jni->GetObjectClass(j_codec);
  jfieldID native_codec_id =
      jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
  jlong j_p = jni->GetLongField(j_codec, native_codec_id);
  return reinterpret_cast<webrtc::CodecInst*>(j_p);
}

}  // namespace

namespace webrtc_examples {

void SetVoeDeviceObjects(JavaVM* vm) {
  CHECK(vm, "Trying to register NULL vm");
  g_vm = vm;
  webrtc::AttachThreadScoped ats(g_vm);
  JNIEnv* jni = ats.env();
  g_class_reference_holder = new ClassReferenceHolder(
      jni, g_classes, arraysize(g_classes));
}

void ClearVoeDeviceObjects() {
  CHECK(g_vm, "Clearing vm without it being set");
  {
    webrtc::AttachThreadScoped ats(g_vm);
    g_class_reference_holder->FreeReferences(ats.env());
  }
  g_vm = NULL;
  delete g_class_reference_holder;
  g_class_reference_holder = NULL;
}

}  // namespace webrtc_examples

VoiceEngineData* GetVoiceEngineData(JNIEnv* jni, jobject j_voe) {
  jclass j_voe_class = jni->GetObjectClass(j_voe);
  jfieldID native_voe_id =
      jni->GetFieldID(j_voe_class, "nativeVoiceEngine", "J");
  jlong j_p = jni->GetLongField(j_voe, native_voe_id);
  return reinterpret_cast<VoiceEngineData*>(j_p);
}

webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe) {
  return GetVoiceEngineData(jni, j_voe)->ve;
}

JOWW(jlong, VoiceEngine_create)(JNIEnv* jni, jclass) {
  VoiceEngineData* voe_data = new VoiceEngineData();
  return jlongFromPointer(voe_data);
}

JOWW(void, VoiceEngine_dispose)(JNIEnv* jni, jobject j_voe) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  delete voe_data;
}

JOWW(jint, VoiceEngine_init)(JNIEnv* jni, jobject j_voe) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->Init();
}

JOWW(jint, VoiceEngine_createChannel)(JNIEnv* jni, jobject j_voe) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->CreateChannel();
}

JOWW(jint, VoiceEngine_deleteChannel)(JNIEnv* jni, jobject j_voe,
                                      jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->DeleteChannel(channel);
}

JOWW(jint, VoiceEngine_setLocalReceiver)(JNIEnv* jni, jobject j_voe,
                                         jint channel, jint port) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  webrtc::test::VoiceChannelTransport* transport =
      voe_data->GetTransport(channel);
  return transport->SetLocalReceiver(port);
}

JOWW(jint, VoiceEngine_setSendDestination)(JNIEnv* jni, jobject j_voe,
                                           jint channel, jint port,
                                           jstring j_addr) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  std::string addr = JavaToStdString(jni, j_addr);
  webrtc::test::VoiceChannelTransport* transport =
      voe_data->GetTransport(channel);
  return transport->SetSendDestination(addr.c_str(), port);
}

JOWW(jint, VoiceEngine_startListen)(JNIEnv* jni, jobject j_voe, jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->StartReceive(channel);
}

JOWW(jint, VoiceEngine_startPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->StartPlayout(channel);
}

JOWW(jint, VoiceEngine_startSend)(JNIEnv* jni, jobject j_voe, jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->StartSend(channel);
}

JOWW(jint, VoiceEngine_stopListen)(JNIEnv* jni, jobject j_voe, jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->StartReceive(channel);
}

JOWW(jint, VoiceEngine_stopPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->StopPlayout(channel);
}

JOWW(jint, VoiceEngine_stopSend)(JNIEnv* jni, jobject j_voe, jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->base->StopSend(channel);
}

JOWW(jint, VoiceEngine_setSpeakerVolume)(JNIEnv* jni, jobject j_voe,
                                         jint level) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->volume->SetSpeakerVolume(level);
}

JOWW(jint, VoiceEngine_startPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
                                                jint channel,
                                                jstring j_filename,
                                                jboolean loop) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  std::string filename = JavaToStdString(jni, j_filename);
  return voe_data->file->StartPlayingFileLocally(channel,
                                                 filename.c_str(),
                                                 loop);
}

JOWW(jint, VoiceEngine_stopPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
                                               jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->file->StopPlayingFileLocally(channel);
}

JOWW(jint, VoiceEngine_startPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
                                                     jint channel,
                                                     jstring j_filename,
                                                     jboolean loop) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  std::string filename = JavaToStdString(jni, j_filename);
  return voe_data->file->StartPlayingFileAsMicrophone(channel,
                                                      filename.c_str(),
                                                      loop);
}

JOWW(jint, VoiceEngine_stopPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
                                                    jint channel) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->file->StopPlayingFileAsMicrophone(channel);
}

JOWW(jint, VoiceEngine_numOfCodecs)(JNIEnv* jni, jobject j_voe) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->codec->NumOfCodecs();
}

JOWW(jobject, VoiceEngine_getCodec)(JNIEnv* jni, jobject j_voe, jint index) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  webrtc::CodecInst* codec = new webrtc::CodecInst();
  CHECK(voe_data->codec->GetCodec(index, *codec) == 0,
        "getCodec must be called with valid index");
  jclass j_codec_class = GetClass(jni, "org/webrtc/webrtcdemo/CodecInst");
  jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
  jobject j_codec =
      jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
  CHECK_JNI_EXCEPTION(jni, "error during NewObject");
  return j_codec;
}

JOWW(jint, VoiceEngine_setSendCodec)(JNIEnv* jni, jobject j_voe, jint channel,
                                     jobject j_codec) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  webrtc::CodecInst* inst = GetCodecInst(jni, j_codec);
  return voe_data->codec->SetSendCodec(channel, *inst);
}

JOWW(jint, VoiceEngine_setEcStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
                                    jint ec_mode) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->apm->SetEcStatus(enable,
                                    static_cast<webrtc::EcModes>(ec_mode));
}

JOWW(jint, VoiceEngine_setAecmMode)(JNIEnv* jni, jobject j_voe, jint aecm_mode,
                                    jboolean cng) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->apm->SetAecmMode(static_cast<webrtc::AecmModes>(aecm_mode),
                                    cng);
}

JOWW(jint, VoiceEngine_setAgcStatus)(JNIEnv* jni, jobject j_voe,
                                     jboolean enable, jint agc_mode) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->apm->SetAgcStatus(enable,
                                     static_cast<webrtc::AgcModes>(agc_mode));
}

// Returns the native AgcConfig object associated with the Java object
// |j_codec|.
void GetNativeAgcConfig(JNIEnv* jni, jobject j_codec,
                        webrtc::AgcConfig* agc_config) {
  jclass j_codec_class = jni->GetObjectClass(j_codec);
  jfieldID dBOv_id = jni->GetFieldID(j_codec_class, "targetLevelDbOv", "I");
  agc_config->targetLeveldBOv = jni->GetIntField(j_codec, dBOv_id);
  jfieldID gain_id =
      jni->GetFieldID(j_codec_class, "digitalCompressionGaindB", "I");
  agc_config->digitalCompressionGaindB = jni->GetIntField(j_codec, gain_id);
  jfieldID limiter_id = jni->GetFieldID(j_codec_class, "limiterEnable", "Z");
  agc_config->limiterEnable = jni->GetBooleanField(j_codec, limiter_id);
}

JOWW(jint, VoiceEngine_setAgcConfig)(JNIEnv* jni, jobject j_voe,
                                     jobject j_config) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  webrtc::AgcConfig config;
  GetNativeAgcConfig(jni, j_config, &config);
  return voe_data->apm->SetAgcConfig(config);
}

JOWW(jint, VoiceEngine_setNsStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
                                    jint ns_mode) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->apm->SetNsStatus(enable,
                                    static_cast<webrtc::NsModes>(ns_mode));
}

JOWW(jint, VoiceEngine_startDebugRecording)(JNIEnv* jni, jobject j_voe,
                                            jstring j_filename) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  std::string filename = JavaToStdString(jni, j_filename);
  return voe_data->apm->StartDebugRecording(filename.c_str());
}

JOWW(jint, VoiceEngine_stopDebugRecording)(JNIEnv* jni, jobject j_voe) {
  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
  return voe_data->apm->StopDebugRecording();
}

JOWW(void, CodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
  delete GetCodecInst(jni, j_codec);
}

JOWW(jint, CodecInst_plType)(JNIEnv* jni, jobject j_codec) {
  return GetCodecInst(jni, j_codec)->pltype;
}

JOWW(jstring, CodecInst_name)(JNIEnv* jni, jobject j_codec) {
  return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plname);
}

JOWW(jint, CodecInst_plFrequency)(JNIEnv* jni, jobject j_codec) {
  return GetCodecInst(jni, j_codec)->plfreq;
}

JOWW(jint, CodecInst_pacSize)(JNIEnv* jni, jobject j_codec) {
  return GetCodecInst(jni, j_codec)->pacsize;
}

JOWW(jint, CodecInst_channels)(JNIEnv* jni, jobject j_codec) {
  return GetCodecInst(jni, j_codec)->channels;
}

JOWW(jint, CodecInst_rate)(JNIEnv* jni, jobject j_codec) {
  return GetCodecInst(jni, j_codec)->rate;
}
@@ -1,31 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_

#include <jni.h>

namespace webrtc {

class VoiceEngine;

}  // namespace webrtc

namespace webrtc_examples {

void SetVoeDeviceObjects(JavaVM* vm);
void ClearVoeDeviceObjects();

}  // namespace webrtc_examples

webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe);

#endif  // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
@@ -1,14 +0,0 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "ant.properties", and override values to adapt the script to your
# project structure.

# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt

# Project target.
target=android-23
Binary file not shown. (Before: 3.2 KiB)
@@ -1,80 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:orientation="vertical">
  <TextView android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:textStyle="bold"
            android:textSize="24dip"
            android:text="Audio Settings">
  </TextView>
  <TextView android:layout_height="wrap_content"
            android:layout_gravity="bottom"
            android:layout_width="wrap_content"
            android:text="@string/codecType">
  </TextView>
  <Spinner android:id="@+id/spAudioCodecType"
           android:layout_height="wrap_content"
           android:layout_width="fill_parent">
  </Spinner>
  <LinearLayout android:layout_height="wrap_content"
                android:layout_width="fill_parent">
    <TextView android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/aTxPort">
    </TextView>
    <EditText android:id="@+id/etATxPort"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:imeOptions="actionDone"
              android:inputType="number">
    </EditText>
    <TextView android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/aRxPort">
    </TextView>
    <EditText android:id="@+id/etARxPort"
              android:layout_height="wrap_content"
              android:layout_width="wrap_content"
              android:imeOptions="actionDone"
              android:inputType="number">
    </EditText>
  </LinearLayout>
  <LinearLayout android:layout_height="wrap_content"
                android:layout_width="fill_parent">
    <CheckBox android:id="@+id/cbAecm"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/aecm">
    </CheckBox>
    <CheckBox android:id="@+id/cbNoiseSuppression"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/noiseSuppression">
    </CheckBox>
    <CheckBox android:id="@+id/cbAutoGainControl"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/autoGainControl">
    </CheckBox>
  </LinearLayout>
  <LinearLayout android:layout_height="wrap_content"
                android:layout_width="fill_parent">
    <CheckBox android:id="@+id/cbSpeaker"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/speaker">
    </CheckBox>
    <CheckBox android:id="@+id/cbDebugRecording"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/debugRecording">
    </CheckBox>
    <CheckBox android:id="@+id/cbAudioRTPDump"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/rtpDump">
    </CheckBox>
  </LinearLayout>
</LinearLayout>
@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="wrap_content"
    android:orientation="vertical"
    android:padding="3dip">
  <TextView android:id="@+id/spinner_row"
            android:layout_toRightOf="@+id/image"
            android:padding="3dip"
            android:layout_marginTop="2dip"
            android:textColor="#FFF"
            android:textStyle="bold"
            android:text="description"
            android:layout_marginLeft="5dip"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"/>
</RelativeLayout>
@@ -1,26 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="horizontal"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent">
  <LinearLayout
      android:orientation="vertical"
      android:layout_width="120dip"
      android:layout_height="fill_parent">
    <TextView android:id="@+id/tvStats"
              android:layout_width="fill_parent"
              android:layout_height="60dip"
              android:textSize="6sp"
              android:text=""/>
    <Button android:id="@+id/btStats"
            android:layout_width="fill_parent"
            android:layout_height="wrap_content"
            android:layout_gravity="bottom"
            android:text="@string/stats"/>
    <Button android:id="@+id/btStartStopCall"
            android:layout_width="fill_parent"
            android:layout_height="wrap_content"
            android:layout_gravity="bottom"/>
  </LinearLayout>
</LinearLayout>
@@ -1,36 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:layout_gravity="right"
    android:orientation="vertical">
  <TextView android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:textStyle="bold"
            android:textSize="24dip"
            android:text="@string/gSettings">
  </TextView>
  <LinearLayout android:orientation="horizontal"
                android:layout_height="wrap_content"
                android:layout_width="fill_parent">
    <CheckBox android:id="@+id/cbAudio"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/enableAudio">
    </CheckBox>
    <CheckBox android:id="@+id/cbLoopback"
              android:layout_width="wrap_content"
              android:layout_height="wrap_content"
              android:text="@string/loopback">
    </CheckBox>
  </LinearLayout>
  <TextView android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="@string/remoteIp">
  </TextView>
  <EditText android:id="@+id/etRemoteIp"
            android:layout_height="wrap_content"
            android:layout_width="fill_parent"
            android:imeOptions="actionDone">
  </EditText>
</LinearLayout>
@@ -1,5 +0,0 @@
<menu xmlns:android="http://schemas.android.com/apk/res/android" >
  <item android:id="@+id/action_exit"
        android:icon="@drawable/logo"
        android:title="Exit"/>
</menu>
@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
  <bool name="apm_debug_enabled_default">false</bool>
  <bool name="audio_enabled_default">true</bool>
  <bool name="loopback_enabled_default">true</bool>
  <bool name="nack_enabled_default">true</bool>
  <bool name="opengl_enabled_default">true</bool>
  <bool name="speaker_enabled_default">false</bool>
  <bool name="stats_enabled_default">true</bool>
  <bool name="trace_enabled_default">true</bool>
  <bool name="video_receive_enabled_default">true</bool>
  <bool name="video_send_enabled_default">true</bool>
</resources>
@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
  <integer name="aRxPortDefault">11113</integer>
  <integer name="aTxPortDefault">11113</integer>
  <integer name="openGl">0</integer>
  <integer name="surfaceView">1</integer>
  <integer name="mediaCodec">2</integer>
  <integer name="defaultView">0</integer>
  <integer name="call_restart_periodicity_ms">0</integer>
  <integer name="video_codec_default">0</integer>
  <integer name="vRxPortDefault">11111</integer>
  <integer name="vTxPortDefault">11111</integer>
</resources>
@@ -1,41 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
  <string name="aecm">AECM</string>
  <string name="appName">WebRTC Engine Demo</string>
  <string name="aRxPort">Audio Rx Port</string>
  <string name="aTxPort">Audio Tx Port</string>
  <string name="autoGainControl">AGC</string>
  <string name="backCamera">SwitchToBack</string>
  <string name="codecSize">Codec Size</string>
  <string name="codecType">Codec Type</string>
  <string name="debugRecording">APMRecord</string>
  <string name="demoTitle">Video Engine Android Demo</string>
  <string name="enableVideoReceive">Video Receive</string>
  <string name="enableVideoSend">Video Send</string>
  <string name="enableAudio">Audio</string>
  <string name="error">Error</string>
  <string name="errorCamera">Camera Error</string>
  <string name="exit">Exit</string>
  <string name="frontCamera">SwitchToFront</string>
  <string name="gSettings">Global Settings</string>
  <string name="loopback">Loopback</string>
  <string name="loopbackIp">127.0.0.1</string>
  <string name="nack">NACK</string>
  <string name="noiseSuppression">NS</string>
  <string name="remoteIp">Remote IP address</string>
  <string name="rtpDump">rtpdump</string>
  <string name="speaker">Speaker</string>
  <string name="startBoth">Start Both</string>
  <string name="startCall">StartCall</string>
  <string name="startListen">Start Listen</string>
  <string name="startSend">Start Send</string>
  <string name="stats">Stats</string>
  <string name="statsOn">Stats on</string>
  <string name="statsOff">Stats off</string>
  <string name="stopCall">StopCall</string>
  <string name="surfaceView">SurfaceView</string>
  <string name="tag">WEBRTC</string>
  <string name="vRxPort">Video Rx Port</string>
  <string name="vSettings">Video Settings</string>
  <string name="vTxPort">Video Tx Port</string>
</resources>
@@ -1,156 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import java.lang.Integer;

public class AudioMenuFragment extends Fragment {

  private String TAG;
  private MenuStateProvider stateProvider;

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container,
                           Bundle savedInstanceState) {
    View v = inflater.inflate(R.layout.audiomenu, container, false);

    TAG = getResources().getString(R.string.tag);

    String[] audioCodecsStrings = getEngine().audioCodecsAsString();
    Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
    spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
                                                   R.layout.dropdownitems,
                                                   audioCodecsStrings,
                                                   inflater));
    spAudioCodecType.setSelection(getEngine().audioCodecIndex());
    spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
        public void onItemSelected(AdapterView<?> adapterView, View view,
                                   int position, long id) {
          getEngine().setAudioCodec(position);
        }
        public void onNothingSelected(AdapterView<?> arg0) {
          Log.d(TAG, "No setting selected");
        }
      });

    EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
    etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
    etATxPort.setOnClickListener(new View.OnClickListener() {
        public void onClick(View editText) {
          EditText etATxPort = (EditText) editText;
          getEngine()
              .setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
          etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
        }
      });
    EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
    etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
    etARxPort.setOnClickListener(new View.OnClickListener() {
        public void onClick(View editText) {
          EditText etARxPort = (EditText) editText;
          getEngine()
              .setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
          etARxPort.setText(Integer.toString(getEngine().audioRxPort()));

        }
      });

    CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
    cbEnableAecm.setChecked(getEngine().aecmEnabled());
    cbEnableAecm.setOnClickListener(new View.OnClickListener() {
        public void onClick(View checkBox) {
          CheckBox cbEnableAecm = (CheckBox) checkBox;
          getEngine().setEc(cbEnableAecm.isChecked());
          cbEnableAecm.setChecked(getEngine().aecmEnabled());
        }
      });
    CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
    cbEnableNs.setChecked(getEngine().nsEnabled());
    cbEnableNs.setOnClickListener(new View.OnClickListener() {
        public void onClick(View checkBox) {
          CheckBox cbEnableNs = (CheckBox) checkBox;
          getEngine().setNs(cbEnableNs.isChecked());
          cbEnableNs.setChecked(getEngine().nsEnabled());
        }
      });
    CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
    cbEnableAgc.setChecked(getEngine().agcEnabled());
    cbEnableAgc.setOnClickListener(new View.OnClickListener() {
        public void onClick(View checkBox) {
          CheckBox cbEnableAgc = (CheckBox) checkBox;
          getEngine().setAgc(cbEnableAgc.isChecked());
          cbEnableAgc.setChecked(getEngine().agcEnabled());
        }
      });
    CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
    cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
    cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
        public void onClick(View checkBox) {
          CheckBox cbEnableSpeaker = (CheckBox) checkBox;
          getEngine().setSpeaker(cbEnableSpeaker.isChecked());
          cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
        }
      });
    CheckBox cbEnableDebugAPM =
        (CheckBox) v.findViewById(R.id.cbDebugRecording);
    cbEnableDebugAPM.setChecked(getEngine().apmRecord());
    cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
        public void onClick(View checkBox) {
          CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
          getEngine().setDebuging(cbEnableDebugAPM.isChecked());
          cbEnableDebugAPM.setChecked(getEngine().apmRecord());
        }
      });
    CheckBox cbEnableAudioRTPDump =
        (CheckBox) v.findViewById(R.id.cbAudioRTPDump);
    cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
    cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
        public void onClick(View checkBox) {
          CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
          getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
          cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
        }
      });
    return v;
  }

  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);

    // This makes sure that the container activity has implemented
    // the callback interface. If not, it throws an exception.
    try {
      stateProvider = (MenuStateProvider) activity;
    } catch (ClassCastException e) {
      throw new ClassCastException(activity +
                                   " must implement MenuStateProvider");
    }
  }

  private MediaEngine getEngine() {
    return stateProvider.getEngine();
  }

}
@ -1,39 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

public class CodecInst {
  private final long nativeCodecInst;

  // CodecInst can only be created from the native layer.
  private CodecInst(long nativeCodecInst) {
    this.nativeCodecInst = nativeCodecInst;
  }

  public String toString() {
    return name() + " " +
        "PlType: " + plType() + " " +
        "PlFreq: " + plFrequency() + " " +
        "Size: " + pacSize() + " " +
        "Channels: " + channels() + " " +
        "Rate: " + rate();
  }

  // Dispose must be called before all references to CodecInst are lost as it
  // will free memory allocated in the native layer.
  public native void dispose();
  public native int plType();
  public native String name();
  public native int plFrequency();
  public native int pacSize();
  public native int channels();
  public native int rate();
}
@ -1,123 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;

public class MainMenuFragment extends Fragment implements MediaEngineObserver {

  private String TAG;
  private MenuStateProvider stateProvider;

  private Button btStartStopCall;
  private TextView tvStats;

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container,
      Bundle savedInstanceState) {
    View v = inflater.inflate(R.layout.mainmenu, container, false);

    TAG = getResources().getString(R.string.tag);

    Button btStats = (Button) v.findViewById(R.id.btStats);
    boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
    enableStats(btStats, stats);
    btStats.setOnClickListener(new View.OnClickListener() {
      public void onClick(View button) {
        boolean turnOnStats = ((Button) button).getText().equals(
            getResources().getString(R.string.statsOn));
        enableStats((Button) button, turnOnStats);
      }
    });
    tvStats = (TextView) v.findViewById(R.id.tvStats);

    btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
    btStartStopCall.setText(getEngine().isRunning() ?
        R.string.stopCall :
        R.string.startCall);
    btStartStopCall.setOnClickListener(new View.OnClickListener() {
      public void onClick(View button) {
        toggleStart();
      }
    });
    return v;
  }

  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);

    // This makes sure that the container activity has implemented
    // the callback interface. If not, it throws an exception.
    try {
      stateProvider = (MenuStateProvider) activity;
    } catch (ClassCastException e) {
      throw new ClassCastException(activity +
          " must implement MenuStateProvider");
    }
  }

  // tvStats need to be updated on the UI thread.
  public void newStats(final String stats) {
    getActivity().runOnUiThread(new Runnable() {
      public void run() {
        tvStats.setText(stats);
      }
    });
  }

  private MediaEngine getEngine() {
    return stateProvider.getEngine();
  }

  private void enableStats(Button btStats, boolean enable) {
    if (enable) {
      getEngine().setObserver(this);
    } else {
      getEngine().setObserver(null);
      // Clear old stats text by posting empty stats.
      newStats("");
    }
    // If stats was true it was just turned on. This means that
    // clicking the button again should turn off stats.
    btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
  }


  public void toggleStart() {
    if (getEngine().isRunning()) {
      stopAll();
    } else {
      startCall();
    }
    btStartStopCall.setText(getEngine().isRunning() ?
        R.string.stopCall :
        R.string.startCall);
  }

  public void stopAll() {
    getEngine().stop();
  }

  private void startCall() {
    getEngine().start();
  }
}
@ -1,321 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Environment;
import android.util.Log;
import android.view.OrientationEventListener;
import java.io.File;

public class MediaEngine {
  private static final String LOG_DIR = "webrtc";

  // Checks for and communicate failures to user (logcat and popup).
  private void check(boolean value, String message) {
    if (value) {
      return;
    }
    Log.e("WEBRTC-CHECK", message);
    AlertDialog alertDialog = new AlertDialog.Builder(context).create();
    alertDialog.setTitle("WebRTC Error");
    alertDialog.setMessage(message);
    alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
        "OK",
        new DialogInterface.OnClickListener() {
          public void onClick(DialogInterface dialog, int which) {
            dialog.dismiss();
            return;
          }
        }
        );
    alertDialog.show();
  }


  // Shared Audio/Video members.
  private final Context context;
  private String remoteIp;
  private boolean enableTrace;

  // Audio
  private VoiceEngine voe;
  private int audioChannel;
  private boolean audioEnabled;
  private boolean voeRunning;
  private int audioCodecIndex;
  private int audioTxPort;
  private int audioRxPort;

  private boolean speakerEnabled;
  private boolean headsetPluggedIn;
  private boolean enableAgc;
  private boolean enableNs;
  private boolean enableAecm;

  private BroadcastReceiver headsetListener;

  private boolean audioRtpDump;
  private boolean apmRecord;

  private int inFps;
  private int inKbps;
  private int outFps;
  private int outKbps;
  private int inWidth;
  private int inHeight;

  public MediaEngine(Context context) {
    this.context = context;
    voe = new VoiceEngine();
    check(voe.init() == 0, "Failed voe Init");
    audioChannel = voe.createChannel();
    check(audioChannel >= 0, "Failed voe CreateChannel");
    check(audioChannel >= 0, "Failed voe CreateChannel");

    check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
        "VoE set Aecm speakerphone mode failed");

    // Set audio mode to communication
    AudioManager audioManager =
        ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
    // Listen to headset being plugged in/out.
    IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
    headsetListener = new BroadcastReceiver() {
      @Override
      public void onReceive(Context context, Intent intent) {
        if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG) == 0) {
          headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
          updateAudioOutput();
        }
      }
    };
    context.registerReceiver(headsetListener, receiverFilter);
  }

  public void dispose() {
    check(!voeRunning && !voeRunning, "Engines must be stopped before dispose");
    context.unregisterReceiver(headsetListener);
    check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
    voe.dispose();
  }

  public void start() {
    if (audioEnabled) {
      startVoE();
    }
  }

  public void stop() {
    stopVoe();
  }

  public boolean isRunning() {
    return voeRunning;
  }

  public void setRemoteIp(String remoteIp) {
    this.remoteIp = remoteIp;
    UpdateSendDestination();
  }

  public String remoteIp() { return remoteIp; }

  private String getDebugDirectory() {
    // Should create a folder in /scard/|LOG_DIR|
    return Environment.getExternalStorageDirectory().toString() + "/" +
        LOG_DIR;
  }

  private boolean createDebugDirectory() {
    File webrtc_dir = new File(getDebugDirectory());
    if (!webrtc_dir.exists()) {
      return webrtc_dir.mkdir();
    }
    return webrtc_dir.isDirectory();
  }

  public void startVoE() {
    check(!voeRunning, "VoE already started");
    check(voe.startListen(audioChannel) == 0, "Failed StartListen");
    check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
    check(voe.startSend(audioChannel) == 0, "VoE start send failed");
    voeRunning = true;
  }

  private void stopVoe() {
    check(voeRunning, "VoE not started");
    check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
    check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
    check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
    voeRunning = false;
  }

  public void setAudio(boolean audioEnabled) {
    this.audioEnabled = audioEnabled;
  }

  public boolean audioEnabled() { return audioEnabled; }

  public int audioCodecIndex() { return audioCodecIndex; }

  public void setAudioCodec(int codecNumber) {
    audioCodecIndex = codecNumber;
    CodecInst codec = voe.getCodec(codecNumber);
    check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
    codec.dispose();
  }

  public String[] audioCodecsAsString() {
    String[] retVal = new String[voe.numOfCodecs()];
    for (int i = 0; i < voe.numOfCodecs(); ++i) {
      CodecInst codec = voe.getCodec(i);
      retVal[i] = codec.toString();
      codec.dispose();
    }
    return retVal;
  }

  private CodecInst[] defaultAudioCodecs() {
    CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
    for (int i = 0; i < voe.numOfCodecs(); ++i) {
      retVal[i] = voe.getCodec(i);
    }
    return retVal;
  }

  public int getIsacIndex() {
    CodecInst[] codecs = defaultAudioCodecs();
    for (int i = 0; i < codecs.length; ++i) {
      if (codecs[i].name().contains("ISAC")) {
        return i;
      }
    }
    return 0;
  }

  public void setAudioTxPort(int audioTxPort) {
    this.audioTxPort = audioTxPort;
    UpdateSendDestination();
  }

  public int audioTxPort() { return audioTxPort; }

  public void setAudioRxPort(int audioRxPort) {
    check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
        "Failed setLocalReceiver");
    this.audioRxPort = audioRxPort;
  }

  public int audioRxPort() { return audioRxPort; }

  public boolean agcEnabled() { return enableAgc; }

  public void setAgc(boolean enable) {
    enableAgc = enable;
    VoiceEngine.AgcConfig agc_config =
        new VoiceEngine.AgcConfig(3, 9, true);
    check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
    check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
        "VoE set AGC Status failed");
  }

  public boolean nsEnabled() { return enableNs; }

  public void setNs(boolean enable) {
    enableNs = enable;
    check(voe.setNsStatus(enableNs,
            VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
        "VoE set NS Status failed");
  }

  public boolean aecmEnabled() { return enableAecm; }

  public void setEc(boolean enable) {
    enableAecm = enable;
    check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
        "voe setEcStatus");
  }

  public boolean speakerEnabled() {
    return speakerEnabled;
  }

  public void setSpeaker(boolean enable) {
    speakerEnabled = enable;
    updateAudioOutput();
  }

  // Debug helpers.
  public boolean apmRecord() { return apmRecord; }

  public boolean audioRtpDump() { return audioRtpDump; }

  public void setDebuging(boolean enable) {
    apmRecord = enable;
    if (!enable) {
      check(voe.stopDebugRecording() == 0, "Failed stopping debug");
      return;
    }
    if (!createDebugDirectory()) {
      check(false, "Unable to create debug directory.");
      return;
    }
    String debugDirectory = getDebugDirectory();
    check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
            System.currentTimeMillis())) == 0,
        "Failed starting debug");
  }

  public void setIncomingVoeRtpDump(boolean enable) {
    audioRtpDump = enable;
    if (!enable) {
      check(voe.stopRtpDump(audioChannel,
              VoiceEngine.RtpDirections.INCOMING) == 0,
          "voe stopping rtp dump");
      return;
    }
    String debugDirectory = getDebugDirectory();
    check(voe.startRtpDump(audioChannel, debugDirectory +
            String.format("/voe_%d.rtp", System.currentTimeMillis()),
            VoiceEngine.RtpDirections.INCOMING) == 0,
        "voe starting rtp dump");
  }

  private void updateAudioOutput() {
    boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
    AudioManager audioManager =
        ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE));
    audioManager.setSpeakerphoneOn(useSpeaker);
  }

  private void UpdateSendDestination() {
    if (remoteIp == null) {
      return;
    }
    if (audioTxPort != 0) {
      check(voe.setSendDestination(audioChannel, audioTxPort,
          remoteIp) == 0, "VoE set send destination failed");
    }
  }

  MediaEngineObserver observer;
  public void setObserver(MediaEngineObserver observer) {
    this.observer = observer;
  }
}
@ -1,15 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

public interface MediaEngineObserver {
  void newStats(String stats);
}
@ -1,15 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

public interface MenuStateProvider {
  public MediaEngine getEngine();
}
@ -1,22 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.content.Context;

public class NativeWebRtcContextRegistry {
  static {
    System.loadLibrary("webrtcdemo-jni");
  }

  public native void register(Context context);
  public native void unRegister();
}
@ -1,32 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

public class RtcpStatistics {
  // Definition of fraction lost can be found in RFC3550.
  // It is equivalent to taking the integer part after multiplying the loss
  // fraction by 256.
  public final int fractionLost;
  public final int cumulativeLost;
  public final int extendedMax;
  public final int jitter;
  public final int rttMs;

  // Only allowed to be created by the native layer.
  private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
      int jitter, int rttMs) {
    this.fractionLost = fractionLost;
    this.cumulativeLost = cumulativeLost;
    this.extendedMax = extendedMax;
    this.jitter = jitter;
    this.rttMs = rttMs;
  }
}
@ -1,129 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.RadioGroup;
import android.widget.TextView;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Enumeration;

public class SettingsMenuFragment extends Fragment
    implements RadioGroup.OnCheckedChangeListener {

  private String TAG;
  private MenuStateProvider stateProvider;

  EditText etRemoteIp;

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup container,
      Bundle savedInstanceState) {
    View v = inflater.inflate(R.layout.settingsmenu, container, false);

    TAG = getResources().getString(R.string.tag);

    CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
    cbAudio.setChecked(getEngine().audioEnabled());
    cbAudio.setOnClickListener(new View.OnClickListener() {
      public void onClick(View checkBox) {
        CheckBox cbAudio = (CheckBox) checkBox;
        getEngine().setAudio(cbAudio.isChecked());
        cbAudio.setChecked(getEngine().audioEnabled());
      }
    });
    boolean loopback =
        getResources().getBoolean(R.bool.loopback_enabled_default);
    CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
    cbLoopback.setChecked(loopback);
    cbLoopback.setOnClickListener(new View.OnClickListener() {
      public void onClick(View checkBox) {
        loopbackChanged((CheckBox) checkBox);
      }
    });
    etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
    etRemoteIp.setOnFocusChangeListener(new View.OnFocusChangeListener() {
      public void onFocusChange(View editText, boolean hasFocus) {
        if (!hasFocus) {
          getEngine().setRemoteIp(etRemoteIp.getText().toString());
        }
      }
    });
    // Has to be after remote IP as loopback changes it.
    loopbackChanged(cbLoopback);
    return v;
  }

  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);

    // This makes sure that the container activity has implemented
    // the callback interface. If not, it throws an exception.
    try {
      stateProvider = (MenuStateProvider) activity;
    } catch (ClassCastException e) {
      throw new ClassCastException(activity +
          " must implement MenuStateProvider");
    }
  }

  private void loopbackChanged(CheckBox cbLoopback) {
    boolean loopback = cbLoopback.isChecked();
    etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
    getEngine().setRemoteIp(etRemoteIp.getText().toString());
  }

  private String getLoopbackIPString() {
    return getResources().getString(R.string.loopbackIp);
  }

  private String getLocalIpAddress() {
    String localIp = "";
    try {
      for (Enumeration<NetworkInterface> en = NetworkInterface
               .getNetworkInterfaces(); en.hasMoreElements();) {
        NetworkInterface intf = en.nextElement();
        for (Enumeration<InetAddress> enumIpAddr =
                 intf.getInetAddresses();
             enumIpAddr.hasMoreElements(); ) {
          InetAddress inetAddress = enumIpAddr.nextElement();
          if (!inetAddress.isLoopbackAddress()) {
            // Set the remote ip address the same as
            // the local ip address of the last netif
            localIp = inetAddress.getHostAddress().toString();
          }
        }
      }
    } catch (SocketException e) {
      Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
    }
    return localIp;
  }

  private MediaEngine getEngine() {
    return stateProvider.getEngine();
  }

  @Override
  public void onCheckedChanged(RadioGroup group, int checkedId) {
  }
}
@ -1,49 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.widget.ArrayAdapter;
import android.content.Context;
import android.widget.TextView;
import android.view.View;
import android.view.ViewGroup;
import android.view.LayoutInflater;

public class SpinnerAdapter extends ArrayAdapter<String> {
  private String[] menuItems;
  LayoutInflater inflater;
  int textViewResourceId;

  public SpinnerAdapter(Context context, int textViewResourceId,
      String[] objects, LayoutInflater inflater) {
    super(context, textViewResourceId, objects);
    menuItems = objects;
    this.inflater = inflater;
    this.textViewResourceId = textViewResourceId;
  }

  @Override public View getDropDownView(int position, View convertView,
      ViewGroup parent) {
    return getCustomView(position, convertView, parent);
  }

  @Override public View getView(int position, View convertView,
      ViewGroup parent) {
    return getCustomView(position, convertView, parent);
  }

  private View getCustomView(int position, View v, ViewGroup parent) {
    View row = inflater.inflate(textViewResourceId, parent, false);
    TextView label = (TextView) row.findViewById(R.id.spinner_row);
    label.setText(menuItems[position]);
    return row;
  }
}
@ -1,117 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

public class VoiceEngine {
  private final long nativeVoiceEngine;

  // Keep in sync (including this comment) with
  // webrtc/common_types.h:NsModes
  public enum NsModes {
    UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
    MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
  }

  // Keep in sync (including this comment) with
  // webrtc/common_types.h:AgcModes
  public enum AgcModes {
    UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
    FIXED_DIGITAL
  }

  // Keep in sync (including this comment) with
  // webrtc/common_types.h:AecmModes
  public enum AecmModes {
    QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
    SPEAKERPHONE, LOUD_SPEAKERPHONE
  }

  // Keep in sync (including this comment) with
  // webrtc/common_types.h:EcModes
  public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }

  // Keep in sync (including this comment) with
  // webrtc/common_types.h:RtpDirections
  public enum RtpDirections { INCOMING, OUTGOING }

  public static class AgcConfig {
    AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
        boolean limiterEnable) {
      this.targetLevelDbOv = targetLevelDbOv;
      this.digitalCompressionGaindB = digitalCompressionGaindB;
      this.limiterEnable = limiterEnable;
    }
    private final int targetLevelDbOv;
    private final int digitalCompressionGaindB;
    private final boolean limiterEnable;
  }

  public VoiceEngine() {
    nativeVoiceEngine = create();
  }
  private static native long create();
  public native int init();
  public native void dispose();
  public native int createChannel();
  public native int deleteChannel(int channel);
  public native int setLocalReceiver(int channel, int port);
  public native int setSendDestination(int channel, int port, String ipaddr);
  public native int startListen(int channel);
  public native int startPlayout(int channel);
  public native int startSend(int channel);
  public native int stopListen(int channel);
  public native int stopPlayout(int channel);
  public native int stopSend(int channel);
  public native int setSpeakerVolume(int volume);
  public native int setLoudspeakerStatus(boolean enable);
  public native int startPlayingFileLocally(
      int channel,
      String fileName,
      boolean loop);
  public native int stopPlayingFileLocally(int channel);
  public native int startPlayingFileAsMicrophone(
      int channel,
      String fileName,
      boolean loop);
  public native int stopPlayingFileAsMicrophone(int channel);
  public native int numOfCodecs();
  public native CodecInst getCodec(int index);
  public native int setSendCodec(int channel, CodecInst codec);
  public int setEcStatus(boolean enable, EcModes mode) {
    return setEcStatus(enable, mode.ordinal());
  }
  private native int setEcStatus(boolean enable, int ec_mode);
  public int setAecmMode(AecmModes aecm_mode, boolean cng) {
    return setAecmMode(aecm_mode.ordinal(), cng);
  }
  private native int setAecmMode(int aecm_mode, boolean cng);
  public int setAgcStatus(boolean enable, AgcModes agc_mode) {
    return setAgcStatus(enable, agc_mode.ordinal());
  }
  private native int setAgcStatus(boolean enable, int agc_mode);
  public native int setAgcConfig(AgcConfig agc_config);
  public int setNsStatus(boolean enable, NsModes ns_mode) {
    return setNsStatus(enable, ns_mode.ordinal());
  }
  private native int setNsStatus(boolean enable, int ns_mode);
  public native int startDebugRecording(String file);
  public native int stopDebugRecording();
  public int startRtpDump(int channel, String file,
      RtpDirections direction) {
    return startRtpDump(channel, file, direction.ordinal());
  }
  private native int startRtpDump(int channel, String file,
      int direction);
  public int stopRtpDump(int channel, RtpDirections direction) {
    return stopRtpDump(channel, direction.ordinal());
  }
  private native int stopRtpDump(int channel, int direction);
}
@ -1,210 +0,0 @@
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.app.ActionBar.Tab;
import android.app.ActionBar.TabListener;
import android.app.ActionBar;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.pm.ActivityInfo;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Handler;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.WindowManager;

public class WebRTCDemo extends Activity implements MenuStateProvider {

  // From http://developer.android.com/guide/topics/ui/actionbar.html
  public static class TabListener<T extends Fragment>
      implements ActionBar.TabListener {
    private Fragment fragment;
    private final Activity activity;
    private final String tag;
    private final Class<T> instance;
    private final Bundle args;

    public TabListener(Activity activity, String tag, Class<T> clz) {
      this(activity, tag, clz, null);
    }

    public TabListener(Activity activity, String tag, Class<T> clz,
        Bundle args) {
      this.activity = activity;
      this.tag = tag;
      this.instance = clz;
      this.args = args;
    }

    public void onTabSelected(Tab tab, FragmentTransaction ft) {
      // Check if the fragment is already initialized
      if (fragment == null) {
        // If not, instantiate and add it to the activity
        fragment = Fragment.instantiate(activity, instance.getName(), args);
        ft.add(android.R.id.content, fragment, tag);
      } else {
        // If it exists, simply attach it in order to show it
        ft.attach(fragment);
      }
    }

    public void onTabUnselected(Tab tab, FragmentTransaction ft) {
      if (fragment != null) {
        // Detach the fragment, because another one is being attached
        ft.detach(fragment);
      }
    }

    public void onTabReselected(Tab tab, FragmentTransaction ft) {
      // User selected the already selected tab. Do nothing.
    }
  }

  private NativeWebRtcContextRegistry contextRegistry = null;
  private MediaEngine mediaEngine = null;
  private Handler handler;
  public MediaEngine getEngine() { return mediaEngine; }

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    // Global settings.
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    // State.
    // Must be instantiated before MediaEngine.
    contextRegistry = new NativeWebRtcContextRegistry();
    contextRegistry.register(this);

    // Load all settings dictated in xml.
    mediaEngine = new MediaEngine(this);
    mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));

    mediaEngine.setAudio(getResources().getBoolean(
        R.bool.audio_enabled_default));
    mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
    mediaEngine.setAudioRxPort(getResources().getInteger(
        R.integer.aRxPortDefault));
    mediaEngine.setAudioTxPort(getResources().getInteger(
        R.integer.aTxPortDefault));
    mediaEngine.setSpeaker(getResources().getBoolean(
        R.bool.speaker_enabled_default));
    mediaEngine.setDebuging(getResources().getBoolean(
        R.bool.apm_debug_enabled_default));

    // Create action bar with all tabs.
    ActionBar actionBar = getActionBar();
    actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
    actionBar.setDisplayShowTitleEnabled(false);

    Tab tab = actionBar.newTab()
        .setText("Main")
        .setTabListener(new TabListener<MainMenuFragment>(
            this, "main", MainMenuFragment.class));
    actionBar.addTab(tab);

    tab = actionBar.newTab()
        .setText("Settings")
        .setTabListener(new TabListener<SettingsMenuFragment>(
            this, "Settings", SettingsMenuFragment.class));
    actionBar.addTab(tab);

    tab = actionBar.newTab()
        .setText("Audio")
        .setTabListener(new TabListener<AudioMenuFragment>(
            this, "Audio", AudioMenuFragment.class));
    actionBar.addTab(tab);

    enableTimedStartStop();

    // Hint that voice call audio stream should be used for hardware volume
    // controls.
    setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    MenuInflater inflater = getMenuInflater();
    inflater.inflate(R.menu.main_activity_actions, menu);
    return super.onCreateOptionsMenu(menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    // Handle presses on the action bar items
    switch (item.getItemId()) {
      case R.id.action_exit:
        MainMenuFragment main = (MainMenuFragment)getFragmentManager()
            .findFragmentByTag("main");
        main.stopAll();
        finish();
        return true;
      default:
        return super.onOptionsItemSelected(item);
    }
  }

  @Override
  public void onDestroy() {
    disableTimedStartStop();
    mediaEngine.dispose();
    contextRegistry.unRegister();
    super.onDestroy();
  }

  @Override
  public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (keyCode == KeyEvent.KEYCODE_BACK) {
      // Prevent app from running in the background.
      MainMenuFragment main = (MainMenuFragment)getFragmentManager()
          .findFragmentByTag("main");
      main.stopAll();
      finish();
      return true;
    }
    return super.onKeyDown(keyCode, event);
  }

  private int getCallRestartPeriodicity() {
    return getResources().getInteger(R.integer.call_restart_periodicity_ms);
  }

  // Thread repeatedly calling start/stop.
  void enableTimedStartStop() {
    if (getCallRestartPeriodicity() > 0) {
      // Periodicity == 0 <-> Disabled.
      handler = new Handler();
      handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
    }
  }

  void disableTimedStartStop() {
    if (handler != null) {
      handler.removeCallbacks(startOrStopCallback);
    }
  }

  private Runnable startOrStopCallback = new Runnable() {
    public void run() {
      MainMenuFragment main = (MainMenuFragment)getFragmentManager()
          .findFragmentByTag("main");
      main.toggleStart();
      handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
    }
  };
}
@ -1,409 +0,0 @@
#
# Copyright 2012 The WebRTC Project Authors. All rights reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

{
  'includes': [
    '../talk/build/common.gypi',
  ],
  'targets': [
    {
      'target_name': 'relayserver',
      'type': 'executable',
      'dependencies': [
        '../talk/libjingle.gyp:libjingle',
        '../talk/libjingle.gyp:libjingle_p2p',
      ],
      'sources': [
        'examples/relayserver/relayserver_main.cc',
      ],
    }, # target relayserver
    {
      'target_name': 'stunserver',
      'type': 'executable',
      'dependencies': [
        '../talk/libjingle.gyp:libjingle',
        '../talk/libjingle.gyp:libjingle_p2p',
      ],
      'sources': [
        'examples/stunserver/stunserver_main.cc',
      ],
    }, # target stunserver
    {
      'target_name': 'turnserver',
      'type': 'executable',
      'dependencies': [
        '../talk/libjingle.gyp:libjingle',
        '../talk/libjingle.gyp:libjingle_p2p',
      ],
      'sources': [
        'examples/turnserver/turnserver_main.cc',
      ],
    }, # target turnserver
    {
      'target_name': 'peerconnection_server',
      'type': 'executable',
      'sources': [
        'examples/peerconnection/server/data_socket.cc',
        'examples/peerconnection/server/data_socket.h',
        'examples/peerconnection/server/main.cc',
        'examples/peerconnection/server/peer_channel.cc',
        'examples/peerconnection/server/peer_channel.h',
        'examples/peerconnection/server/utils.cc',
        'examples/peerconnection/server/utils.h',
      ],
      'dependencies': [
        '<(webrtc_root)/common.gyp:webrtc_common',
        '../talk/libjingle.gyp:libjingle',
      ],
      # TODO(ronghuawu): crbug.com/167187 fix size_t to int truncations.
      'msvs_disabled_warnings': [ 4309, ],
    }, # target peerconnection_server
  ],
  'conditions': [
    ['OS=="linux" or OS=="win"', {
      'targets': [
        {
          'target_name': 'peerconnection_client',
          'type': 'executable',
          'sources': [
            'examples/peerconnection/client/conductor.cc',
            'examples/peerconnection/client/conductor.h',
            'examples/peerconnection/client/defaults.cc',
            'examples/peerconnection/client/defaults.h',
            'examples/peerconnection/client/peer_connection_client.cc',
            'examples/peerconnection/client/peer_connection_client.h',
          ],
          'dependencies': [
            '../talk/libjingle.gyp:libjingle_peerconnection',
            '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
            '<@(libjingle_tests_additional_deps)',
          ],
          'conditions': [
            ['build_json==1', {
              'dependencies': [
                '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
              ],
            }],
            # TODO(ronghuawu): Move these files to a win/ directory then they
            # can be excluded automatically.
            ['OS=="win"', {
              'sources': [
                'examples/peerconnection/client/flagdefs.h',
                'examples/peerconnection/client/main.cc',
                'examples/peerconnection/client/main_wnd.cc',
                'examples/peerconnection/client/main_wnd.h',
              ],
              'msvs_settings': {
                'VCLinkerTool': {
                  'SubSystem': '2', # Windows
                },
              },
            }], # OS=="win"
            ['OS=="linux"', {
              'sources': [
                'examples/peerconnection/client/linux/main.cc',
                'examples/peerconnection/client/linux/main_wnd.cc',
                'examples/peerconnection/client/linux/main_wnd.h',
              ],
              'cflags': [
                '<!@(pkg-config --cflags glib-2.0 gobject-2.0 gtk+-2.0)',
              ],
              'link_settings': {
                'ldflags': [
                  '<!@(pkg-config --libs-only-L --libs-only-other glib-2.0'
                  ' gobject-2.0 gthread-2.0 gtk+-2.0)',
                ],
                'libraries': [
                  '<!@(pkg-config --libs-only-l glib-2.0 gobject-2.0'
                  ' gthread-2.0 gtk+-2.0)',
                  '-lX11',
                  '-lXcomposite',
                  '-lXext',
                  '-lXrender',
                ],
              },
            }], # OS=="linux"
          ], # conditions
        }, # target peerconnection_client
      ], # targets
    }], # OS=="linux" or OS=="win"

    ['OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.8")', {
      'targets': [
        {
          'target_name': 'apprtc_common',
          'type': 'static_library',
          'dependencies': [
            '<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
            '../talk/libjingle.gyp:libjingle_peerconnection_objc',
          ],
          'sources': [
            'examples/objc/AppRTCDemo/common/ARDUtilities.h',
            'examples/objc/AppRTCDemo/common/ARDUtilities.m',
          ],
          'include_dirs': [
            'examples/objc/AppRTCDemo/common',
          ],
          'direct_dependent_settings': {
            'include_dirs': [
              'examples/objc/AppRTCDemo/common',
            ],
          },
          'conditions': [
            ['OS=="mac"', {
              'xcode_settings': {
                'MACOSX_DEPLOYMENT_TARGET' : '10.8',
              },
            }],
          ],
          'link_settings': {
            'xcode_settings': {
              'OTHER_LDFLAGS': [
                '-framework QuartzCore',
              ],
            },
          },
        },
        {
          'target_name': 'apprtc_signaling',
          'type': 'static_library',
          'dependencies': [
            'apprtc_common',
            '../talk/libjingle.gyp:libjingle_peerconnection_objc',
            'socketrocket',
          ],
          'sources': [
            'examples/objc/AppRTCDemo/ARDAppClient.h',
            'examples/objc/AppRTCDemo/ARDAppClient.m',
            'examples/objc/AppRTCDemo/ARDAppClient+Internal.h',
            'examples/objc/AppRTCDemo/ARDAppEngineClient.h',
            'examples/objc/AppRTCDemo/ARDAppEngineClient.m',
            'examples/objc/AppRTCDemo/ARDBitrateTracker.h',
            'examples/objc/AppRTCDemo/ARDBitrateTracker.m',
            'examples/objc/AppRTCDemo/ARDCEODTURNClient.h',
            'examples/objc/AppRTCDemo/ARDCEODTURNClient.m',
            'examples/objc/AppRTCDemo/ARDJoinResponse.h',
            'examples/objc/AppRTCDemo/ARDJoinResponse.m',
            'examples/objc/AppRTCDemo/ARDJoinResponse+Internal.h',
            'examples/objc/AppRTCDemo/ARDMessageResponse.h',
            'examples/objc/AppRTCDemo/ARDMessageResponse.m',
            'examples/objc/AppRTCDemo/ARDMessageResponse+Internal.h',
            'examples/objc/AppRTCDemo/ARDRoomServerClient.h',
            'examples/objc/AppRTCDemo/ARDSDPUtils.h',
            'examples/objc/AppRTCDemo/ARDSDPUtils.m',
            'examples/objc/AppRTCDemo/ARDSignalingChannel.h',
            'examples/objc/AppRTCDemo/ARDSignalingMessage.h',
            'examples/objc/AppRTCDemo/ARDSignalingMessage.m',
            'examples/objc/AppRTCDemo/ARDStatsBuilder.h',
            'examples/objc/AppRTCDemo/ARDStatsBuilder.m',
            'examples/objc/AppRTCDemo/ARDTURNClient.h',
            'examples/objc/AppRTCDemo/ARDWebSocketChannel.h',
            'examples/objc/AppRTCDemo/ARDWebSocketChannel.m',
            'examples/objc/AppRTCDemo/RTCICECandidate+JSON.h',
            'examples/objc/AppRTCDemo/RTCICECandidate+JSON.m',
            'examples/objc/AppRTCDemo/RTCICEServer+JSON.h',
            'examples/objc/AppRTCDemo/RTCICEServer+JSON.m',
            'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h',
            'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.m',
            'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h',
            'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.m',
          ],
          'include_dirs': [
            'examples/objc/AppRTCDemo',
          ],
          'direct_dependent_settings': {
            'include_dirs': [
              'examples/objc/AppRTCDemo',
            ],
          },
          'export_dependent_settings': [
            '../talk/libjingle.gyp:libjingle_peerconnection_objc',
          ],
          'conditions': [
            ['OS=="mac"', {
              'xcode_settings': {
                'MACOSX_DEPLOYMENT_TARGET' : '10.8',
              },
            }],
          ],
        },
        {
          'target_name': 'AppRTCDemo',
          'type': 'executable',
          'product_name': 'AppRTCDemo',
          'mac_bundle': 1,
          'dependencies': [
            'apprtc_common',
            'apprtc_signaling',
          ],
          'conditions': [
            ['OS=="ios"', {
              'mac_bundle_resources': [
                'examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png',
                'examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png',
                'examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png',
                'examples/objc/AppRTCDemo/ios/resources/Roboto-Regular.ttf',
                'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp.png',
                'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp@2x.png',
                'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp.png',
                'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp@2x.png',
                'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp.png',
                'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp@2x.png',
                'examples/objc/Icon.png',
              ],
              'sources': [
                'examples/objc/AppRTCDemo/ios/ARDAppDelegate.h',
                'examples/objc/AppRTCDemo/ios/ARDAppDelegate.m',
                'examples/objc/AppRTCDemo/ios/ARDMainView.h',
                'examples/objc/AppRTCDemo/ios/ARDMainView.m',
                'examples/objc/AppRTCDemo/ios/ARDMainViewController.h',
                'examples/objc/AppRTCDemo/ios/ARDMainViewController.m',
                'examples/objc/AppRTCDemo/ios/ARDStatsView.h',
                'examples/objc/AppRTCDemo/ios/ARDStatsView.m',
                'examples/objc/AppRTCDemo/ios/ARDVideoCallView.h',
                'examples/objc/AppRTCDemo/ios/ARDVideoCallView.m',
                'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h',
                'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m',
                'examples/objc/AppRTCDemo/ios/AppRTCDemo-Prefix.pch',
                'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.h',
                'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.m',
                'examples/objc/AppRTCDemo/ios/main.m',
              ],
              'xcode_settings': {
                'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/ios/Info.plist',
              },
            }],
            ['OS=="mac"', {
              'sources': [
                'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.h',
                'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m',
                'examples/objc/AppRTCDemo/mac/APPRTCViewController.h',
                'examples/objc/AppRTCDemo/mac/APPRTCViewController.m',
                'examples/objc/AppRTCDemo/mac/main.m',
              ],
              'xcode_settings': {
                'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
                'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/mac/Info.plist',
                'MACOSX_DEPLOYMENT_TARGET' : '10.8',
                'OTHER_LDFLAGS': [
                  '-framework AVFoundation',
                ],
              },
            }],
            ['target_arch=="ia32"', {
              'dependencies' : [
                '<(DEPTH)/testing/iossim/iossim.gyp:iossim#host',
              ],
            }],
          ],
        }, # target AppRTCDemo
        {
          # TODO(tkchin): move this into the real third party location and
          # have it mirrored on chrome infra.
          'target_name': 'socketrocket',
          'type': 'static_library',
          'sources': [
            'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.h',
            'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.m',
          ],
          'conditions': [
            ['OS=="mac"', {
              'xcode_settings': {
                # SocketRocket autosynthesizes some properties. Disable the
                # warning so we can compile successfully.
                'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
                'MACOSX_DEPLOYMENT_TARGET' : '10.8',
                # SRWebSocket.m uses code with partial availability.
                # https://code.google.com/p/webrtc/issues/detail?id=4695
                'WARNING_CFLAGS!': ['-Wpartial-availability'],
              },
            }],
          ],
          'direct_dependent_settings': {
            'include_dirs': [
              'examples/objc/AppRTCDemo/third_party/SocketRocket',
            ],
          },
          'xcode_settings': {
            'CLANG_ENABLE_OBJC_ARC': 'YES',
            'WARNING_CFLAGS': [
              '-Wno-deprecated-declarations',
            ],
          },
          'link_settings': {
            'xcode_settings': {
              'OTHER_LDFLAGS': [
                '-framework CFNetwork',
                '-licucore',
              ],
            },
          }
        }, # target socketrocket
      ], # targets
    }], # OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.8")

    ['OS=="android"', {
      'targets': [
        {
          'target_name': 'AppRTCDemo',
          'type': 'none',
          'dependencies': [
            '../talk/libjingle.gyp:libjingle_peerconnection_java',
          ],
          'variables': {
            'apk_name': 'AppRTCDemo',
            'java_in_dir': 'examples/androidapp',
            'has_java_resources': 1,
            'resource_dir': 'examples/androidapp/res',
            'R_package': 'org.appspot.apprtc',
            'R_package_relpath': 'org/appspot/apprtc',
            'input_jars_paths': [
              'examples/androidapp/third_party/autobanh/autobanh.jar',
            ],
            'library_dexed_jars_paths': [
              'examples/androidapp/third_party/autobanh/autobanh.jar',
            ],
            'native_lib_target': 'libjingle_peerconnection_so',
            'add_to_dependents_classpaths':1,
          },
          'includes': [ '../build/java_apk.gypi' ],
        }, # target AppRTCDemo

        {
          # AppRTCDemo creates a .jar as a side effect. Any java targets
          # that need that .jar in their classpath should depend on this target,
          # AppRTCDemo_apk. Dependents of AppRTCDemo_apk receive its
          # jar path in the variable 'apk_output_jar_path'.
          # This target should only be used by targets which instrument
          # AppRTCDemo_apk.
          'target_name': 'AppRTCDemo_apk',
          'type': 'none',
          'dependencies': [
            'AppRTCDemo',
          ],
          'includes': [ '../build/apk_fake_jar.gypi' ],
        }, # target AppRTCDemo_apk

        {
          'target_name': 'AppRTCDemoTest',
          'type': 'none',
          'dependencies': [
            'AppRTCDemo_apk',
          ],
          'variables': {
            'apk_name': 'AppRTCDemoTest',
            'java_in_dir': 'examples/androidtests',
            'is_test_apk': 1,
          },
          'includes': [ '../build/java_apk.gypi' ],
        },
      ], # targets
    }], # OS=="android"
  ],
}
462
webrtc/webrtc_examples.gyp
Normal file → Executable file
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC Project Authors. All rights reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -6,84 +6,402 @@
 # in the file PATENTS. All contributing project authors may
 # be found in the AUTHORS file in the root of the source tree.
 {
-'includes': ['build/common.gypi'],
-'targets': [],
+'includes': [
+'../talk/build/common.gypi',
+],
+'targets': [
+{
+'target_name': 'relayserver',
+'type': 'executable',
+'dependencies': [
+'../talk/libjingle.gyp:libjingle',
+'../talk/libjingle.gyp:libjingle_p2p',
+],
+'sources': [
+'examples/relayserver/relayserver_main.cc',
+],
+}, # target relayserver
+{
+'target_name': 'stunserver',
+'type': 'executable',
+'dependencies': [
+'../talk/libjingle.gyp:libjingle',
+'../talk/libjingle.gyp:libjingle_p2p',
+],
+'sources': [
+'examples/stunserver/stunserver_main.cc',
+],
+}, # target stunserver
+{
+'target_name': 'turnserver',
+'type': 'executable',
+'dependencies': [
+'../talk/libjingle.gyp:libjingle',
+'../talk/libjingle.gyp:libjingle_p2p',
+],
+'sources': [
+'examples/turnserver/turnserver_main.cc',
+],
+}, # target turnserver
+{
+'target_name': 'peerconnection_server',
+'type': 'executable',
+'sources': [
+'examples/peerconnection/server/data_socket.cc',
+'examples/peerconnection/server/data_socket.h',
+'examples/peerconnection/server/main.cc',
+'examples/peerconnection/server/peer_channel.cc',
+'examples/peerconnection/server/peer_channel.h',
+'examples/peerconnection/server/utils.cc',
+'examples/peerconnection/server/utils.h',
+],
+'dependencies': [
+'<(webrtc_root)/common.gyp:webrtc_common',
+'../talk/libjingle.gyp:libjingle',
+],
+# TODO(ronghuawu): crbug.com/167187 fix size_t to int truncations.
+'msvs_disabled_warnings': [ 4309, ],
+}, # target peerconnection_server
+],
 'conditions': [
+['OS=="linux" or OS=="win"', {
+'targets': [
+{
+'target_name': 'peerconnection_client',
+'type': 'executable',
+'sources': [
+'examples/peerconnection/client/conductor.cc',
+'examples/peerconnection/client/conductor.h',
+'examples/peerconnection/client/defaults.cc',
+'examples/peerconnection/client/defaults.h',
+'examples/peerconnection/client/peer_connection_client.cc',
+'examples/peerconnection/client/peer_connection_client.h',
+],
+'dependencies': [
+'../talk/libjingle.gyp:libjingle_peerconnection',
+'<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
+'<@(libjingle_tests_additional_deps)',
+],
+'conditions': [
+['build_json==1', {
+'dependencies': [
+'<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+],
+}],
+# TODO(ronghuawu): Move these files to a win/ directory then they
+# can be excluded automatically.
+['OS=="win"', {
+'sources': [
+'examples/peerconnection/client/flagdefs.h',
+'examples/peerconnection/client/main.cc',
+'examples/peerconnection/client/main_wnd.cc',
+'examples/peerconnection/client/main_wnd.h',
+],
+'msvs_settings': {
+'VCLinkerTool': {
+'SubSystem': '2', # Windows
+},
+},
+}], # OS=="win"
+['OS=="linux"', {
+'sources': [
+'examples/peerconnection/client/linux/main.cc',
+'examples/peerconnection/client/linux/main_wnd.cc',
+'examples/peerconnection/client/linux/main_wnd.h',
+],
+'cflags': [
+'<!@(pkg-config --cflags glib-2.0 gobject-2.0 gtk+-2.0)',
+],
+'link_settings': {
+'ldflags': [
+'<!@(pkg-config --libs-only-L --libs-only-other glib-2.0'
+' gobject-2.0 gthread-2.0 gtk+-2.0)',
+],
+'libraries': [
+'<!@(pkg-config --libs-only-l glib-2.0 gobject-2.0'
+' gthread-2.0 gtk+-2.0)',
+'-lX11',
+'-lXcomposite',
+'-lXext',
+'-lXrender',
+],
+},
+}], # OS=="linux"
+], # conditions
+}, # target peerconnection_client
+], # targets
+}], # OS=="linux" or OS=="win"
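The peerconnection_client target above leans on GYP command expansion: '<!(cmd)' runs a command at gyp time and inserts its output as a single string, while '<!@(cmd)' splits the output into separate list items, which is how the pkg-config output above becomes individual GLib/GTK+ compile and link flags. A minimal sketch of the same pattern with a hypothetical target and source file (not part of this change):

  {
    'target_name': 'gtk_tool_example',  # hypothetical target for illustration
    'type': 'executable',
    'sources': [
      'examples/gtk_tool_example/main.cc',  # hypothetical source file
    ],
    'cflags': [
      # pkg-config runs once at gyp time; each emitted flag becomes one list entry.
      '<!@(pkg-config --cflags gtk+-2.0)',
    ],
    'link_settings': {
      'libraries': [
        '<!@(pkg-config --libs-only-l gtk+-2.0)',
      ],
    },
  },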
+
+['OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.8")', {
+'targets': [
+{
+'target_name': 'apprtc_common',
+'type': 'static_library',
+'dependencies': [
+'<(webrtc_root)/system_wrappers/system_wrappers.gyp:field_trial_default',
+'../talk/libjingle.gyp:libjingle_peerconnection_objc',
+],
+'sources': [
+'examples/objc/AppRTCDemo/common/ARDUtilities.h',
+'examples/objc/AppRTCDemo/common/ARDUtilities.m',
+],
+'include_dirs': [
+'examples/objc/AppRTCDemo/common',
+],
+'direct_dependent_settings': {
+'include_dirs': [
+'examples/objc/AppRTCDemo/common',
+],
+},
+'conditions': [
+['OS=="mac"', {
+'xcode_settings': {
+'MACOSX_DEPLOYMENT_TARGET' : '10.8',
+},
+}],
+],
+'link_settings': {
+'xcode_settings': {
+'OTHER_LDFLAGS': [
+'-framework QuartzCore',
+],
+},
+},
+},
+{
+'target_name': 'apprtc_signaling',
+'type': 'static_library',
+'dependencies': [
+'apprtc_common',
+'../talk/libjingle.gyp:libjingle_peerconnection_objc',
+'socketrocket',
+],
+'sources': [
+'examples/objc/AppRTCDemo/ARDAppClient.h',
+'examples/objc/AppRTCDemo/ARDAppClient.m',
+'examples/objc/AppRTCDemo/ARDAppClient+Internal.h',
+'examples/objc/AppRTCDemo/ARDAppEngineClient.h',
+'examples/objc/AppRTCDemo/ARDAppEngineClient.m',
+'examples/objc/AppRTCDemo/ARDBitrateTracker.h',
+'examples/objc/AppRTCDemo/ARDBitrateTracker.m',
+'examples/objc/AppRTCDemo/ARDCEODTURNClient.h',
+'examples/objc/AppRTCDemo/ARDCEODTURNClient.m',
+'examples/objc/AppRTCDemo/ARDJoinResponse.h',
+'examples/objc/AppRTCDemo/ARDJoinResponse.m',
+'examples/objc/AppRTCDemo/ARDJoinResponse+Internal.h',
+'examples/objc/AppRTCDemo/ARDMessageResponse.h',
+'examples/objc/AppRTCDemo/ARDMessageResponse.m',
+'examples/objc/AppRTCDemo/ARDMessageResponse+Internal.h',
+'examples/objc/AppRTCDemo/ARDRoomServerClient.h',
+'examples/objc/AppRTCDemo/ARDSDPUtils.h',
+'examples/objc/AppRTCDemo/ARDSDPUtils.m',
+'examples/objc/AppRTCDemo/ARDSignalingChannel.h',
+'examples/objc/AppRTCDemo/ARDSignalingMessage.h',
+'examples/objc/AppRTCDemo/ARDSignalingMessage.m',
+'examples/objc/AppRTCDemo/ARDStatsBuilder.h',
+'examples/objc/AppRTCDemo/ARDStatsBuilder.m',
+'examples/objc/AppRTCDemo/ARDTURNClient.h',
+'examples/objc/AppRTCDemo/ARDWebSocketChannel.h',
+'examples/objc/AppRTCDemo/ARDWebSocketChannel.m',
+'examples/objc/AppRTCDemo/RTCICECandidate+JSON.h',
+'examples/objc/AppRTCDemo/RTCICECandidate+JSON.m',
+'examples/objc/AppRTCDemo/RTCICEServer+JSON.h',
+'examples/objc/AppRTCDemo/RTCICEServer+JSON.m',
+'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.h',
+'examples/objc/AppRTCDemo/RTCMediaConstraints+JSON.m',
+'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.h',
+'examples/objc/AppRTCDemo/RTCSessionDescription+JSON.m',
+],
+'include_dirs': [
+'examples/objc/AppRTCDemo',
+],
+'direct_dependent_settings': {
+'include_dirs': [
+'examples/objc/AppRTCDemo',
+],
+},
+'export_dependent_settings': [
+'../talk/libjingle.gyp:libjingle_peerconnection_objc',
+],
+'conditions': [
+['OS=="mac"', {
+'xcode_settings': {
+'MACOSX_DEPLOYMENT_TARGET' : '10.8',
+},
+}],
+],
+},
+{
+'target_name': 'AppRTCDemo',
+'type': 'executable',
+'product_name': 'AppRTCDemo',
+'mac_bundle': 1,
+'dependencies': [
+'apprtc_common',
+'apprtc_signaling',
+],
+'conditions': [
+['OS=="ios"', {
+'mac_bundle_resources': [
+'examples/objc/AppRTCDemo/ios/resources/iPhone5@2x.png',
+'examples/objc/AppRTCDemo/ios/resources/iPhone6@2x.png',
+'examples/objc/AppRTCDemo/ios/resources/iPhone6p@3x.png',
+'examples/objc/AppRTCDemo/ios/resources/Roboto-Regular.ttf',
+'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp.png',
+'examples/objc/AppRTCDemo/ios/resources/ic_call_end_black_24dp@2x.png',
+'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp.png',
+'examples/objc/AppRTCDemo/ios/resources/ic_clear_black_24dp@2x.png',
+'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp.png',
+'examples/objc/AppRTCDemo/ios/resources/ic_switch_video_black_24dp@2x.png',
+'examples/objc/Icon.png',
+],
+'sources': [
+'examples/objc/AppRTCDemo/ios/ARDAppDelegate.h',
+'examples/objc/AppRTCDemo/ios/ARDAppDelegate.m',
+'examples/objc/AppRTCDemo/ios/ARDMainView.h',
+'examples/objc/AppRTCDemo/ios/ARDMainView.m',
+'examples/objc/AppRTCDemo/ios/ARDMainViewController.h',
+'examples/objc/AppRTCDemo/ios/ARDMainViewController.m',
+'examples/objc/AppRTCDemo/ios/ARDStatsView.h',
+'examples/objc/AppRTCDemo/ios/ARDStatsView.m',
+'examples/objc/AppRTCDemo/ios/ARDVideoCallView.h',
+'examples/objc/AppRTCDemo/ios/ARDVideoCallView.m',
+'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.h',
+'examples/objc/AppRTCDemo/ios/ARDVideoCallViewController.m',
+'examples/objc/AppRTCDemo/ios/AppRTCDemo-Prefix.pch',
+'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.h',
+'examples/objc/AppRTCDemo/ios/UIImage+ARDUtilities.m',
+'examples/objc/AppRTCDemo/ios/main.m',
+],
+'xcode_settings': {
+'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/ios/Info.plist',
+},
+}],
+['OS=="mac"', {
+'sources': [
+'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.h',
+'examples/objc/AppRTCDemo/mac/APPRTCAppDelegate.m',
+'examples/objc/AppRTCDemo/mac/APPRTCViewController.h',
+'examples/objc/AppRTCDemo/mac/APPRTCViewController.m',
+'examples/objc/AppRTCDemo/mac/main.m',
+],
+'xcode_settings': {
+'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+'INFOPLIST_FILE': 'examples/objc/AppRTCDemo/mac/Info.plist',
+'MACOSX_DEPLOYMENT_TARGET' : '10.8',
+'OTHER_LDFLAGS': [
+'-framework AVFoundation',
+],
+},
+}],
+['target_arch=="ia32"', {
+'dependencies' : [
+'<(DEPTH)/testing/iossim/iossim.gyp:iossim#host',
+],
+}],
+],
+}, # target AppRTCDemo
+{
+# TODO(tkchin): move this into the real third party location and
+# have it mirrored on chrome infra.
+'target_name': 'socketrocket',
+'type': 'static_library',
+'sources': [
+'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.h',
+'examples/objc/AppRTCDemo/third_party/SocketRocket/SRWebSocket.m',
+],
+'conditions': [
+['OS=="mac"', {
+'xcode_settings': {
+# SocketRocket autosynthesizes some properties. Disable the
+# warning so we can compile successfully.
+'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+'MACOSX_DEPLOYMENT_TARGET' : '10.8',
+# SRWebSocket.m uses code with partial availability.
+# https://code.google.com/p/webrtc/issues/detail?id=4695
+'WARNING_CFLAGS!': ['-Wpartial-availability'],
+},
+}],
+],
+'direct_dependent_settings': {
+'include_dirs': [
+'examples/objc/AppRTCDemo/third_party/SocketRocket',
+],
+},
+'xcode_settings': {
+'CLANG_ENABLE_OBJC_ARC': 'YES',
+'WARNING_CFLAGS': [
+'-Wno-deprecated-declarations',
+],
+},
+'link_settings': {
+'xcode_settings': {
+'OTHER_LDFLAGS': [
+'-framework CFNetwork',
+'-licucore',
+],
+},
+}
+}, # target socketrocket
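socketrocket publishes its SocketRocket directory through 'direct_dependent_settings', so a target that lists it under 'dependencies' (as apprtc_signaling does above) gets that include path automatically and can import SRWebSocket.h without repeating it. A minimal sketch of a hypothetical consumer, with an illustrative source file that is not part of this change:

  {
    'target_name': 'socketrocket_client_example',  # hypothetical target
    'type': 'static_library',
    'dependencies': [
      # Pulls in the SocketRocket include_dirs via direct_dependent_settings.
      'socketrocket',
    ],
    'sources': [
      'examples/objc/websocket_client_example.m',  # hypothetical file importing SRWebSocket.h
    ],
  },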
+], # targets
+}], # OS=="ios" or (OS=="mac" and target_arch!="ia32" and mac_sdk>="10.8")
+
 ['OS=="android"', {
 'targets': [
 {
-'target_name': 'libwebrtcdemo-jni',
+'target_name': 'AppRTCDemo',
-'type': 'loadable_module',
-'dependencies': [
-'<(webrtc_root)/common.gyp:webrtc_common',
-'<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
-'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
-'<(webrtc_root)/test/test.gyp:channel_transport',
-'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
-],
-'sources': [
-'examples/android/media_demo/jni/jni_helpers.cc',
-'examples/android/media_demo/jni/on_load.cc',
-'examples/android/media_demo/jni/voice_engine_jni.cc',
-],
-'variables': {
-# This library uses native JNI exports; tell GYP so that the
-# required symbols will be kept.
-'use_native_jni_exports': 1,
-},
-'link_settings': {
-'libraries': [
-'-llog',
-'-lGLESv2',
-'-lOpenSLES',
-],
-}
-},
-{
-'target_name': 'WebRTCDemo',
 'type': 'none',
 'dependencies': [
-'libwebrtcdemo-jni',
+'../talk/libjingle.gyp:libjingle_peerconnection_java',
-'<(modules_java_gyp_path):*',
-],
-'actions': [
-{
-# TODO(yujie.mao): Convert building of the demo to a proper GYP
-# target so this action is not needed once chromium's
-# apk-building machinery can be used. (crbug.com/225101)
-'action_name': 'build_webrtcdemo_apk',
-'variables': {
-'android_webrtc_demo_root': '<(webrtc_root)/examples/android/media_demo',
-'ant_log': '../../../<(INTERMEDIATE_DIR)/ant.log', # ../../.. to compensate for the cd below.
-},
-'inputs' : [
-'<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
-'<(PRODUCT_DIR)/libwebrtcdemo-jni.so',
-'<!@(find <(android_webrtc_demo_root)/src -name "*.java")',
-'<!@(find <(android_webrtc_demo_root)/res -type f)',
-'<(android_webrtc_demo_root)/AndroidManifest.xml',
-'<(android_webrtc_demo_root)/build.xml',
-'<(android_webrtc_demo_root)/project.properties',
-],
-'outputs': ['<(PRODUCT_DIR)/WebRTCDemo-debug.apk'],
-'action': [
-'bash', '-ec',
-'rm -fr <(_outputs) <(android_webrtc_demo_root)/{bin,libs,gen,obj} && '
-'mkdir -p <(INTERMEDIATE_DIR) && ' # Must happen _before_ the cd below
-'mkdir -p <(android_webrtc_demo_root)/libs/<(android_app_abi) && '
-'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
-'<(android_strip) -o <(android_webrtc_demo_root)/libs/<(android_app_abi)/libwebrtcdemo-jni.so <(PRODUCT_DIR)/libwebrtcdemo-jni.so && '
-'cd <(android_webrtc_demo_root) && '
-'{ ANDROID_SDK_ROOT=<(android_sdk_root) '
-'ant debug > <(ant_log) 2>&1 || '
-' { cat <(ant_log) ; exit 1; } } && '
-'cd - > /dev/null && '
-'cp <(android_webrtc_demo_root)/bin/WebRTCDemo-debug.apk <(_outputs)'
-],
-},
-],
 ],
+'variables': {
+'apk_name': 'AppRTCDemo',
+'java_in_dir': 'examples/androidapp',
+'has_java_resources': 1,
+'resource_dir': 'examples/androidapp/res',
+'R_package': 'org.appspot.apprtc',
+'R_package_relpath': 'org/appspot/apprtc',
+'input_jars_paths': [
+'examples/androidapp/third_party/autobanh/autobanh.jar',
+],
+'library_dexed_jars_paths': [
+'examples/androidapp/third_party/autobanh/autobanh.jar',
+],
+'native_lib_target': 'libjingle_peerconnection_so',
+'add_to_dependents_classpaths':1,
+},
+'includes': [ '../build/java_apk.gypi' ],
+}, # target AppRTCDemo
+
+{
+# AppRTCDemo creates a .jar as a side effect. Any java targets
+# that need that .jar in their classpath should depend on this target,
+# AppRTCDemo_apk. Dependents of AppRTCDemo_apk receive its
+# jar path in the variable 'apk_output_jar_path'.
+# This target should only be used by targets which instrument
+# AppRTCDemo_apk.
+'target_name': 'AppRTCDemo_apk',
+'type': 'none',
+'dependencies': [
+'AppRTCDemo',
+],
+'includes': [ '../build/apk_fake_jar.gypi' ],
+}, # target AppRTCDemo_apk
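Per the comment block above, AppRTCDemo_apk exists only so that instrumentation targets can put the demo's .jar on their classpath; apk_fake_jar.gypi publishes that jar to dependents as 'apk_output_jar_path'. A hypothetical dependent might look like the sketch below (names and paths are illustrative; the real in-tree consumer is the AppRTCDemoTest target that follows):

  {
    'target_name': 'apprtc_instrumentation_example',  # hypothetical target
    'type': 'none',
    'dependencies': [
      # Exposes the AppRTCDemo jar path ('apk_output_jar_path') to this target.
      'AppRTCDemo_apk',
    ],
    'variables': {
      'apk_name': 'AppRTCInstrumentationExample',        # hypothetical
      'java_in_dir': 'examples/androidinstrumentation',  # hypothetical directory
      'is_test_apk': 1,
    },
    # Assumption: the shared java_apk.gypi machinery places the exported jar on
    # the classpath via dependent settings, as described in the comment above.
    'includes': [ '../build/java_apk.gypi' ],
  },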
+
+{
+'target_name': 'AppRTCDemoTest',
+'type': 'none',
+'dependencies': [
+'AppRTCDemo_apk',
+],
+'variables': {
+'apk_name': 'AppRTCDemoTest',
+'java_in_dir': 'examples/androidtests',
+'is_test_apk': 1,
+},
+'includes': [ '../build/java_apk.gypi' ],
 },
-],
+], # targets
-}],
+}], # OS=="android"
 ],
 }