Revert of Delete video_render module. (patchset #12 id:220001 of https://codereview.webrtc.org/1912143002/ )

Reason for revert:
This breaks every buildbot in chromium.webrtc.fyi and I don't see any roll in progress to address this (and I don't see how that would be possible either).
Usage in Chrome: https://code.google.com/p/chromium/codesearch#search/&q=modules.gyp%3Avideo_render&sq=package:chromium&type=cs

Example failures:
https://build.chromium.org/p/chromium.webrtc.fyi/builders/Linux%20Builder/builds/5420
https://build.chromium.org/p/chromium.webrtc.fyi/builders/Win%20Builder/builds/4526

I think it's fine to delete our video_render_module_internal_impl target and those files, but the video_render target needs to remain.
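
For reference, a minimal sketch of that split (trimmed from the restored BUILD.gn below; deps reduced for illustration):

  # Referenced from Chromium's build (modules.gyp:video_render), so it stays:
  source_set("video_render") {
    sources = [ "video_render_impl.cc" ]
    deps = [ ":video_render_module" ]
  }

  # Only reachable when !build_with_chromium; this target and its
  # platform-specific sources are what can safely be deleted:
  source_set("video_render_internal_impl") {
    sources = [ "video_render_internal_impl.cc" ]
    deps = [ ":video_render_module" ]
  }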

Original issue's description:
> Delete video_render module.
>
> BUG=webrtc:5817
>
> Committed: https://crrev.com/97cfd1ec05d07ef233356e57f7aa4b028b74ffba
> Cr-Commit-Position: refs/heads/master@{#12526}

TBR=mflodman@webrtc.org,pbos@webrtc.org,nisse@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5817

Review-Url: https://codereview.webrtc.org/1923613003
Cr-Commit-Position: refs/heads/master@{#12534}
kjellander 2016-04-27 08:56:50 -07:00 committed by Commit bot
parent 6353723b02
commit 0190367cea
79 changed files with 15144 additions and 4 deletions


@ -157,8 +157,6 @@
'OTHER_LDFLAGS': [
'-framework CoreGraphics',
'-framework GLKit',
'-framework OpenGLES',
'-framework QuartzCore',
],
},
},


@ -204,7 +204,10 @@ source_set("webrtc") {
]
if (build_with_chromium) {
deps += [ "modules/video_capture" ]
deps += [
"modules/video_capture",
"modules/video_render",
]
}
if (rtc_enable_protobuf) {
@ -219,6 +222,7 @@ if (!build_with_chromium) {
deps = [
":webrtc",
"modules/video_capture:video_capture_internal_impl",
"modules/video_render:video_render_internal_impl",
"test",
]
}


@ -115,7 +115,7 @@
'java/android',
'<(webrtc_base_dir)/java/src',
'<(webrtc_modules_dir)/audio_device/android/java/src',
'<(webrtc_modules_dir)/video_render/android/java/src',
],
},
'includes': ['../../build/java.gypi'],


@ -70,6 +70,7 @@
#include "webrtc/media/devices/videorendererfactory.h"
#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/system_wrappers/include/field_trial_default.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"
#include "webrtc/system_wrappers/include/trace.h"


@ -15,6 +15,7 @@
'dependencies': [
'<(webrtc_root)/base/base.gyp:rtc_base_approved',
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/modules/modules.gyp:video_render_module',
'<(webrtc_root)/webrtc.gyp:webrtc',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:metrics_default',
@ -132,6 +133,7 @@
['build_with_chromium==1', {
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:video_capture',
'<(webrtc_root)/modules/modules.gyp:video_render',
],
}, {
'defines': [
@ -146,6 +148,7 @@
},
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
'<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
],
}],
['OS=="linux" and use_gtk==1', {


@ -26,6 +26,7 @@
'video_coding/video_coding.gypi',
'video_capture/video_capture.gypi',
'video_processing/video_processing.gypi',
'video_render/video_render.gypi',
],
'conditions': [
['include_tests==1', {
@ -780,6 +781,19 @@
'modules_unittests.isolate',
],
},
{
'target_name': 'video_render_tests_run',
'type': 'none',
'dependencies': [
'video_render_tests',
],
'includes': [
'../build/isolate.gypi',
],
'sources': [
'video_render_tests.isolate',
],
},
],
}],
],


@ -18,5 +18,14 @@
'includes': [ '../../build/java.gypi' ],
}, # audio_device_module_java
{
'target_name': 'video_render_module_java',
'type': 'none',
'variables': {
'java_in_dir': 'video_render/android/java',
'additional_src_dirs': [ '../base/java/src', ],
},
'includes': [ '../../build/java.gypi' ],
}, # video_render_module_java
],
}


@ -16,5 +16,13 @@
},
'includes': [ '../../../build/java.gypi' ],
}, # audio_device_module_java
{
'target_name': 'video_render_module_java',
'type': 'none',
'variables': {
'java_in_dir': 'video_render/android/java',
},
'includes': [ '../../../build/java.gypi' ],
}, # video_render_module_java
],
}


@ -0,0 +1,178 @@
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../build/webrtc.gni")
source_set("video_render_module") {
sources = [
"external/video_render_external_impl.cc",
"external/video_render_external_impl.h",
"i_video_render.h",
"video_render.h",
"video_render_defines.h",
"video_render_impl.h",
]
deps = [
"../..:webrtc_common",
"../../common_video",
"../../system_wrappers",
"../utility",
]
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
if (is_clang) {
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
}
source_set("video_render") {
sources = [
"video_render_impl.cc",
]
deps = [
":video_render_module",
"../../system_wrappers",
]
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
if (is_clang) {
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
}
if (!build_with_chromium) {
config("video_render_internal_impl_config") {
if (is_ios) {
libs = [
"OpenGLES.framework",
"QuartzCore.framework",
]
}
}
source_set("video_render_internal_impl") {
libs = []
sources = [
"video_render_internal_impl.cc",
]
deps = [
":video_render_module",
"../../system_wrappers",
]
if (is_linux) {
sources += [
"linux/video_render_linux_impl.cc",
"linux/video_render_linux_impl.h",
"linux/video_x11_channel.cc",
"linux/video_x11_channel.h",
"linux/video_x11_render.cc",
"linux/video_x11_render.h",
]
deps += [ "../..:webrtc_common" ]
libs += [ "Xext" ]
}
if (is_mac) {
sources += [
"mac/cocoa_full_screen_window.h",
"mac/cocoa_full_screen_window.mm",
"mac/cocoa_render_view.h",
"mac/cocoa_render_view.mm",
"mac/video_render_agl.cc",
"mac/video_render_agl.h",
"mac/video_render_mac_carbon_impl.cc",
"mac/video_render_mac_carbon_impl.h",
"mac/video_render_mac_cocoa_impl.h",
"mac/video_render_mac_cocoa_impl.mm",
"mac/video_render_nsopengl.h",
"mac/video_render_nsopengl.mm",
]
libs += [
"CoreVideo.framework",
"QTKit.framework",
]
}
if (is_win) {
sources += [
"windows/i_video_render_win.h",
"windows/video_render_direct3d9.cc",
"windows/video_render_direct3d9.h",
"windows/video_render_windows_impl.cc",
"windows/video_render_windows_impl.h",
]
directxsdk_exists =
exec_script("//build/dir_exists.py",
[ rebase_path("//third_party/directxsdk/files",
root_build_dir) ],
"trim string") == "True"
if (directxsdk_exists) {
directxsdk_path = "//third_party/directxsdk/files"
} else {
directxsdk_path =
exec_script("../../build/find_directx_sdk.py", [], "trim string")
}
include_dirs = [ directxsdk_path + "/Include" ]
}
if (is_android) {
sources += [
"android/video_render_android_impl.cc",
"android/video_render_android_impl.h",
"android/video_render_android_native_opengl2.cc",
"android/video_render_android_native_opengl2.h",
"android/video_render_android_surface_view.cc",
"android/video_render_android_surface_view.h",
"android/video_render_opengles20.cc",
"android/video_render_opengles20.h",
]
libs += [ "GLESv2" ]
}
if (is_ios) {
sources += [
"ios/open_gles20.h",
"ios/open_gles20.mm",
"ios/video_render_ios_channel.h",
"ios/video_render_ios_channel.mm",
"ios/video_render_ios_gles20.h",
"ios/video_render_ios_gles20.mm",
"ios/video_render_ios_impl.h",
"ios/video_render_ios_impl.mm",
"ios/video_render_ios_view.h",
"ios/video_render_ios_view.mm",
]
deps += [ "../..:webrtc_common" ]
cflags = [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES.
}
all_dependent_configs = [ ":video_render_internal_impl_config" ]
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
if (is_clang) {
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
}
}


@ -0,0 +1,5 @@
include_rules = [
"+webrtc/base",
"+webrtc/common_video",
"+webrtc/system_wrappers",
]


@ -0,0 +1,12 @@
mflodman@webrtc.org
perkj@webrtc.org
tkchin@webrtc.org
per-file *.isolate=kjellander@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gyp=*
per-file *.gypi=*
per-file BUILD.gn=kjellander@webrtc.org


@ -0,0 +1,371 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import java.util.concurrent.locks.ReentrantLock;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import org.webrtc.Logging;
public class ViEAndroidGLES20 extends GLSurfaceView
implements GLSurfaceView.Renderer {
private static String TAG = "WEBRTC-JR";
private static final boolean DEBUG = false;
// True if onSurfaceCreated has been called.
private boolean surfaceCreated = false;
private boolean openGLCreated = false;
// True if NativeFunctionsRegistered has been called.
private boolean nativeFunctionsRegisted = false;
private ReentrantLock nativeFunctionLock = new ReentrantLock();
// Address of Native object that will do the drawing.
private long nativeObject = 0;
private int viewWidth = 0;
private int viewHeight = 0;
public static boolean UseOpenGL2(Object renderWindow) {
return ViEAndroidGLES20.class.isInstance(renderWindow);
}
public ViEAndroidGLES20(Context context) {
super(context);
init(false, 0, 0);
}
public ViEAndroidGLES20(Context context, boolean translucent,
int depth, int stencil) {
super(context);
init(translucent, depth, stencil);
}
private void init(boolean translucent, int depth, int stencil) {
// By default, GLSurfaceView() creates a RGB_565 opaque surface.
// If we want a translucent one, we should change the surface's
// format here, because PixelFormat.TRANSLUCENT for GL surfaces
// is interpreted by SurfaceFlinger as any 32-bit surface with alpha.
if (translucent) {
this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
}
// Setup the context factory for 2.0 rendering.
// See ContextFactory class definition below
setEGLContextFactory(new ContextFactory());
// We need to choose an EGLConfig that matches the format of
// our surface exactly. This is going to be done in our
// custom config chooser. See ConfigChooser class definition
// below.
setEGLConfigChooser( translucent ?
new ConfigChooser(8, 8, 8, 8, depth, stencil) :
new ConfigChooser(5, 6, 5, 0, depth, stencil) );
// Set the renderer responsible for frame rendering
this.setRenderer(this);
this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
Logging.w(TAG, "creating OpenGL ES 2.0 context");
checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
EGLContext context = egl.eglCreateContext(display, eglConfig,
EGL10.EGL_NO_CONTEXT, attrib_list);
checkEglError("After eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Logging.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
}
}
private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
// This EGL config specification is used to specify 2.0 rendering.
// We use a minimum size of 4 bits for red/green/blue, but will
// perform actual matching in chooseConfig() below.
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
// Get the number of minimally matching EGL configurations
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
// Allocate then read the array of minimally matching EGL configs
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
if (DEBUG) {
printConfigs(egl, display, configs);
}
// Now return the "best" one
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
private void printConfigs(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
int numConfigs = configs.length;
Logging.w(TAG, String.format("%d configurations", numConfigs));
for (int i = 0; i < numConfigs; i++) {
Logging.w(TAG, String.format("Configuration %d:\n", i));
printConfig(egl, display, configs[i]);
}
}
private void printConfig(EGL10 egl, EGLDisplay display,
EGLConfig config) {
int[] attributes = {
EGL10.EGL_BUFFER_SIZE,
EGL10.EGL_ALPHA_SIZE,
EGL10.EGL_BLUE_SIZE,
EGL10.EGL_GREEN_SIZE,
EGL10.EGL_RED_SIZE,
EGL10.EGL_DEPTH_SIZE,
EGL10.EGL_STENCIL_SIZE,
EGL10.EGL_CONFIG_CAVEAT,
EGL10.EGL_CONFIG_ID,
EGL10.EGL_LEVEL,
EGL10.EGL_MAX_PBUFFER_HEIGHT,
EGL10.EGL_MAX_PBUFFER_PIXELS,
EGL10.EGL_MAX_PBUFFER_WIDTH,
EGL10.EGL_NATIVE_RENDERABLE,
EGL10.EGL_NATIVE_VISUAL_ID,
EGL10.EGL_NATIVE_VISUAL_TYPE,
0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
EGL10.EGL_SAMPLES,
EGL10.EGL_SAMPLE_BUFFERS,
EGL10.EGL_SURFACE_TYPE,
EGL10.EGL_TRANSPARENT_TYPE,
EGL10.EGL_TRANSPARENT_RED_VALUE,
EGL10.EGL_TRANSPARENT_GREEN_VALUE,
EGL10.EGL_TRANSPARENT_BLUE_VALUE,
0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
EGL10.EGL_LUMINANCE_SIZE,
EGL10.EGL_ALPHA_MASK_SIZE,
EGL10.EGL_COLOR_BUFFER_TYPE,
EGL10.EGL_RENDERABLE_TYPE,
0x3042 // EGL10.EGL_CONFORMANT
};
String[] names = {
"EGL_BUFFER_SIZE",
"EGL_ALPHA_SIZE",
"EGL_BLUE_SIZE",
"EGL_GREEN_SIZE",
"EGL_RED_SIZE",
"EGL_DEPTH_SIZE",
"EGL_STENCIL_SIZE",
"EGL_CONFIG_CAVEAT",
"EGL_CONFIG_ID",
"EGL_LEVEL",
"EGL_MAX_PBUFFER_HEIGHT",
"EGL_MAX_PBUFFER_PIXELS",
"EGL_MAX_PBUFFER_WIDTH",
"EGL_NATIVE_RENDERABLE",
"EGL_NATIVE_VISUAL_ID",
"EGL_NATIVE_VISUAL_TYPE",
"EGL_PRESERVED_RESOURCES",
"EGL_SAMPLES",
"EGL_SAMPLE_BUFFERS",
"EGL_SURFACE_TYPE",
"EGL_TRANSPARENT_TYPE",
"EGL_TRANSPARENT_RED_VALUE",
"EGL_TRANSPARENT_GREEN_VALUE",
"EGL_TRANSPARENT_BLUE_VALUE",
"EGL_BIND_TO_TEXTURE_RGB",
"EGL_BIND_TO_TEXTURE_RGBA",
"EGL_MIN_SWAP_INTERVAL",
"EGL_MAX_SWAP_INTERVAL",
"EGL_LUMINANCE_SIZE",
"EGL_ALPHA_MASK_SIZE",
"EGL_COLOR_BUFFER_TYPE",
"EGL_RENDERABLE_TYPE",
"EGL_CONFORMANT"
};
int[] value = new int[1];
for (int i = 0; i < attributes.length; i++) {
int attribute = attributes[i];
String name = names[i];
if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
Logging.w(TAG, String.format(" %s: %d\n", name, value[0]));
} else {
// Logging.w(TAG, String.format(" %s: failed\n", name));
while (egl.eglGetError() != EGL10.EGL_SUCCESS);
}
}
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
// IsSupported
// Returns true if this device supports OpenGL ES 2.0 rendering.
public static boolean IsSupported(Context context) {
ActivityManager am =
(ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo info = am.getDeviceConfigurationInfo();
if(info.reqGlEsVersion >= 0x20000) {
// Open GL ES 2.0 is supported.
return true;
}
return false;
}
public void onDrawFrame(GL10 gl) {
nativeFunctionLock.lock();
if(!nativeFunctionsRegisted || !surfaceCreated) {
nativeFunctionLock.unlock();
return;
}
if(!openGLCreated) {
if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
return; // Failed to create OpenGL
}
openGLCreated = true; // Created OpenGL successfully
}
DrawNative(nativeObject); // Draw the new frame
nativeFunctionLock.unlock();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
surfaceCreated = true;
viewWidth = width;
viewHeight = height;
nativeFunctionLock.lock();
if(nativeFunctionsRegisted) {
if(CreateOpenGLNative(nativeObject,width,height) == 0)
openGLCreated = true;
}
nativeFunctionLock.unlock();
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void RegisterNativeObject(long nativeObject) {
nativeFunctionLock.lock();
this.nativeObject = nativeObject;
nativeFunctionsRegisted = true;
nativeFunctionLock.unlock();
}
public void DeRegisterNativeObject() {
nativeFunctionLock.lock();
nativeFunctionsRegisted = false;
openGLCreated = false;
this.nativeObject = 0;
nativeFunctionLock.unlock();
}
public void ReDraw() {
if(surfaceCreated) {
// Request the renderer to redraw using the render thread context.
this.requestRender();
}
}
private native int CreateOpenGLNative(long nativeObject,
int width, int height);
private native void DrawNative(long nativeObject);
}


@ -0,0 +1,29 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import android.content.Context;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class ViERenderer {
public static SurfaceView CreateRenderer(Context context) {
return CreateRenderer(context, false);
}
public static SurfaceView CreateRenderer(Context context,
boolean useOpenGLES2) {
if (useOpenGLES2 && ViEAndroidGLES20.IsSupported(context))
return new ViEAndroidGLES20(context);
else
return new SurfaceView(context);
}
}


@ -0,0 +1,185 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
// The following four imports are needed by saveBitmapToJPEG, which
// is for debugging only.
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.SurfaceHolder.Callback;
import org.webrtc.Logging;
public class ViESurfaceRenderer implements Callback {
private final static String TAG = "WEBRTC";
// the bitmap used for drawing.
private Bitmap bitmap = null;
private ByteBuffer byteBuffer = null;
private SurfaceHolder surfaceHolder;
// Rect of the source bitmap to draw
private Rect srcRect = new Rect();
// Rect of the destination canvas to draw to
private Rect dstRect = new Rect();
private float dstTopScale = 0;
private float dstBottomScale = 1;
private float dstLeftScale = 0;
private float dstRightScale = 1;
public ViESurfaceRenderer(SurfaceView view) {
surfaceHolder = view.getHolder();
if(surfaceHolder == null)
return;
surfaceHolder.addCallback(this);
}
// surfaceChanged and surfaceCreated share this function
private void changeDestRect(int dstWidth, int dstHeight) {
dstRect.right = (int)(dstRect.left + dstRightScale * dstWidth);
dstRect.bottom = (int)(dstRect.top + dstBottomScale * dstHeight);
}
public void surfaceChanged(SurfaceHolder holder, int format,
int in_width, int in_height) {
Logging.d(TAG, "ViESurfaceRender::surfaceChanged");
changeDestRect(in_width, in_height);
Logging.d(TAG, "ViESurfaceRender::surfaceChanged" +
" in_width:" + in_width + " in_height:" + in_height +
" srcRect.left:" + srcRect.left +
" srcRect.top:" + srcRect.top +
" srcRect.right:" + srcRect.right +
" srcRect.bottom:" + srcRect.bottom +
" dstRect.left:" + dstRect.left +
" dstRect.top:" + dstRect.top +
" dstRect.right:" + dstRect.right +
" dstRect.bottom:" + dstRect.bottom);
}
public void surfaceCreated(SurfaceHolder holder) {
Canvas canvas = surfaceHolder.lockCanvas();
if(canvas != null) {
Rect dst = surfaceHolder.getSurfaceFrame();
if(dst != null) {
changeDestRect(dst.right - dst.left, dst.bottom - dst.top);
Logging.d(TAG, "ViESurfaceRender::surfaceCreated" +
" dst.left:" + dst.left +
" dst.top:" + dst.top +
" dst.right:" + dst.right +
" dst.bottom:" + dst.bottom +
" srcRect.left:" + srcRect.left +
" srcRect.top:" + srcRect.top +
" srcRect.right:" + srcRect.right +
" srcRect.bottom:" + srcRect.bottom +
" dstRect.left:" + dstRect.left +
" dstRect.top:" + dstRect.top +
" dstRect.right:" + dstRect.right +
" dstRect.bottom:" + dstRect.bottom);
}
surfaceHolder.unlockCanvasAndPost(canvas);
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
Logging.d(TAG, "ViESurfaceRenderer::surfaceDestroyed");
bitmap = null;
byteBuffer = null;
}
public Bitmap CreateBitmap(int width, int height) {
Logging.d(TAG, "CreateByteBitmap " + width + ":" + height);
if (bitmap == null) {
try {
android.os.Process.setThreadPriority(
android.os.Process.THREAD_PRIORITY_DISPLAY);
}
catch (Exception e) {
}
}
bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
srcRect.left = 0;
srcRect.top = 0;
srcRect.bottom = height;
srcRect.right = width;
return bitmap;
}
public ByteBuffer CreateByteBuffer(int width, int height) {
Logging.d(TAG, "CreateByteBuffer " + width + ":" + height);
if (bitmap == null) {
bitmap = CreateBitmap(width, height);
byteBuffer = ByteBuffer.allocateDirect(width * height * 2);
}
return byteBuffer;
}
public void SetCoordinates(float left, float top,
float right, float bottom) {
Logging.d(TAG, "SetCoordinates " + left + "," + top + ":" +
right + "," + bottom);
dstLeftScale = left;
dstTopScale = top;
dstRightScale = right;
dstBottomScale = bottom;
}
// Saves bitmap data to a JPEG picture; this function is for debugging only.
private void saveBitmapToJPEG(int width, int height) {
ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteOutStream);
try{
FileOutputStream output = new FileOutputStream(String.format(
"/sdcard/render_%d.jpg", System.currentTimeMillis()));
output.write(byteOutStream.toByteArray());
output.flush();
output.close();
}
catch (FileNotFoundException e) {
}
catch (IOException e) {
}
}
public void DrawByteBuffer() {
if(byteBuffer == null)
return;
byteBuffer.rewind();
bitmap.copyPixelsFromBuffer(byteBuffer);
DrawBitmap();
}
public void DrawBitmap() {
if(bitmap == null)
return;
Canvas canvas = surfaceHolder.lockCanvas();
if(canvas != null) {
// The following line is for debugging only
// saveBitmapToJPEG(srcRect.right - srcRect.left,
// srcRect.bottom - srcRect.top);
canvas.drawBitmap(bitmap, srcRect, dstRect, null);
surfaceHolder.unlockCanvasAndPost(canvas);
}
}
}


@ -0,0 +1,316 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
#include "webrtc/modules/video_render/video_render_internal.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/include/trace.h"
#endif
namespace webrtc {
JavaVM* VideoRenderAndroid::g_jvm = NULL;
int32_t SetRenderAndroidVM(JavaVM* javaVM) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
VideoRenderAndroid::g_jvm = javaVM;
return 0;
}
VideoRenderAndroid::VideoRenderAndroid(
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool /*fullscreen*/):
_id(id),
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderType(videoRenderType),
_ptrWindow((jobject)(window)),
_javaShutDownFlag(false),
_javaShutdownEvent(*EventWrapper::Create()),
_javaRenderEvent(*EventWrapper::Create()),
_lastJavaRenderEvent(0),
_javaRenderJniEnv(NULL) {
}
VideoRenderAndroid::~VideoRenderAndroid() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"VideoRenderAndroid dtor");
if (_javaRenderThread)
StopRender();
for (AndroidStreamMap::iterator it = _streamsMap.begin();
it != _streamsMap.end();
++it) {
delete it->second;
}
delete &_javaShutdownEvent;
delete &_javaRenderEvent;
delete &_critSect;
}
int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
return -1;
}
VideoRenderCallback*
VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right,
const float bottom) {
CriticalSectionScoped cs(&_critSect);
AndroidStream* renderStream = NULL;
AndroidStreamMap::iterator item = _streamsMap.find(streamId);
if (item != _streamsMap.end() && item->second != NULL) {
WEBRTC_TRACE(kTraceInfo,
kTraceVideoRenderer,
-1,
"%s: Render stream already exists",
__FUNCTION__);
return renderStream;
}
renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
right, bottom, *this);
if (renderStream) {
_streamsMap[streamId] = renderStream;
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return NULL;
}
return renderStream;
}
int32_t VideoRenderAndroid::DeleteIncomingRenderStream(
const uint32_t streamId) {
CriticalSectionScoped cs(&_critSect);
AndroidStreamMap::iterator item = _streamsMap.find(streamId);
if (item == _streamsMap.end()) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return -1;
}
delete item->second;
_streamsMap.erase(item);
return 0;
}
int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const {
return -1;
}
int32_t VideoRenderAndroid::StartRender() {
CriticalSectionScoped cs(&_critSect);
if (_javaRenderThread) {
// StartRender is called when this stream should start rendering.
// However, StopRender is not called when the streams stop rendering.
// Thus the thread is only deleted when the renderer is removed.
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s, Render thread already exist", __FUNCTION__);
return 0;
}
_javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this,
"AndroidRenderThread"));
_javaRenderThread->Start();
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started",
__FUNCTION__);
_javaRenderThread->SetPriority(rtc::kRealtimePriority);
return 0;
}
int32_t VideoRenderAndroid::StopRender() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
{
CriticalSectionScoped cs(&_critSect);
if (!_javaRenderThread)
{
return -1;
}
_javaShutDownFlag = true;
_javaRenderEvent.Set();
}
_javaShutdownEvent.Wait(3000);
CriticalSectionScoped cs(&_critSect);
_javaRenderThread->Stop();
_javaRenderThread.reset();
return 0;
}
void VideoRenderAndroid::ReDraw() {
CriticalSectionScoped cs(&_critSect);
// Allow a redraw if more than 20 ms have passed since the last one.
if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
_lastJavaRenderEvent = TickTime::MillisecondTimestamp();
_javaRenderEvent.Set();
}
}
bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
}
bool VideoRenderAndroid::JavaRenderThreadProcess()
{
_javaRenderEvent.Wait(1000);
CriticalSectionScoped cs(&_critSect);
if (!_javaRenderJniEnv) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
// Get the JNI env for this thread
if ((res < 0) || !_javaRenderJniEnv) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, _javaRenderJniEnv);
return false;
}
}
for (AndroidStreamMap::iterator it = _streamsMap.begin();
it != _streamsMap.end();
++it) {
it->second->DeliverFrame(_javaRenderJniEnv);
}
if (_javaShutDownFlag) {
if (g_jvm->DetachCurrentThread() < 0)
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
else {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: Java thread detached", __FUNCTION__);
}
_javaRenderJniEnv = NULL;
_javaShutDownFlag = false;
_javaShutdownEvent.Set();
return false; // Do not run this thread again.
}
return true;
}
VideoRenderType VideoRenderAndroid::RenderType() {
return _renderType;
}
RawVideoType VideoRenderAndroid::PerferedVideoType() {
return kVideoI420;
}
bool VideoRenderAndroid::FullScreen() {
return false;
}
int32_t VideoRenderAndroid::GetGraphicsMemory(
uint64_t& /*totalGraphicsMemory*/,
uint64_t& /*availableGraphicsMemory*/) const {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
int32_t VideoRenderAndroid::GetScreenResolution(
uint32_t& /*screenWidth*/,
uint32_t& /*screenHeight*/) const {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
uint32_t VideoRenderAndroid::RenderFrameRate(
const uint32_t /*streamId*/) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
int32_t VideoRenderAndroid::SetStreamCropping(
const uint32_t /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
const float /*bottom*/) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
int32_t VideoRenderAndroid::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
int32_t VideoRenderAndroid::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right,
const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
} // namespace webrtc


@ -0,0 +1,154 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#include <jni.h>
#include <map>
#include <memory>
#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_render/i_video_render.h"
namespace webrtc {
//#define ANDROID_LOG
class CriticalSectionWrapper;
class EventWrapper;
// The object a module user uses to send new frames to the java renderer
// Base class for android render streams.
class AndroidStream : public VideoRenderCallback {
public:
// DeliverFrame is called from a thread connected to the Java VM.
// Used for delivering a frame for rendering.
virtual void DeliverFrame(JNIEnv* jniEnv) = 0;
virtual ~AndroidStream() {}
};
class VideoRenderAndroid: IVideoRender {
public:
VideoRenderAndroid(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderAndroid();
virtual int32_t Init()=0;
virtual int32_t ChangeWindow(void* window);
virtual VideoRenderCallback* AddIncomingRenderStream(
const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t DeleteIncomingRenderStream(
const uint32_t streamId);
virtual int32_t GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const;
virtual int32_t StartRender();
virtual int32_t StopRender();
virtual void ReDraw();
// Properties
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual int32_t GetGraphicsMemory(
uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual int32_t GetScreenResolution(
uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
static JavaVM* g_jvm;
protected:
virtual AndroidStream* CreateAndroidRenderChannel(
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) = 0;
int32_t _id;
CriticalSectionWrapper& _critSect;
VideoRenderType _renderType;
jobject _ptrWindow;
private:
static bool JavaRenderThreadFun(void* obj);
bool JavaRenderThreadProcess();
// Map with streams to render.
typedef std::map<int32_t, AndroidStream*> AndroidStreamMap;
AndroidStreamMap _streamsMap;
// True if the _javaRenderThread thread shall be detached from the JVM.
bool _javaShutDownFlag;
EventWrapper& _javaShutdownEvent;
EventWrapper& _javaRenderEvent;
int64_t _lastJavaRenderEvent;
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
// TODO(pbos): Remove unique_ptr and use the member directly.
std::unique_ptr<rtc::PlatformThread> _javaRenderThread;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_


@ -0,0 +1,450 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/include/trace.h"
#endif
namespace webrtc {
AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
VideoRenderAndroid(id, videoRenderType, window, fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL) {
}
bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
if (!g_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"RendererAndroid():UseOpenGL No JVM set.");
return false;
}
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"RendererAndroid(): Could not attach thread to JVM (%d, %p)",
res, env);
return false;
}
isAttached = true;
}
// get the renderer class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClassLocal) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not find ViEAndroidRenderer class",
__FUNCTION__);
return false;
}
// get the method ID for UseOpenGL
jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
"UseOpenGL2",
"(Ljava/lang/Object;)Z");
if (cidUseOpenGL == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not get UseOpenGL ID", __FUNCTION__);
return false;
}
jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
cidUseOpenGL, (jobject) window);
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
return res;
}
AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Renderer dtor");
if (g_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
env = NULL;
}
else {
isAttached = true;
}
}
if (env) {  // env is NULL if the attach above failed.
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
}
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
int32_t AndroidNativeOpenGl2Renderer::Init() {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s): Not a valid Java VM pointer.", __FUNCTION__);
return -1;
}
if (!_ptrWindow) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"(%s): No window have been provided.", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
// get the ViEAndroidGLES20 class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClassLocal) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not find ViEAndroidGLES20", __FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that
// we are referencing it after this function has returned)
_javaRenderClass =
reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not create Java SurfaceHolder class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(_ptrWindow);
if (!_javaRenderObj) {
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
__FUNCTION__);
return 0;
}
AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
__FUNCTION__, streamId);
AndroidNativeOpenGl2Channel* stream =
new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
_javaRenderObj);
if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
return stream;
else {
delete stream;
}
return NULL;
}
AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj):
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
_registerNativeCID(NULL), _deRegisterNativeCID(NULL),
_openGLRenderer(streamId) {
}
AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Channel dtor");
if (_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
env = NULL;
} else {
isAttached = true;
}
}
if (env && _deRegisterNativeCID) {
env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
}
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
delete &_renderCritSect;
}
int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
if (!_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClass) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
// get the method ID for the ReDraw function
_redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
if (_redrawCid == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get ReDraw ID", __FUNCTION__);
return -1;
}
_registerNativeCID = env->GetMethodID(javaRenderClass,
"RegisterNativeObject", "(J)V");
if (_registerNativeCID == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get RegisterNativeObject ID", __FUNCTION__);
return -1;
}
_deRegisterNativeCID = env->GetMethodID(javaRenderClass,
"DeRegisterNativeObject", "()V");
if (_deRegisterNativeCID == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get DeRegisterNativeObject ID",
__FUNCTION__);
return -1;
}
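// Bind the 'native' methods declared in ViEAndroidGLES20 (DrawNative and
// CreateOpenGLNative) to the static trampolines below; the jlong ('J')
// argument carries the C++ object pointer set via RegisterNativeObject.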
JNINativeMethod nativeFunctions[2] = {
{ "DrawNative",
"(J)V",
(void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
{ "CreateOpenGLNative",
"(JII)I",
(void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
};
if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
"%s: Registered native functions", __FUNCTION__);
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: Failed to register native functions", __FUNCTION__);
return -1;
}
env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
// Detach this thread if it was attached
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
return -1;
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
return 0;
}
int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
const VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender = videoFrame;
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/* Implements AndroidStream.
* Calls the Java object and renders the buffer in _bufferToRender.
*/
void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
//TickTime timeNow=TickTime::Now();
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
// "%s: time to deliver %lld" ,__FUNCTION__,
// (TickTime::Now()-timeNow).Milliseconds());
}
/*
* JNI callback from Java class. Called when the renderer
* wants to render a frame. Called from the GLRenderThread.
* Method: DrawNative
* Signature: (J)V
*/
void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
JNIEnv * env, jobject, jlong context) {
AndroidNativeOpenGl2Channel* renderChannel =
reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
renderChannel->DrawNative();
}
void AndroidNativeOpenGl2Channel::DrawNative() {
_renderCritSect.Enter();
_openGLRenderer.Render(_bufferToRender);
_renderCritSect.Leave();
}
/*
* JNI callback from Java class. Called when the GLSurfaceView
* has created a surface. Called from the GLRenderThread.
* Method: CreateOpenGLNativeStatic
* Signature: (JII)I
*/
jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
JNIEnv * env,
jobject,
jlong context,
jint width,
jint height) {
AndroidNativeOpenGl2Channel* renderChannel =
reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
return renderChannel->CreateOpenGLNative(width, height);
}
jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
int width, int height) {
return _openGLRenderer.Setup(width, height);
}
} // namespace webrtc


@ -0,0 +1,95 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#include <jni.h>
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
#include "webrtc/modules/video_render/android/video_render_opengles20.h"
#include "webrtc/modules/video_render/video_render_defines.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidNativeOpenGl2Channel: public AndroidStream {
public:
AndroidNativeOpenGl2Channel(
uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj);
~AndroidNativeOpenGl2Channel();
int32_t Init(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
//Implement VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
static jint JNICALL CreateOpenGLNativeStatic(
JNIEnv * env,
jobject,
jlong context,
jint width,
jint height);
jint CreateOpenGLNative(int width, int height);
static void JNICALL DrawNativeStatic(JNIEnv * env,jobject, jlong context);
void DrawNative();
uint32_t _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
jmethodID _redrawCid;
jmethodID _registerNativeCID;
jmethodID _deRegisterNativeCID;
VideoRenderOpenGles20 _openGLRenderer;
};
class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
public:
AndroidNativeOpenGl2Renderer(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidNativeOpenGl2Renderer();
static bool UseOpenGL2(void* window);
int32_t Init();
virtual AndroidStream* CreateAndroidRenderChannel(
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_


@ -0,0 +1,474 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/include/trace.h"
#endif
namespace webrtc {
AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
VideoRenderAndroid(id,videoRenderType,window,fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL) {
}
AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidSurfaceViewRenderer dtor");
if(g_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
env=NULL;
}
else {
isAttached = true;
}
}
if (env) {  // env is NULL if the attach above failed.
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
}
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
int32_t AndroidSurfaceViewRenderer::Init() {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"(%s): Not a valid Java VM pointer.",
__FUNCTION__);
return -1;
}
if(!_ptrWindow) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"(%s): No window have been provided.",
__FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
return -1;
}
isAttached = true;
}
// get the ViESurfaceRender class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClassLocal) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not find ViESurfaceRenderer",
__FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that
// we are referencing it after this function has returned)
_javaRenderClass =
reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java ViESurfaceRenderer class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// get the method ID for the constructor
jmethodID cid = env->GetMethodID(_javaRenderClass,
"<init>",
"(Landroid/view/SurfaceView;)V");
if (cid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get constructor ID",
__FUNCTION__);
return -1; /* exception thrown */
}
// construct the object
jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
cid,
_ptrWindow);
if (!javaRenderObjLocal) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java Render",
__FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
if (!_javaRenderObj) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
return 0;
}
AndroidStream*
AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) {
WEBRTC_TRACE(kTraceDebug,
kTraceVideoRenderer,
_id,
"%s: Id %d",
__FUNCTION__,
streamId);
AndroidSurfaceViewChannel* stream =
new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
if(stream && stream->Init(zOrder, left, top, right, bottom) == 0)
return stream;
else
delete stream;
return NULL;
}
AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj) :
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer),
_jvm(jvm),
_javaRenderObj(javaRenderObj),
#ifndef ANDROID_NDK_8_OR_ABOVE
_javaByteBufferObj(NULL),
_directBuffer(NULL),
#endif
_bitmapWidth(0),
_bitmapHeight(0) {
}
AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
WEBRTC_TRACE(kTraceInfo,
kTraceVideoRenderer,
_id,
"AndroidSurfaceViewChannel dtor");
delete &_renderCritSect;
if(_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if ( _jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
env=NULL;
}
else {
isAttached = true;
}
}
if (env) {
env->DeleteGlobalRef(_javaByteBufferObj);
}
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
int32_t AndroidSurfaceViewChannel::Init(
int32_t /*zOrder*/,
const float left,
const float top,
const float right,
const float bottom) {
WEBRTC_TRACE(kTraceDebug,
kTraceVideoRenderer,
_id,
"%s: AndroidSurfaceViewChannel",
__FUNCTION__);
if (!_jvm) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Not a valid Java VM pointer",
__FUNCTION__);
return -1;
}
if( (top > 1 || top < 0) ||
(right > 1 || right < 0) ||
(bottom > 1 || bottom < 0) ||
(left > 1 || left < 0)) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Wrong coordinates", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass =
env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClass) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not find ViESurfaceRenderer",
__FUNCTION__);
return -1;
}
// get the method ID for the CreateByteBuffer function
_createByteBufferCid =
env->GetMethodID(javaRenderClass,
"CreateByteBuffer",
"(II)Ljava/nio/ByteBuffer;");
if (_createByteBufferCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get CreateByteBuffer ID",
__FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawByteBuffer function
_drawByteBufferCid = env->GetMethodID(javaRenderClass,
"DrawByteBuffer",
"()V");
if (_drawByteBufferCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get DrawByteBuffer ID",
__FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the SetCoordinates function
_setCoordinatesCid = env->GetMethodID(javaRenderClass,
"SetCoordinates",
"(FFFF)V");
if (_setCoordinatesCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get SetCoordinates ID",
__FUNCTION__);
return -1; /* exception thrown */
}
env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
left, top, right, bottom);
// Detach this thread if it was attached
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug,
kTraceVideoRenderer,
_id,
"%s: AndroidSurfaceViewChannel done",
__FUNCTION__);
return 0;
}
int32_t AndroidSurfaceViewChannel::RenderFrame(const uint32_t /*streamId*/,
const VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender = videoFrame;
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/* Implements AndroidStream.
 * Calls the Java object and renders the buffer in _bufferToRender.
 */
void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
_renderCritSect.Enter();
if (_bitmapWidth != _bufferToRender.width() ||
_bitmapHeight != _bufferToRender.height()) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
"%d",__FUNCTION__,
_bufferToRender.width(), _bufferToRender.height());
if (_javaByteBufferObj) {
jniEnv->DeleteGlobalRef(_javaByteBufferObj);
_javaByteBufferObj = NULL;
_directBuffer = NULL;
}
jobject javaByteBufferObj =
jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
_bufferToRender.width(),
_bufferToRender.height());
_javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
if (!_javaByteBufferObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
"create Java ByteBuffer object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
} else {
_directBuffer = static_cast<unsigned char*>
(jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
_bitmapWidth = _bufferToRender.width();
_bitmapHeight = _bufferToRender.height();
}
}
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
const int conversionResult =
ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer);
if (conversionResult < 0) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
" failed.", __FUNCTION__);
_renderCritSect.Leave();
return;
}
}
_renderCritSect.Leave();
// Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
}
} // namespace webrtc
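
Every method in the Android renderer files above repeats the same JNI attach-if-needed / detach-if-attached dance. As a sketch only (it assumes the same JNI_VERSION_1_4 requirement as the code above and is not part of the original module), an RAII wrapper makes that pattern explicit:

#include <jni.h>

// Attaches the current thread to the JVM if needed and detaches it again on
// destruction. env() returns NULL if no environment could be obtained.
class ScopedJniEnv {
 public:
  explicit ScopedJniEnv(JavaVM* jvm) : jvm_(jvm), env_(NULL), attached_(false) {
    if (jvm_->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4) != JNI_OK) {
      if (jvm_->AttachCurrentThread(&env_, NULL) == 0) {
        attached_ = true;
      } else {
        env_ = NULL;
      }
    }
  }
  ~ScopedJniEnv() {
    if (attached_) {
      jvm_->DetachCurrentThread();
    }
  }
  JNIEnv* env() const { return env_; }
 private:
  JavaVM* jvm_;
  JNIEnv* env_;
  bool attached_;
};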

View File

@ -0,0 +1,83 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#include <jni.h>
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
#include "webrtc/modules/video_render/video_render_defines.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidSurfaceViewChannel : public AndroidStream {
public:
AndroidSurfaceViewChannel(uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj);
~AndroidSurfaceViewChannel();
int32_t Init(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
// Implements VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
uint32_t _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
jobject _javaByteBufferObj;
unsigned char* _directBuffer;
jmethodID _createByteBufferCid;
jmethodID _drawByteBufferCid;
jmethodID _setCoordinatesCid;
int _bitmapWidth;
int _bitmapHeight;
};
class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
public:
AndroidSurfaceViewRenderer(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidSurfaceViewRenderer();
int32_t Init();
virtual AndroidStream* CreateAndroidRenderChannel(
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_

View File

@ -0,0 +1,397 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdio.h>
#include <stdlib.h>
#include "webrtc/modules/video_render/android/video_render_opengles20.h"
//#define ANDROID_LOG
#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/include/trace.h"
#endif
namespace webrtc {
const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
const char VideoRenderOpenGles20::g_vertextShader[] = {
"attribute vec4 aPosition;\n"
"attribute vec2 aTextureCoord;\n"
"varying vec2 vTextureCoord;\n"
"void main() {\n"
" gl_Position = aPosition;\n"
" vTextureCoord = aTextureCoord;\n"
"}\n" };
// The fragment shader.
// Does YUV to RGB conversion.
const char VideoRenderOpenGles20::g_fragmentShader[] = {
"precision mediump float;\n"
"uniform sampler2D Ytex;\n"
"uniform sampler2D Utex,Vtex;\n"
"varying vec2 vTextureCoord;\n"
"void main(void) {\n"
" float nx,ny,r,g,b,y,u,v;\n"
" mediump vec4 txl,ux,vx;"
" nx=vTextureCoord[0];\n"
" ny=vTextureCoord[1];\n"
" y=texture2D(Ytex,vec2(nx,ny)).r;\n"
" u=texture2D(Utex,vec2(nx,ny)).r;\n"
" v=texture2D(Vtex,vec2(nx,ny)).r;\n"
//" y = v;\n"+
" y=1.1643*(y-0.0625);\n"
" u=u-0.5;\n"
" v=v-0.5;\n"
" r=y+1.5958*v;\n"
" g=y-0.39173*u-0.81290*v;\n"
" b=y+2.017*u;\n"
" gl_FragColor=vec4(r,g,b,1.0);\n"
"}\n" };
VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) :
_id(id),
_textureWidth(-1),
_textureHeight(-1) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
__FUNCTION__, (int) _id);
const GLfloat vertices[20] = {
// X, Y, Z, U, V
-1, -1, 0, 0, 1, // Bottom Left
1, -1, 0, 1, 1, //Bottom Right
1, 1, 0, 1, 0, //Top Right
-1, 1, 0, 0, 0 }; //Top Left
memcpy(_vertices, vertices, sizeof(_vertices));
}
VideoRenderOpenGles20::~VideoRenderOpenGles20() {
}
int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d", __FUNCTION__, (int) width,
(int) height);
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
int maxTextureImageUnits[2];
int maxTextureSize[2];
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: number of textures %d, size %d", __FUNCTION__,
(int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
_program = createProgram(g_vertextShader, g_fragmentShader);
if (!_program) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not create program", __FUNCTION__);
return -1;
}
int positionHandle = glGetAttribLocation(_program, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (positionHandle == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not get aPosition handle", __FUNCTION__);
return -1;
}
int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (textureHandle == -1) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not get aTextureCoord handle", __FUNCTION__);
return -1;
}
// set the vertices array in the shader
// _vertices contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
5 * sizeof(GLfloat), _vertices);
checkGlError("glVertexAttribPointer aPosition");
glEnableVertexAttribArray(positionHandle);
checkGlError("glEnableVertexAttribArray positionHandle");
// set the texture coordinate array in the shader
// _vertices contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
* sizeof(GLfloat), &_vertices[3]);
checkGlError("glVertexAttribPointer maTextureHandle");
glEnableVertexAttribArray(textureHandle);
checkGlError("glEnableVertexAttribArray textureHandle");
glUseProgram(_program);
int i = glGetUniformLocation(_program, "Ytex");
checkGlError("glGetUniformLocation");
glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
checkGlError("glUniform1i Ytex");
i = glGetUniformLocation(_program, "Utex");
checkGlError("glGetUniformLocation Utex");
glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
checkGlError("glUniform1i Utex");
i = glGetUniformLocation(_program, "Vtex");
checkGlError("glGetUniformLocation");
glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
checkGlError("glUniform1i");
glViewport(0, 0, width, height);
checkGlError("glViewport");
return 0;
}
// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
// Values must be between 0 and 1.
int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom) {
if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
(bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Wrong coordinates", __FUNCTION__);
return -1;
}
// X, Y, Z, U, V
// -1, -1, 0, 0, 1, // Bottom Left
// 1, -1, 0, 1, 1, //Bottom Right
// 1, 1, 0, 1, 0, //Top Right
// -1, 1, 0, 0, 0 //Top Left
// Bottom Left
_vertices[0] = (left * 2) - 1;
_vertices[1] = -1 * (2 * bottom) + 1;
_vertices[2] = zOrder;
//Bottom Right
_vertices[5] = (right * 2) - 1;
_vertices[6] = -1 * (2 * bottom) + 1;
_vertices[7] = zOrder;
//Top Right
_vertices[10] = (right * 2) - 1;
_vertices[11] = -1 * (2 * top) + 1;
_vertices[12] = zOrder;
//Top Left
_vertices[15] = (left * 2) - 1;
_vertices[16] = -1 * (2 * top) + 1;
_vertices[17] = zOrder;
return 0;
}
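// Worked example (illustrative values, not from the original code): a stream
// meant to fill the top-left quadrant uses left=0.0, top=0.0, right=0.5,
// bottom=0.5, which yields
//   _vertices[0]  = (0.0 * 2) - 1      = -1.0  (bottom-left x)
//   _vertices[1]  = -1 * (2 * 0.5) + 1 =  0.0  (bottom-left y)
//   _vertices[10] = (0.5 * 2) - 1      =  0.0  (top-right x)
//   _vertices[11] = -1 * (2 * 0.0) + 1 =  1.0  (top-right y)
// i.e. the [0,1] window rectangle maps to the upper-left quarter of OpenGL's
// [-1,1] clip space, with the y axis flipped.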
int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
if (frameToRender.IsZeroSize()) {
return -1;
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
__FUNCTION__, (int) _id);
glUseProgram(_program);
checkGlError("glUseProgram");
if (_textureWidth != (GLsizei) frameToRender.width() ||
_textureHeight != (GLsizei) frameToRender.height()) {
SetupTextures(frameToRender);
}
UpdateTextures(frameToRender);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
checkGlError("glDrawElements");
return 0;
}
GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
const char* pSource) {
GLuint shader = glCreateShader(shaderType);
if (shader) {
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen) {
char* buf = (char*) malloc(infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not compile shader %d: %s",
__FUNCTION__, shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
const char* pFragmentSource) {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader) {
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader) {
return 0;
}
GLuint program = glCreateProgram();
if (program) {
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength) {
char* buf = (char*) malloc(bufLength);
if (buf) {
glGetProgramInfoLog(program, bufLength, NULL, buf);
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not link program: %s",
__FUNCTION__, buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
const char *v = (const char *) glGetString(s);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
name, v);
}
void VideoRenderOpenGles20::checkGlError(const char* op) {
#ifdef ANDROID_LOG
for (GLint error = glGetError(); error; error = glGetError()) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"after %s() glError (0x%x)\n", op, error);
}
#else
return;
#endif
}
static void InitializeTexture(int name, int id, int width, int height) {
glActiveTexture(name);
glBindTexture(GL_TEXTURE_2D, id);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
}
void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d", __FUNCTION__,
frameToRender.width(), frameToRender.height());
const GLsizei width = frameToRender.width();
const GLsizei height = frameToRender.height();
glGenTextures(3, _textureIds); //Generate the Y, U and V texture
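// I420 layout: the Y plane is full resolution while U and V are subsampled
// by two in each dimension, hence the half width / half height below.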
InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height);
InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2);
InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2);
checkGlError("SetupTextures");
_textureWidth = width;
_textureHeight = height;
}
// Uploads a plane of pixel data, accounting for stride != width*bpp.
static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
const uint8_t* plane) {
if (stride == width) {
// Yay! We can upload the entire plane in a single GL call.
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
GL_UNSIGNED_BYTE,
static_cast<const GLvoid*>(plane));
} else {
// Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
// have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
for (int row = 0; row < height; ++row) {
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
GL_UNSIGNED_BYTE,
static_cast<const GLvoid*>(plane + (row * stride)));
}
}
}
void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
const GLsizei width = frameToRender.width();
const GLsizei height = frameToRender.height();
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
GlTexSubImage2D(width, height, frameToRender.stride(kYPlane),
frameToRender.buffer(kYPlane));
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
frameToRender.buffer(kUPlane));
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane),
frameToRender.buffer(kVPlane));
checkGlError("UpdateTextures");
}
} // namespace webrtc

View File

@ -0,0 +1,57 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
#include "webrtc/modules/video_render/video_render_defines.h"
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
namespace webrtc
{
class VideoRenderOpenGles20 {
public:
VideoRenderOpenGles20(int32_t id);
~VideoRenderOpenGles20();
int32_t Setup(int32_t width, int32_t height);
int32_t Render(const VideoFrame& frameToRender);
int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
private:
void printGLString(const char *name, GLenum s);
void checkGlError(const char* op);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint createProgram(const char* pVertexSource,
const char* pFragmentSource);
void SetupTextures(const VideoFrame& frameToRender);
void UpdateTextures(const VideoFrame& frameToRender);
int32_t _id;
GLuint _textureIds[3]; // Texture id of Y,U and V texture.
GLuint _program;
GLsizei _textureWidth;
GLsizei _textureHeight;
GLfloat _vertices[20];
static const char g_indices[];
static const char g_vertextShader[];
static const char g_fragmentShader[];
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
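
A minimal usage sketch for the class declared above (assumptions: a GLES 2.0 context is already current on the calling thread and |frame| holds a decoded I420 picture; the function name and parameters are illustrative, not part of the module):

#include "webrtc/modules/video_render/android/video_render_opengles20.h"

void RenderFullScreen(const webrtc::VideoFrame& frame,
                      int32_t surface_width,
                      int32_t surface_height) {
  webrtc::VideoRenderOpenGles20 renderer(0);  // 0 = arbitrary trace id.
  if (renderer.Setup(surface_width, surface_height) != 0)
    return;  // Shader compilation or attribute lookup failed.
  // Cover the whole window; coordinates are fractions in [0,1].
  renderer.SetCoordinates(0, 0.0f, 0.0f, 1.0f, 1.0f);
  renderer.Render(frame);  // The caller swaps the EGL buffers afterwards.
}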

View File

@ -0,0 +1,195 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/external/video_render_external_impl.h"
namespace webrtc {
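// VideoRenderExternalImpl is effectively a null renderer: every configuration
// call succeeds without doing anything, and RenderFrame() below accepts and
// then drops the frame. It backs kRenderExternal, where the embedding
// application is expected to render frames itself.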
VideoRenderExternalImpl::VideoRenderExternalImpl(
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_fullscreen(fullscreen)
{
}
VideoRenderExternalImpl::~VideoRenderExternalImpl()
{
delete &_critSect;
}
int32_t VideoRenderExternalImpl::Init()
{
return 0;
}
int32_t VideoRenderExternalImpl::ChangeWindow(void* window)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
VideoRenderCallback*
VideoRenderExternalImpl::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_critSect);
return this;
}
int32_t VideoRenderExternalImpl::DeleteIncomingRenderStream(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
CriticalSectionScoped cs(&_critSect);
zOrder = 0;
left = 0;
top = 0;
right = 0;
bottom = 0;
return 0;
}
int32_t VideoRenderExternalImpl::StartRender()
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::StopRender()
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
VideoRenderType VideoRenderExternalImpl::RenderType()
{
return kRenderExternal;
}
RawVideoType VideoRenderExternalImpl::PerferedVideoType()
{
return kVideoI420;
}
bool VideoRenderExternalImpl::FullScreen()
{
CriticalSectionScoped cs(&_critSect);
return _fullscreen;
}
int32_t VideoRenderExternalImpl::GetGraphicsMemory(
uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const
{
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return -1;
}
int32_t VideoRenderExternalImpl::GetScreenResolution(
uint32_t& screenWidth,
uint32_t& screenHeight) const
{
CriticalSectionScoped cs(&_critSect);
screenWidth = 0;
screenHeight = 0;
return 0;
}
uint32_t VideoRenderExternalImpl::RenderFrameRate(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::SetStreamCropping(
const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::SetTransparentBackground(
const bool enable)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_critSect);
return 0;
}
// VideoRenderCallback
int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame) {
return 0;
}
} // namespace webrtc

View File

@ -0,0 +1,128 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_render/i_video_render.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
namespace webrtc {
// Class definitions
class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback
{
public:
/*
* Constructor/destructor
*/
VideoRenderExternalImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~VideoRenderExternalImpl();
virtual int32_t Init();
virtual int32_t ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t
DeleteIncomingRenderStream(const uint32_t streamId);
virtual int32_t
GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender();
virtual int32_t StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual int32_t
GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual int32_t
GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
// VideoRenderCallback
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
private:
CriticalSectionWrapper& _critSect;
bool _fullscreen;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_

View File

@ -0,0 +1,129 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
#include "webrtc/modules/video_render/video_render.h"
namespace webrtc {
// Class definitions
class IVideoRender
{
public:
/*
* Constructor/destructor
*/
virtual ~IVideoRender() {}
virtual int32_t Init() = 0;
virtual int32_t ChangeWindow(void* window) = 0;
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t
DeleteIncomingRenderStream(const uint32_t streamId) = 0;
virtual int32_t
GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const = 0;
// Implemented in common code?
//virtual uint32_t GetNumIncomingRenderStreams() const = 0;
//virtual bool HasIncomingRenderStream(const uint16_t stramId) const = 0;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender() = 0;
virtual int32_t StopRender() = 0;
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType() = 0;
virtual RawVideoType PerferedVideoType() = 0;
virtual bool FullScreen() = 0;
// TODO: This should be treated in platform specific code only
virtual int32_t
GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const = 0;
virtual int32_t
GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const = 0;
virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t SetTransparentBackground(const bool enable) = 0;
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_

View File

@ -0,0 +1,64 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
#include <OpenGLES/ES2/glext.h>
#include "webrtc/modules/video_render/video_render_defines.h"
/*
 * OpenGles20 renders a VideoFrame into a GLES 2.0 window; it is used by the
 * VideoRenderIosView class.
 */
namespace webrtc {
class OpenGles20 {
public:
OpenGles20();
~OpenGles20();
bool Setup(int32_t width, int32_t height);
bool Render(const VideoFrame& frame);
// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
// Values must be between 0 and 1.
bool SetCoordinates(const float z_order,
const float left,
const float top,
const float right,
const float bottom);
private:
// Compile and load the vertex and fragment shaders defined at the top of
// open_gles20.mm
GLuint LoadShader(GLenum shader_type, const char* shader_source);
GLuint CreateProgram(const char* vertex_source, const char* fragment_source);
// Initialize the textures by the frame width and height
void SetupTextures(const VideoFrame& frame);
// Update the textures by the YUV data from the frame
void UpdateTextures(const VideoFrame& frame);
GLuint texture_ids_[3]; // Texture id of Y,U and V texture.
GLuint program_;
GLsizei texture_width_;
GLsizei texture_height_;
GLfloat vertices_[20];
static const char indices_[];
static const char vertext_shader_[];
static const char fragment_shader_[];
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_

View File

@ -0,0 +1,330 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
// This file is mostly copied from
// webrtc/modules/video_render/android/video_render_opengles20.h
// TODO(sjlee): unify this copy with the android one.
#include "webrtc/modules/video_render/ios/open_gles20.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
const char OpenGles20::indices_[] = {0, 3, 2, 0, 2, 1};
const char OpenGles20::vertext_shader_[] = {
"attribute vec4 aPosition;\n"
"attribute vec2 aTextureCoord;\n"
"varying vec2 vTextureCoord;\n"
"void main() {\n"
" gl_Position = aPosition;\n"
" vTextureCoord = aTextureCoord;\n"
"}\n"};
// The fragment shader.
// Does YUV to RGB conversion.
const char OpenGles20::fragment_shader_[] = {
"precision mediump float;\n"
"uniform sampler2D Ytex;\n"
"uniform sampler2D Utex,Vtex;\n"
"varying vec2 vTextureCoord;\n"
"void main(void) {\n"
" float nx,ny,r,g,b,y,u,v;\n"
" mediump vec4 txl,ux,vx;"
" nx=vTextureCoord[0];\n"
" ny=vTextureCoord[1];\n"
" y=texture2D(Ytex,vec2(nx,ny)).r;\n"
" u=texture2D(Utex,vec2(nx,ny)).r;\n"
" v=texture2D(Vtex,vec2(nx,ny)).r;\n"
" y=1.1643*(y-0.0625);\n"
" u=u-0.5;\n"
" v=v-0.5;\n"
" r=y+1.5958*v;\n"
" g=y-0.39173*u-0.81290*v;\n"
" b=y+2.017*u;\n"
" gl_FragColor=vec4(r,g,b,1.0);\n"
"}\n"};
OpenGles20::OpenGles20() : texture_width_(-1), texture_height_(-1) {
texture_ids_[0] = 0;
texture_ids_[1] = 0;
texture_ids_[2] = 0;
program_ = 0;
const GLfloat vertices[20] = {
// X, Y, Z, U, V
-1, -1, 0, 0, 1, // Bottom Left
1, -1, 0, 1, 1, // Bottom Right
1, 1, 0, 1, 0, // Top Right
-1, 1, 0, 0, 0}; // Top Left
memcpy(vertices_, vertices, sizeof(vertices_));
}
OpenGles20::~OpenGles20() {
if (program_) {
glDeleteTextures(3, texture_ids_);
glDeleteProgram(program_);
}
}
bool OpenGles20::Setup(int32_t width, int32_t height) {
program_ = CreateProgram(vertext_shader_, fragment_shader_);
if (!program_) {
return false;
}
int position_handle = glGetAttribLocation(program_, "aPosition");
int texture_handle = glGetAttribLocation(program_, "aTextureCoord");
// set the vertices array in the shader
// vertices_ contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(
position_handle, 3, GL_FLOAT, false, 5 * sizeof(GLfloat), vertices_);
glEnableVertexAttribArray(position_handle);
// set the texture coordinate array in the shader
// vertices_ contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(
texture_handle, 2, GL_FLOAT, false, 5 * sizeof(GLfloat), &vertices_[3]);
glEnableVertexAttribArray(texture_handle);
glUseProgram(program_);
int i = glGetUniformLocation(program_, "Ytex");
glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
i = glGetUniformLocation(program_, "Utex");
glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
i = glGetUniformLocation(program_, "Vtex");
glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
glViewport(0, 0, width, height);
return true;
}
bool OpenGles20::SetCoordinates(const float z_order,
const float left,
const float top,
const float right,
const float bottom) {
if (top > 1 || top < 0 || right > 1 || right < 0 || bottom > 1 ||
bottom < 0 || left > 1 || left < 0) {
return false;
}
// Bottom Left
vertices_[0] = (left * 2) - 1;
vertices_[1] = -1 * (2 * bottom) + 1;
vertices_[2] = z_order;
// Bottom Right
vertices_[5] = (right * 2) - 1;
vertices_[6] = -1 * (2 * bottom) + 1;
vertices_[7] = z_order;
// Top Right
vertices_[10] = (right * 2) - 1;
vertices_[11] = -1 * (2 * top) + 1;
vertices_[12] = z_order;
// Top Left
vertices_[15] = (left * 2) - 1;
vertices_[16] = -1 * (2 * top) + 1;
vertices_[17] = z_order;
return true;
}
bool OpenGles20::Render(const VideoFrame& frame) {
if (texture_width_ != (GLsizei)frame.width() ||
texture_height_ != (GLsizei)frame.height()) {
SetupTextures(frame);
}
UpdateTextures(frame);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices_);
return true;
}
GLuint OpenGles20::LoadShader(GLenum shader_type, const char* shader_source) {
GLuint shader = glCreateShader(shader_type);
if (shader) {
glShaderSource(shader, 1, &shader_source, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint info_len = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
if (info_len) {
char* buf = (char*)malloc(info_len);
glGetShaderInfoLog(shader, info_len, NULL, buf);
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
0,
"%s: Could not compile shader %d: %s",
__FUNCTION__,
shader_type,
buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
GLuint OpenGles20::CreateProgram(const char* vertex_source,
const char* fragment_source) {
GLuint vertex_shader = LoadShader(GL_VERTEX_SHADER, vertex_source);
if (!vertex_shader) {
return 0;  // 0, not -1: a GLuint return of -1 wraps and defeats !program checks.
}
GLuint fragment_shader = LoadShader(GL_FRAGMENT_SHADER, fragment_source);
if (!fragment_shader) {
// Avoid leaking the compiled vertex shader on this early-out.
glDeleteShader(vertex_shader);
return 0;
}
GLuint program = glCreateProgram();
if (program) {
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
GLint link_status = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &link_status);
if (link_status != GL_TRUE) {
GLint info_len = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_len);
if (info_len) {
char* buf = (char*)malloc(info_len);
glGetProgramInfoLog(program, info_len, NULL, buf);
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
0,
"%s: Could not link program: %s",
__FUNCTION__,
buf);
free(buf);
}
glDeleteProgram(program);
program = 0;
}
}
if (vertex_shader) {
glDeleteShader(vertex_shader);
}
if (fragment_shader) {
glDeleteShader(fragment_shader);
}
return program;
}
static void InitializeTexture(int name, int id, int width, int height) {
glActiveTexture(name);
glBindTexture(GL_TEXTURE_2D, id);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
width,
height,
0,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
NULL);
}
void OpenGles20::SetupTextures(const VideoFrame& frame) {
const GLsizei width = frame.width();
const GLsizei height = frame.height();
if (!texture_ids_[0]) {
glGenTextures(3, texture_ids_); // Generate the Y, U and V texture
}
InitializeTexture(GL_TEXTURE0, texture_ids_[0], width, height);
InitializeTexture(GL_TEXTURE1, texture_ids_[1], width / 2, height / 2);
InitializeTexture(GL_TEXTURE2, texture_ids_[2], width / 2, height / 2);
texture_width_ = width;
texture_height_ = height;
}
// Uploads a plane of pixel data, accounting for stride != width*bpp.
static void GlTexSubImage2D(GLsizei width,
GLsizei height,
int stride,
const uint8_t* plane) {
if (stride == width) {
// Yay! We can upload the entire plane in a single GL call.
glTexSubImage2D(GL_TEXTURE_2D,
0,
0,
0,
width,
height,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
static_cast<const GLvoid*>(plane));
} else {
// Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and iOS doesn't
// have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
for (int row = 0; row < height; ++row) {
glTexSubImage2D(GL_TEXTURE_2D,
0,
0,
row,
width,
1,
GL_LUMINANCE,
GL_UNSIGNED_BYTE,
static_cast<const GLvoid*>(plane + (row * stride)));
}
}
}
void OpenGles20::UpdateTextures(const VideoFrame& frame) {
const GLsizei width = frame.width();
const GLsizei height = frame.height();
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture_ids_[0]);
GlTexSubImage2D(width, height, frame.stride(kYPlane), frame.buffer(kYPlane));
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, texture_ids_[1]);
GlTexSubImage2D(
width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, texture_ids_[2]);
GlTexSubImage2D(
width / 2, height / 2, frame.stride(kVPlane), frame.buffer(kVPlane));
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
namespace webrtc {
class VideoRenderIosGles20;
class VideoRenderIosChannel : public VideoRenderCallback {
public:
explicit VideoRenderIosChannel(VideoRenderIosView* view);
virtual ~VideoRenderIosChannel();
// Implementation of VideoRenderCallback.
int32_t RenderFrame(const uint32_t stream_id,
const VideoFrame& video_frame) override;
int SetStreamSettings(const float z_order,
const float left,
const float top,
const float right,
const float bottom);
bool IsUpdated();
bool RenderOffScreenBuffer();
private:
VideoRenderIosView* view_;
VideoFrame* current_frame_;
bool buffer_is_updated_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_

View File

@ -0,0 +1,61 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
using namespace webrtc;
VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
: view_(view), current_frame_(new VideoFrame()), buffer_is_updated_(false) {
}
VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
const VideoFrame& video_frame) {
current_frame_->CopyFrame(video_frame);
current_frame_->set_render_time_ms(0);
buffer_is_updated_ = true;
return 0;
}
bool VideoRenderIosChannel::RenderOffScreenBuffer() {
if (![view_ renderFrame:current_frame_]) {
return false;
}
buffer_is_updated_ = false;
return true;
}
bool VideoRenderIosChannel::IsUpdated() { return buffer_is_updated_; }
int VideoRenderIosChannel::SetStreamSettings(const float z_order,
const float left,
const float top,
const float right,
const float bottom) {
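// Note that |top| and |bottom| are deliberately swapped in the call below,
// presumably to convert between UIKit's top-left origin and OpenGL's
// bottom-left origin.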
if (![view_ setCoordinatesForZOrder:z_order
Left:left
Top:bottom
Right:right
Bottom:top]) {
return -1;
}
return 0;
}

View File

@ -0,0 +1,87 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
#include <list>
#include <map>
#include <memory>
#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
namespace webrtc {
class CriticalSectionWrapper;
class EventTimerWrapper;
class VideoRenderIosGles20 {
public:
VideoRenderIosGles20(VideoRenderIosView* view,
bool full_screen,
int render_id);
virtual ~VideoRenderIosGles20();
int Init();
VideoRenderIosChannel* CreateEaglChannel(int channel,
int z_order,
float left,
float top,
float right,
float bottom);
int DeleteEaglChannel(int channel);
bool HasChannel(int channel);
bool ScreenUpdateProcess();
int GetWindowRect(Rect& rect); // NOLINT
int GetScreenResolution(uint& screen_width, uint& screen_height); // NOLINT
int SetStreamCropping(const uint stream_id,
const float left,
const float top,
const float right,
const float bottom);
int ChangeWindow(void* new_window);
int StartRender();
int StopRender();
protected:
static bool ScreenUpdateThreadProc(void* obj);
private:
bool RenderOffScreenBuffers();
int SwapAndDisplayBuffers();
private:
std::unique_ptr<CriticalSectionWrapper> gles_crit_sec_;
EventTimerWrapper* screen_update_event_;
// TODO(pbos): Remove unique_ptr and use member directly.
std::unique_ptr<rtc::PlatformThread> screen_update_thread_;
VideoRenderIosView* view_;
Rect window_rect_;
int window_width_;
int window_height_;
bool is_full_screen_;
GLint backing_width_;
GLint backing_height_;
GLuint view_renderbuffer_;
GLuint view_framebuffer_;
GLuint depth_renderbuffer_;
std::map<int, VideoRenderIosChannel*> agl_channels_;
std::multimap<int, int> z_order_to_channel_;
EAGLContext* gles_context_;
bool is_rendering_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_

View File

@ -0,0 +1,285 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/event_wrapper.h"
using namespace webrtc;
VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
bool full_screen,
int render_id)
: gles_crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
screen_update_event_(0),
view_(view),
window_rect_(),
window_width_(0),
window_height_(0),
is_full_screen_(full_screen),
agl_channels_(),
z_order_to_channel_(),
gles_context_([view context]),
is_rendering_(true) {
screen_update_thread_.reset(new rtc::PlatformThread(
ScreenUpdateThreadProc, this, "ScreenUpdateGles20"));
screen_update_event_ = EventTimerWrapper::Create();
GetWindowRect(window_rect_);
}
VideoRenderIosGles20::~VideoRenderIosGles20() {
// Signal event to exit thread, then delete it
rtc::PlatformThread* thread_wrapper = screen_update_thread_.release();
if (thread_wrapper) {
screen_update_event_->Set();
screen_update_event_->StopTimer();
thread_wrapper->Stop();
delete thread_wrapper;
delete screen_update_event_;
screen_update_event_ = NULL;
is_rendering_ = false;
}
// Delete all channels.
for (std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
it != agl_channels_.end(); ++it) {
delete it->second;
}
agl_channels_.clear();
// Clean the zOrder map.
z_order_to_channel_.clear();
}
int VideoRenderIosGles20::Init() {
CriticalSectionScoped cs(gles_crit_sec_.get());
if (!view_) {
view_ = [[VideoRenderIosView alloc] init];
}
if (![view_ createContext]) {
return -1;
}
screen_update_thread_->Start();
screen_update_thread_->SetPriority(rtc::kRealtimePriority);
// Start the event triggering the render process
unsigned int monitor_freq = 60;
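// 1000 / 60 = 16 ms between timer events (integer division), i.e. roughly
// one repaint per refresh of a 60 Hz display.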
screen_update_event_->StartTimer(true, 1000 / monitor_freq);
window_width_ = window_rect_.right - window_rect_.left;
window_height_ = window_rect_.bottom - window_rect_.top;
return 0;
}
VideoRenderIosChannel* VideoRenderIosGles20::CreateEaglChannel(int channel,
int z_order,
float left,
float top,
float right,
float bottom) {
CriticalSectionScoped cs(gles_crit_sec_.get());
if (HasChannel(channel)) {
return NULL;
}
VideoRenderIosChannel* new_eagl_channel = new VideoRenderIosChannel(view_);
if (new_eagl_channel->SetStreamSettings(z_order, left, top, right, bottom) ==
-1) {
return NULL;
}
agl_channels_[channel] = new_eagl_channel;
z_order_to_channel_.insert(std::pair<int, int>(z_order, channel));
return new_eagl_channel;
}
int VideoRenderIosGles20::DeleteEaglChannel(int channel) {
CriticalSectionScoped cs(gles_crit_sec_.get());
std::map<int, VideoRenderIosChannel*>::iterator it;
it = agl_channels_.find(channel);
if (it != agl_channels_.end()) {
delete it->second;
agl_channels_.erase(it);
} else {
return -1;
}
std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
while (z_it != z_order_to_channel_.end()) {
if (z_it->second == channel) {
z_order_to_channel_.erase(z_it);
break;
}
z_it++;
}
return 0;
}
bool VideoRenderIosGles20::HasChannel(int channel) {
CriticalSectionScoped cs(gles_crit_sec_.get());
std::map<int, VideoRenderIosChannel*>::iterator it =
agl_channels_.find(channel);
if (it != agl_channels_.end()) {
return true;
}
return false;
}
// Rendering process
bool VideoRenderIosGles20::ScreenUpdateThreadProc(void* obj) {
return static_cast<VideoRenderIosGles20*>(obj)->ScreenUpdateProcess();
}
bool VideoRenderIosGles20::ScreenUpdateProcess() {
screen_update_event_->Wait(100);
CriticalSectionScoped cs(gles_crit_sec_.get());
if (!is_rendering_) {
return false;
}
if (!screen_update_thread_) {
return false;
}
if (GetWindowRect(window_rect_) == -1) {
return true;
}
if (window_width_ != (window_rect_.right - window_rect_.left) ||
window_height_ != (window_rect_.bottom - window_rect_.top)) {
window_width_ = window_rect_.right - window_rect_.left;
window_height_ = window_rect_.bottom - window_rect_.top;
}
// Check if there are any updated buffers
bool updated = false;
std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
while (it != agl_channels_.end()) {
VideoRenderIosChannel* agl_channel = it->second;
updated = agl_channel->IsUpdated();
if (updated) {
break;
}
it++;
}
if (updated) {
// At least one buffer has been updated, we need to repaint the texture
// Loop through all channels starting highest zOrder ending with lowest.
for (std::multimap<int, int>::reverse_iterator r_it =
z_order_to_channel_.rbegin();
r_it != z_order_to_channel_.rend();
r_it++) {
int channel_id = r_it->second;
std::map<int, VideoRenderIosChannel*>::iterator it =
agl_channels_.find(channel_id);
VideoRenderIosChannel* agl_channel = it->second;
agl_channel->RenderOffScreenBuffer();
}
[view_ presentFramebuffer];
}
return true;
}
int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
CriticalSectionScoped cs(gles_crit_sec_.get());
if (!view_) {
return -1;
}
CGRect bounds = [view_ bounds];
rect.top = bounds.origin.y;
rect.left = bounds.origin.x;
rect.bottom = bounds.size.height + bounds.origin.y;
rect.right = bounds.size.width + bounds.origin.x;
return 0;
}
int VideoRenderIosGles20::ChangeWindow(void* new_window) {
CriticalSectionScoped cs(gles_crit_sec_.get());
view_ = (__bridge VideoRenderIosView*)new_window;
return 0;
}
int VideoRenderIosGles20::StartRender() {
is_rendering_ = true;
return 0;
}
int VideoRenderIosGles20::StopRender() {
is_rendering_ = false;
return 0;
}
int VideoRenderIosGles20::GetScreenResolution(uint& screen_width,
uint& screen_height) {
screen_width = [view_ bounds].size.width;
screen_height = [view_ bounds].size.height;
return 0;
}
int VideoRenderIosGles20::SetStreamCropping(const uint stream_id,
const float left,
const float top,
const float right,
const float bottom) {
// Check if there are any updated buffers
// bool updated = false;
uint counter = 0;
std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
while (it != agl_channels_.end()) {
if (counter == stream_id) {
VideoRenderIosChannel* agl_channel = it->second;
agl_channel->SetStreamSettings(0, left, top, right, bottom);
}
counter++;
it++;
}
return 0;
}

View File

@ -0,0 +1,105 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
#include <list>
#include <map>
#include <memory>
#include "webrtc/modules/video_render/i_video_render.h"
namespace webrtc {
class VideoRenderIosGles20;
class CriticalSectionWrapper;
class VideoRenderIosImpl : IVideoRender {
public:
explicit VideoRenderIosImpl(const int32_t id,
void* window,
const bool full_screen);
~VideoRenderIosImpl();
// Implementation of IVideoRender.
int32_t Init() override;
int32_t ChangeWindow(void* window) override;
VideoRenderCallback* AddIncomingRenderStream(const uint32_t stream_id,
const uint32_t z_order,
const float left,
const float top,
const float right,
const float bottom) override;
int32_t DeleteIncomingRenderStream(const uint32_t stream_id) override;
int32_t GetIncomingRenderStreamProperties(const uint32_t stream_id,
uint32_t& z_order,
float& left,
float& top,
float& right,
float& bottom) const override;
int32_t StartRender() override;
int32_t StopRender() override;
VideoRenderType RenderType() override;
RawVideoType PerferedVideoType() override;
bool FullScreen() override;
int32_t GetGraphicsMemory(
uint64_t& total_graphics_memory,
uint64_t& available_graphics_memory) const override; // NOLINT
int32_t GetScreenResolution(
uint32_t& screen_width,
uint32_t& screen_height) const override; // NOLINT
uint32_t RenderFrameRate(const uint32_t stream_id);
int32_t SetStreamCropping(const uint32_t stream_id,
const float left,
const float top,
const float right,
const float bottom) override;
int32_t ConfigureRenderer(const uint32_t stream_id,
const unsigned int z_order,
const float left,
const float top,
const float right,
const float bottom) override;
int32_t SetTransparentBackground(const bool enable) override;
int32_t SetText(const uint8_t text_id,
const uint8_t* text,
const int32_t text_length,
const uint32_t text_color_ref,
const uint32_t background_color_ref,
const float left,
const float top,
const float right,
const float bottom) override;
int32_t SetBitmap(const void* bit_map,
const uint8_t picture_id,
const void* color_key,
const float left,
const float top,
const float right,
const float bottom);
int32_t FullScreenRender(void* window, const bool enable);
private:
int32_t id_;
void* ptr_window_;
bool full_screen_;
CriticalSectionWrapper* crit_sec_;
std::unique_ptr<VideoRenderIosGles20> ptr_ios_render_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
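
For orientation, here is a minimal standalone sketch of the lifecycle a platform renderer like this one is driven through: Init, add a stream, start, stop, delete. StubRenderer is a simplified stand-in, not the real IVideoRender interface:

// Standalone sketch (not from the diff): the call sequence a platform
// renderer is driven through. StubRenderer is hypothetical and only
// mirrors the shape of the methods declared above.
#include <cstdint>
#include <cstdio>

class StubRenderer {
 public:
  int32_t Init() { return 0; }
  void* AddIncomingRenderStream(uint32_t stream_id, uint32_t z_order,
                                float left, float top, float right,
                                float bottom) {
    std::printf("stream %u at z=%u rect=(%g,%g,%g,%g)\n",
                (unsigned)stream_id, (unsigned)z_order, left, top, right,
                bottom);
    return this;  // the real API returns a VideoRenderCallback*
  }
  int32_t StartRender() { return 0; }
  int32_t StopRender() { return 0; }
  int32_t DeleteIncomingRenderStream(uint32_t stream_id) { return 0; }
};

int main() {
  StubRenderer renderer;
  if (renderer.Init() != 0) return 1;
  // Streams are placed with normalized [0,1] coordinates; full window here.
  renderer.AddIncomingRenderStream(0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
  renderer.StartRender();
  // ... frames are delivered through the returned render callback ...
  renderer.StopRender();
  renderer.DeleteIncomingRenderStream(0);
  return 0;
}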

@@ -0,0 +1,170 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
#define IOS_UNSUPPORTED() \
WEBRTC_TRACE(kTraceError, \
kTraceVideoRenderer, \
id_, \
"%s is not supported on the iOS platform.", \
__FUNCTION__); \
return -1;
VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
void* window,
const bool full_screen)
: id_(id),
ptr_window_(window),
full_screen_(full_screen),
crit_sec_(CriticalSectionWrapper::CreateCriticalSection()) {}
VideoRenderIosImpl::~VideoRenderIosImpl() {
delete crit_sec_;
}
int32_t VideoRenderIosImpl::Init() {
CriticalSectionScoped cs(crit_sec_);
ptr_ios_render_.reset(new VideoRenderIosGles20(
(__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_));
  return ptr_ios_render_->Init();
}
int32_t VideoRenderIosImpl::ChangeWindow(void* window) {
CriticalSectionScoped cs(crit_sec_);
if (window == NULL) {
return -1;
}
ptr_window_ = window;
return ptr_ios_render_->ChangeWindow(ptr_window_);
}
VideoRenderCallback* VideoRenderIosImpl::AddIncomingRenderStream(
const uint32_t stream_id,
const uint32_t z_order,
const float left,
const float top,
const float right,
const float bottom) {
CriticalSectionScoped cs(crit_sec_);
if (!ptr_window_) {
return NULL;
}
return ptr_ios_render_->CreateEaglChannel(
stream_id, z_order, left, top, right, bottom);
}
int32_t VideoRenderIosImpl::DeleteIncomingRenderStream(
const uint32_t stream_id) {
CriticalSectionScoped cs(crit_sec_);
return ptr_ios_render_->DeleteEaglChannel(stream_id);
}
int32_t VideoRenderIosImpl::GetIncomingRenderStreamProperties(
const uint32_t stream_id,
uint32_t& z_order,
float& left,
float& top,
float& right,
float& bottom) const {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::StartRender() {
return ptr_ios_render_->StartRender();
}
int32_t VideoRenderIosImpl::StopRender() {
return ptr_ios_render_->StopRender();
}
VideoRenderType VideoRenderIosImpl::RenderType() { return kRenderiOS; }
RawVideoType VideoRenderIosImpl::PerferedVideoType() { return kVideoI420; }
bool VideoRenderIosImpl::FullScreen() { IOS_UNSUPPORTED(); }
int32_t VideoRenderIosImpl::GetGraphicsMemory(
uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const {
return ptr_ios_render_->GetScreenResolution(screenWidth, screenHeight);
}
uint32_t VideoRenderIosImpl::RenderFrameRate(const uint32_t streamId) {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom) {
return ptr_ios_render_->SetStreamCropping(streamId, left, top, right, bottom);
}
int32_t VideoRenderIosImpl::ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::SetTransparentBackground(const bool enable) {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom) {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom) {
IOS_UNSUPPORTED();
}
int32_t VideoRenderIosImpl::FullScreenRender(void* window, const bool enable) {
IOS_UNSUPPORTED();
}
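
Nearly every method above collapses to IOS_UNSUPPORTED(), which folds the error trace and the return -1 into one statement. A standalone sketch of that macro pattern (hypothetical names; fprintf stands in for WEBRTC_TRACE):

// Standalone sketch (not from the diff): a macro that logs the enclosing
// function's name and makes the caller return an error code, so unsupported
// methods stay one-liners, as with IOS_UNSUPPORTED() above.
#include <cstdio>

#define UNSUPPORTED()                                   \
  do {                                                  \
    std::fprintf(stderr, "%s is not supported here.\n", \
                 __func__);                             \
  } while (0);                                          \
  return -1

int SetTransparentBackground(bool /*enable*/) {
  UNSUPPORTED();
}

int main() {
  return SetTransparentBackground(true) == -1 ? 0 : 1;
}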

@@ -0,0 +1,34 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
#import <UIKit/UIKit.h>
#import <QuartzCore/QuartzCore.h>
#include "webrtc/modules/video_render/ios/open_gles20.h"
@interface VideoRenderIosView : UIView
- (BOOL)createContext;
- (BOOL)presentFramebuffer;
- (BOOL)renderFrame:(webrtc::VideoFrame*)frameToRender;
- (BOOL)setCoordinatesForZOrder:(const float)zOrder
Left:(const float)left
Top:(const float)top
Right:(const float)right
Bottom:(const float)bottom;
@property(nonatomic, retain) EAGLContext* context;
@end
#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_

@@ -0,0 +1,163 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#include <memory>
#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
@implementation VideoRenderIosView {
EAGLContext* _context;
std::unique_ptr<webrtc::OpenGles20> _gles_renderer20;
int _frameBufferWidth;
int _frameBufferHeight;
unsigned int _defaultFrameBuffer;
unsigned int _colorRenderBuffer;
}
@synthesize context = context_;
+ (Class)layerClass {
return [CAEAGLLayer class];
}
- (id)initWithCoder:(NSCoder*)coder {
// init super class
self = [super initWithCoder:coder];
if (self) {
_gles_renderer20.reset(new OpenGles20());
}
return self;
}
- (id)init {
// init super class
self = [super init];
if (self) {
_gles_renderer20.reset(new OpenGles20());
}
return self;
}
- (id)initWithFrame:(CGRect)frame {
// init super class
self = [super initWithFrame:frame];
if (self) {
_gles_renderer20.reset(new OpenGles20());
}
return self;
}
- (void)dealloc {
if (_defaultFrameBuffer) {
glDeleteFramebuffers(1, &_defaultFrameBuffer);
_defaultFrameBuffer = 0;
}
if (_colorRenderBuffer) {
glDeleteRenderbuffers(1, &_colorRenderBuffer);
_colorRenderBuffer = 0;
}
[EAGLContext setCurrentContext:nil];
}
- (NSString*)description {
return [NSString stringWithFormat:
@"A WebRTC implemented subclass of UIView."
"+Class method is overwritten, along with custom methods"];
}
- (BOOL)createContext {
// create OpenGLES context from self layer class
CAEAGLLayer* eagl_layer = (CAEAGLLayer*)self.layer;
eagl_layer.opaque = YES;
eagl_layer.drawableProperties =
[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO],
kEAGLDrawablePropertyRetainedBacking,
kEAGLColorFormatRGBA8,
kEAGLDrawablePropertyColorFormat,
nil];
_context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!_context) {
return NO;
}
if (![EAGLContext setCurrentContext:_context]) {
return NO;
}
// generates and binds the OpenGLES buffers
glGenFramebuffers(1, &_defaultFrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
// Create color render buffer and allocate backing store.
glGenRenderbuffers(1, &_colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
[_context renderbufferStorage:GL_RENDERBUFFER
fromDrawable:(CAEAGLLayer*)self.layer];
glGetRenderbufferParameteriv(
GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
glGetRenderbufferParameteriv(
GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_frameBufferHeight);
glFramebufferRenderbuffer(GL_FRAMEBUFFER,
GL_COLOR_ATTACHMENT0,
GL_RENDERBUFFER,
_colorRenderBuffer);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
return NO;
}
// set the frame buffer
glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
return _gles_renderer20->Setup([self bounds].size.width,
[self bounds].size.height);
}
- (BOOL)presentFramebuffer {
if (![_context presentRenderbuffer:GL_RENDERBUFFER]) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
0,
"%s:%d [context present_renderbuffer] "
"returned false",
__FUNCTION__,
__LINE__);
}
return YES;
}
- (BOOL)renderFrame:(VideoFrame*)frameToRender {
if (![EAGLContext setCurrentContext:_context]) {
return NO;
}
return _gles_renderer20->Render(*frameToRender);
}
- (BOOL)setCoordinatesForZOrder:(const float)zOrder
Left:(const float)left
Top:(const float)top
Right:(const float)right
Bottom:(const float)bottom {
return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom);
}
@end

@@ -0,0 +1,261 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
#include "webrtc/modules/video_render/linux/video_x11_render.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#include <X11/Xlib.h>
namespace webrtc {
VideoRenderLinuxImpl::VideoRenderLinuxImpl(
const int32_t id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen) :
_id(id),
_renderLinuxCritsect(
*CriticalSectionWrapper::CreateCriticalSection()),
_ptrWindow(window), _ptrX11Render(NULL)
{
}
VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
{
if (_ptrX11Render)
delete _ptrX11Render;
delete &_renderLinuxCritsect;
}
int32_t VideoRenderLinuxImpl::Init()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_renderLinuxCritsect);
_ptrX11Render = new VideoX11Render((Window) _ptrWindow);
if (!_ptrX11Render)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s",
"Failed to create instance of VideoX11Render object");
return -1;
}
int retVal = _ptrX11Render->Init();
if (retVal == -1)
{
return -1;
}
return 0;
}
int32_t VideoRenderLinuxImpl::ChangeWindow(void* window)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_renderLinuxCritsect);
_ptrWindow = window;
if (_ptrX11Render)
{
return _ptrX11Render->ChangeWindow((Window) window);
}
return -1;
}
VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_renderLinuxCritsect);
VideoRenderCallback* renderCallback = NULL;
if (_ptrX11Render)
{
VideoX11Channel* renderChannel =
_ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
top, right, bottom);
if (!renderChannel)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"Render channel creation failed for stream id: %d",
streamId);
return NULL;
}
renderCallback = (VideoRenderCallback *) renderChannel;
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"_ptrX11Render is NULL");
return NULL;
}
return renderCallback;
}
int32_t VideoRenderLinuxImpl::DeleteIncomingRenderStream(
const uint32_t streamId)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_renderLinuxCritsect);
if (_ptrX11Render)
{
return _ptrX11Render->DeleteX11RenderChannel(streamId);
}
return -1;
}
int32_t VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_renderLinuxCritsect);
if (_ptrX11Render)
{
return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
left, top, right,
bottom);
}
return -1;
}
int32_t VideoRenderLinuxImpl::StartRender()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
return 0;
}
int32_t VideoRenderLinuxImpl::StopRender()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
return 0;
}
VideoRenderType VideoRenderLinuxImpl::RenderType()
{
return kRenderX11;
}
RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
{
return kVideoI420;
}
bool VideoRenderLinuxImpl::FullScreen()
{
return false;
}
int32_t VideoRenderLinuxImpl::GetGraphicsMemory(
uint64_t& /*totalGraphicsMemory*/,
uint64_t& /*availableGraphicsMemory*/) const
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
int32_t VideoRenderLinuxImpl::GetScreenResolution(
uint32_t& /*screenWidth*/,
uint32_t& /*screenHeight*/) const
{
return -1;
}
uint32_t VideoRenderLinuxImpl::RenderFrameRate(const uint32_t /*streamId*/)
{
return -1;
}
int32_t VideoRenderLinuxImpl::SetStreamCropping(
const uint32_t /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
const float /*bottom*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
int32_t VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
int32_t VideoRenderLinuxImpl::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
int32_t VideoRenderLinuxImpl::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float rigth,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
int32_t VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
} // namespace webrtc

@@ -0,0 +1,128 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
#include "webrtc/modules/video_render/i_video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoX11Render;
// Class definitions
class VideoRenderLinuxImpl: IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderLinuxImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~VideoRenderLinuxImpl();
virtual int32_t Init();
virtual int32_t ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t
DeleteIncomingRenderStream(const uint32_t streamId);
virtual int32_t
GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender();
virtual int32_t StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual int32_t
GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual int32_t
GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom);
private:
int32_t _id;
CriticalSectionWrapper& _renderLinuxCritsect;
void* _ptrWindow;
// X11 Render
VideoX11Render* _ptrX11Render;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_

@@ -0,0 +1,315 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/linux/video_x11_channel.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
#define DISP_MAX 128
static Display *dispArray[DISP_MAX];
static int dispCount = 0;
VideoX11Channel::VideoX11Channel(int32_t id) :
_crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL),
_shminfo(), _image(NULL), _window(0L), _gc(NULL),
_width(DEFAULT_RENDER_FRAME_WIDTH),
_height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
_xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
_top(0.0), _left(0.0), _right(0.0), _bottom(0.0),
_Id(id)
{
}
VideoX11Channel::~VideoX11Channel()
{
if (_prepared)
{
_crit.Enter();
ReleaseWindow();
_crit.Leave();
}
delete &_crit;
}
int32_t VideoX11Channel::RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame) {
CriticalSectionScoped cs(&_crit);
if (_width != videoFrame.width() || _height
!= videoFrame.height()) {
if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
return -1;
}
}
return DeliverFrame(videoFrame);
}
int32_t VideoX11Channel::FrameSizeChange(int32_t width,
int32_t height,
int32_t /*numberOfStreams */)
{
CriticalSectionScoped cs(&_crit);
if (_prepared)
{
RemoveRenderer();
}
if (CreateLocalRenderer(width, height) == -1)
{
return -1;
}
return 0;
}
int32_t VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
CriticalSectionScoped cs(&_crit);
if (!_prepared) {
return 0;
}
if (!dispArray[_dispCount]) {
return -1;
}
ConvertFromI420(videoFrame, kARGB, 0, _buffer);
// Put image in window.
XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
_height, True);
// Very important for the image to update properly!
XSync(_display, False);
return 0;
}
int32_t VideoX11Channel::GetFrameSize(int32_t& width, int32_t& height)
{
width = _width;
height = _height;
return 0;
}
int32_t VideoX11Channel::Init(Window window, float left, float top,
float right, float bottom)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_crit);
_window = window;
_left = left;
_right = right;
_top = top;
_bottom = bottom;
_display = XOpenDisplay(NULL); // Use default display
if (!_window || !_display)
{
return -1;
}
if (dispCount < DISP_MAX)
{
dispArray[dispCount] = _display;
_dispCount = dispCount;
dispCount++;
}
else
{
return -1;
}
if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right
< 0) || (1 < bottom || bottom < 0))
{
return -1;
}
// calculate position and size of rendered video
int x, y;
unsigned int winWidth, winHeight, borderwidth, depth;
Window rootret;
if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
&winHeight, &borderwidth, &depth) == 0)
{
return -1;
}
_xPos = (int32_t) (winWidth * left);
_yPos = (int32_t) (winHeight * top);
_outWidth = (int32_t) (winWidth * (right - left));
_outHeight = (int32_t) (winHeight * (bottom - top));
if (_outWidth % 2)
        _outWidth++; // the renderer wants sizes that are multiples of two
if (_outHeight % 2)
_outHeight++;
_gc = XCreateGC(_display, _window, 0, 0);
if (!_gc) {
// Failed to create the graphics context.
assert(false);
return -1;
}
if (CreateLocalRenderer(winWidth, winHeight) == -1)
{
return -1;
}
return 0;
}
int32_t VideoX11Channel::ChangeWindow(Window window)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_crit);
// Stop the rendering, if we are rendering...
RemoveRenderer();
_window = window;
// calculate position and size of rendered video
int x, y;
unsigned int winWidth, winHeight, borderwidth, depth;
Window rootret;
if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
                     &winHeight, &borderwidth, &depth) == 0)
{
return -1;
}
_xPos = (int) (winWidth * _left);
_yPos = (int) (winHeight * _top);
_outWidth = (int) (winWidth * (_right - _left));
_outHeight = (int) (winHeight * (_bottom - _top));
if (_outWidth % 2)
        _outWidth++; // the renderer wants sizes that are multiples of two
if (_outHeight % 2)
_outHeight++;
    // Prepare rendering using the new window geometry.
if (CreateLocalRenderer(_width, _height) == -1)
{
return -1;
}
return 0;
}
int32_t VideoX11Channel::ReleaseWindow()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_crit);
RemoveRenderer();
if (_gc) {
XFreeGC(_display, _gc);
_gc = NULL;
}
if (_display)
{
XCloseDisplay(_display);
_display = NULL;
}
return 0;
}
int32_t VideoX11Channel::CreateLocalRenderer(int32_t width, int32_t height)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(&_crit);
if (!_window || !_display)
{
return -1;
}
if (_prepared)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
"Renderer already prepared, exits.");
return -1;
}
_width = width;
_height = height;
// create shared memory image
_image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
&_shminfo, _width, _height); // this parameter needs to be the same for some reason.
_shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
* _image->height), IPC_CREAT | 0777);
_shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
if (_image->data == reinterpret_cast<char*>(-1))
{
return -1;
}
_buffer = (unsigned char*) _image->data;
_shminfo.readOnly = False;
// attach image to display
if (!XShmAttach(_display, &_shminfo))
{
//printf("XShmAttach failed !\n");
return -1;
}
XSync(_display, False);
_prepared = true;
return 0;
}
int32_t VideoX11Channel::RemoveRenderer()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
if (!_prepared)
{
return 0;
}
_prepared = false;
// Free the memory.
XShmDetach(_display, &_shminfo);
XDestroyImage( _image );
_image = NULL;
shmdt(_shminfo.shmaddr);
_shminfo.shmaddr = NULL;
_buffer = NULL;
shmctl(_shminfo.shmid, IPC_RMID, 0);
_shminfo.shmid = 0;
return 0;
}
int32_t VideoX11Channel::GetStreamProperties(uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
zOrder = 0; // no z-order support yet
left = _left;
top = _top;
right = _right;
bottom = _bottom;
return 0;
}
} // namespace webrtc
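
Init() and ChangeWindow() above both map normalized [0,1] stream coordinates onto window pixels, then round the output size up to even values. A standalone sketch of that arithmetic (illustrative names, not from this module):

// Standalone sketch (not from the diff): how VideoX11Channel maps
// normalized [0,1] stream coordinates to a pixel rectangle inside the
// window, rounding the output size up to even numbers as the renderer
// requires.
#include <cstdint>
#include <cstdio>

struct RenderRect {
  int32_t x, y, width, height;
};

RenderRect ToPixels(unsigned win_w, unsigned win_h,
                    float left, float top, float right, float bottom) {
  RenderRect r;
  r.x = static_cast<int32_t>(win_w * left);
  r.y = static_cast<int32_t>(win_h * top);
  r.width = static_cast<int32_t>(win_w * (right - left));
  r.height = static_cast<int32_t>(win_h * (bottom - top));
  if (r.width % 2) r.width++;    // renderer wants even dimensions
  if (r.height % 2) r.height++;
  return r;
}

int main() {
  // The left half of a 150x100 window: 150 * 0.5 = 75 is odd, so the
  // output width is bumped to 76; the height stays at 50.
  RenderRect r = ToPixels(150, 100, 0.0f, 0.0f, 0.5f, 0.5f);
  std::printf("x=%d y=%d w=%d h=%d\n", r.x, r.y, r.width, r.height);
  return 0;
}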

@@ -0,0 +1,96 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
#include <sys/shm.h>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/extensions/XShm.h>
namespace webrtc {
class CriticalSectionWrapper;
#define DEFAULT_RENDER_FRAME_WIDTH 352
#define DEFAULT_RENDER_FRAME_HEIGHT 288
class VideoX11Channel: public VideoRenderCallback
{
public:
VideoX11Channel(int32_t id);
virtual ~VideoX11Channel();
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
int32_t FrameSizeChange(int32_t width, int32_t height,
int32_t numberOfStreams);
int32_t DeliverFrame(const VideoFrame& videoFrame);
int32_t GetFrameSize(int32_t& width, int32_t& height);
int32_t Init(Window window, float left, float top, float right,
float bottom);
int32_t ChangeWindow(Window window);
int32_t
GetStreamProperties(uint32_t& zOrder, float& left,
float& top, float& right, float& bottom) const;
int32_t ReleaseWindow();
bool IsPrepared()
{
return _prepared;
}
private:
int32_t
CreateLocalRenderer(int32_t width, int32_t height);
int32_t RemoveRenderer();
    // FIXME: Find a better place for this method? GetWidthHeight is no
    // longer supported by common_video.
int GetWidthHeight(VideoType type, int bufferSize, int& width,
int& height);
CriticalSectionWrapper& _crit;
Display* _display;
XShmSegmentInfo _shminfo;
XImage* _image;
Window _window;
GC _gc;
int32_t _width; // incoming frame width
int32_t _height; // incoming frame height
int32_t _outWidth; // render frame width
int32_t _outHeight; // render frame height
int32_t _xPos; // position within window
int32_t _yPos;
bool _prepared; // true if ready to use
int32_t _dispCount;
unsigned char* _buffer;
float _top;
float _left;
float _right;
float _bottom;
int32_t _Id;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_

@@ -0,0 +1,153 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/linux/video_x11_channel.h"
#include "webrtc/modules/video_render/linux/video_x11_render.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
VideoX11Render::VideoX11Render(Window window) :
_window(window),
_critSect(*CriticalSectionWrapper::CreateCriticalSection())
{
}
VideoX11Render::~VideoX11Render()
{
delete &_critSect;
}
int32_t VideoX11Render::Init()
{
CriticalSectionScoped cs(&_critSect);
_streamIdToX11ChannelMap.clear();
return 0;
}
int32_t VideoX11Render::ChangeWindow(Window window)
{
CriticalSectionScoped cs(&_critSect);
VideoX11Channel* renderChannel = NULL;
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.begin();
while (iter != _streamIdToX11ChannelMap.end())
{
renderChannel = iter->second;
if (renderChannel)
{
renderChannel->ChangeWindow(window);
}
iter++;
}
_window = window;
return 0;
}
VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_critSect);
VideoX11Channel* renderChannel = NULL;
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.find(streamId);
if (iter == _streamIdToX11ChannelMap.end())
{
renderChannel = new VideoX11Channel(streamId);
if (!renderChannel)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"Failed to create VideoX11Channel for streamId : %d",
streamId);
return NULL;
}
renderChannel->Init(_window, left, top, right, bottom);
_streamIdToX11ChannelMap[streamId] = renderChannel;
}
else
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
"Render Channel already exists for streamId: %d", streamId);
renderChannel = iter->second;
}
return renderChannel;
}
int32_t VideoX11Render::DeleteX11RenderChannel(int32_t streamId)
{
CriticalSectionScoped cs(&_critSect);
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.find(streamId);
if (iter != _streamIdToX11ChannelMap.end())
{
VideoX11Channel *renderChannel = iter->second;
if (renderChannel)
{
renderChannel->ReleaseWindow();
delete renderChannel;
renderChannel = NULL;
}
        _streamIdToX11ChannelMap.erase(iter);
        return 0;
    }
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"No VideoX11Channel object exists for stream id: %d",
streamId);
return -1;
}
int32_t VideoX11Render::GetIncomingStreamProperties(
int32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom)
{
CriticalSectionScoped cs(&_critSect);
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.find(streamId);
if (iter != _streamIdToX11ChannelMap.end())
{
VideoX11Channel *renderChannel = iter->second;
        if (renderChannel)
        {
            renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
            return 0;
        }
    }
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"No VideoX11Channel object exists for stream id: %d",
streamId);
return -1;
}
} // namespace webrtc
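
CreateX11RenderChannel() above returns the already-registered channel when the stream id exists and only otherwise allocates a new one. A standalone sketch of that create-or-return pattern with a plain std::map (illustrative names, not from this module):

// Standalone sketch (not from the diff): the create-or-return-existing
// registry pattern used by VideoX11Render::CreateX11RenderChannel(),
// keyed by stream id.
#include <cstdio>
#include <map>
#include <memory>

struct Channel {
  explicit Channel(int id) : id(id) {}
  int id;
};

std::map<int, std::unique_ptr<Channel>> g_channels;

Channel* CreateOrGet(int stream_id) {
  auto it = g_channels.find(stream_id);
  if (it == g_channels.end()) {
    // New stream id: create, register, and hand back the new channel.
    auto inserted = g_channels.emplace(stream_id,
                                       std::make_unique<Channel>(stream_id));
    return inserted.first->second.get();
  }
  // Existing stream id: reuse the channel that is already registered.
  return it->second.get();
}

int main() {
  Channel* a = CreateOrGet(1);
  Channel* b = CreateOrGet(1);  // same object as a
  std::printf("same channel: %s\n", a == b ? "yes" : "no");
  return 0;
}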

@@ -0,0 +1,58 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
#include "webrtc/modules/video_render/video_render_defines.h"
#include <X11/Xlib.h>
#include <map>
namespace webrtc {
class CriticalSectionWrapper;
class VideoX11Channel;
class VideoX11Render
{
public:
VideoX11Render(Window window);
~VideoX11Render();
int32_t Init();
int32_t ChangeWindow(Window window);
VideoX11Channel* CreateX11RenderChannel(int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom);
int32_t DeleteX11RenderChannel(int32_t streamId);
int32_t GetIncomingStreamProperties(int32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom);
private:
Window _window;
CriticalSectionWrapper& _critSect;
std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_

@@ -0,0 +1,33 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// cocoa_full_screen_window.h
//
//
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
#import <Cocoa/Cocoa.h>
//#define GRAB_ALL_SCREENS 1
@interface CocoaFullScreenWindow : NSObject {
NSWindow* _window;
}
-(id)init;
-(void)grabFullScreen;
-(void)releaseFullScreen;
-(NSWindow*)window;
@end
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_

@@ -0,0 +1,87 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
@implementation CocoaFullScreenWindow
-(id)init{
self = [super init];
if(!self){
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__);
return nil;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
return self;
}
-(void)grabFullScreen{
#ifdef GRAB_ALL_SCREENS
if(CGCaptureAllDisplays() != kCGErrorSuccess)
#else
if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
#endif
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__);
}
// get the shielding window level
int windowLevel = CGShieldingWindowLevel();
// get the screen rect of main display
NSRect screenRect = [[NSScreen mainScreen]frame];
_window = [[NSWindow alloc]initWithContentRect:screenRect
styleMask:NSBorderlessWindowMask
backing:NSBackingStoreBuffered
defer:NO
screen:[NSScreen mainScreen]];
[_window setLevel:windowLevel];
[_window setBackgroundColor:[NSColor blackColor]];
[_window makeKeyAndOrderFront:nil];
}
-(void)releaseFullScreen
{
[_window orderOut:self];
#ifdef GRAB_ALL_SCREENS
if(CGReleaseAllDisplays() != kCGErrorSuccess)
#else
if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
#endif
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__);
}
}
- (NSWindow*)window
{
return _window;
}
- (void) dealloc
{
[self releaseFullScreen];
[super dealloc];
}
@end

@@ -0,0 +1,32 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// cocoa_render_view.h
//
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
#import <Cocoa/Cocoa.h>
#import <OpenGL/gl.h>
#import <OpenGL/glu.h>
#import <OpenGL/OpenGL.h>
@interface CocoaRenderView : NSOpenGLView {
NSOpenGLContext* _nsOpenGLContext;
}
-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
-(NSOpenGLContext*)nsOpenGLContext;
@end
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_

@@ -0,0 +1,55 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Cocoa/Cocoa.h>
#import <AppKit/AppKit.h>
#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
@implementation CocoaRenderView
-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
self = [super initWithFrame:[self frame] pixelFormat:fmt];
if (self == nil){
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
}
_nsOpenGLContext = [self openGLContext];
}
-(NSOpenGLContext*)nsOpenGLContext {
return _nsOpenGLContext;
}
-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
NSRect screenRect = [[NSScreen mainScreen]frame];
// [_windowRef setFrame:screenRect];
// [_windowRef setBounds:screenRect];
self = [super initWithFrame:screenRect pixelFormat:fmt];
if (self == nil){
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
}
_nsOpenGLContext = [self openGLContext];
}
@end

File diff suppressed because it is too large

@@ -0,0 +1,178 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(CARBON_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
#define NEW_HIVIEW_EVENT_HANDLER 1
#define USE_STRUCT_RGN
#include <AGL/agl.h>
#include <Carbon/Carbon.h>
#include <OpenGL/OpenGL.h>
#include <OpenGL/glext.h>
#include <OpenGL/glu.h>
#include <list>
#include <map>
#include <memory>
class VideoRenderAGL;
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class VideoChannelAGL : public VideoRenderCallback {
public:
VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
virtual ~VideoChannelAGL();
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
virtual int DeliverFrame(const VideoFrame& videoFrame);
virtual int UpdateSize(int width, int height);
int SetStreamSettings(int streamId, float startWidth, float startHeight,
float stopWidth, float stopHeight);
int SetStreamCropSettings(int streamId, float startWidth, float startHeight,
float stopWidth, float stopHeight);
int RenderOffScreenBuffer();
int IsUpdated(bool& isUpdated);
virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
virtual int32_t RenderFrame(const uint32_t streamId, VideoFrame& videoFrame);
private:
AGLContext _aglContext;
int _id;
VideoRenderAGL* _owner;
int _width;
int _height;
int _stretchedWidth;
int _stretchedHeight;
float _startHeight;
float _startWidth;
float _stopWidth;
float _stopHeight;
int _xOldWidth;
int _yOldHeight;
int _oldStretchedHeight;
int _oldStretchedWidth;
unsigned char* _buffer;
size_t _bufferSize;
size_t _incomingBufferSize;
bool _bufferIsUpdated;
bool _sizeInitialized;
int _numberOfStreams;
bool _bVideoSizeStartedChanging;
GLenum _pixelFormat;
GLenum _pixelDataType;
unsigned int _texture;
};
class VideoRenderAGL {
public:
VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
~VideoRenderAGL();
int Init();
VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth,
float startHeight, float stopWidth,
float stopHeight);
VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder,
float startWidth, float startHeight,
float stopWidth, float stopHeight);
int DeleteAGLChannel(int channel);
int DeleteAllAGLChannels();
int StopThread();
bool IsFullScreen();
bool HasChannels();
bool HasChannel(int channel);
int GetChannels(std::list<int>& channelList);
void LockAGLCntx();
void UnlockAGLCntx();
static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
// ********** new module functions ************ //
int ChangeWindow(void* newWindowRef);
int32_t StartRender();
int32_t StopRender();
int32_t DeleteAGLChannel(const uint32_t streamID);
int32_t GetChannelProperties(const uint16_t streamId, uint32_t& zOrder,
float& left, float& top, float& right,
float& bottom);
protected:
static bool ScreenUpdateThreadProc(void* obj);
bool ScreenUpdateProcess();
int GetWindowRect(Rect& rect);
private:
int CreateMixingContext();
int RenderOffScreenBuffers();
int SwapAndDisplayBuffers();
int UpdateClipping();
int CalculateVisibleRegion(ControlRef control, RgnHandle& visibleRgn,
bool clipChildren);
bool CheckValidRegion(RgnHandle rHandle);
void ParentWindowResized(WindowRef window);
// Carbon GUI event handlers
static pascal OSStatus sHandleWindowResized(
EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
static pascal OSStatus sHandleHiViewResized(
EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
HIViewRef _hiviewRef;
WindowRef _windowRef;
bool _fullScreen;
int _id;
webrtc::CriticalSectionWrapper& _renderCritSec;
// TODO(pbos): Remove unique_ptr and use PlatformThread directly.
std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
webrtc::EventWrapper* _screenUpdateEvent;
bool _isHIViewRef;
AGLContext _aglContext;
int _windowWidth;
int _windowHeight;
int _lastWindowWidth;
int _lastWindowHeight;
int _lastHiViewWidth;
int _lastHiViewHeight;
int _currentParentWindowHeight;
int _currentParentWindowWidth;
Rect _currentParentWindowBounds;
bool _windowHasResized;
Rect _lastParentWindowBounds;
Rect _currentHIViewBounds;
Rect _lastHIViewBounds;
Rect _windowRect;
std::map<int, VideoChannelAGL*> _aglChannels;
std::multimap<int, int> _zOrderToChannel;
EventHandlerRef _hiviewEventHandlerRef;
EventHandlerRef _windowEventHandlerRef;
HIRect _currentViewBounds;
HIRect _lastViewBounds;
bool _renderingIsPaused;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#endif // CARBON_RENDERING

@@ -0,0 +1,280 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(CARBON_RENDERING)
#include <AGL/agl.h>
#include "webrtc/modules/video_render/mac/video_render_agl.h"
#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id),
_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_fullScreen(fullscreen),
_ptrWindow(window)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
}
VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
delete &_renderMacCarbonCritsect;
}
int32_t
VideoRenderMacCarbonImpl::Init()
{
CriticalSectionScoped cs(&_renderMacCarbonCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
if (!_ptrWindow)
{
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s:%d No window handle provided", __FUNCTION__, __LINE__);
return -1;
}
// We don't know if the user passed us a WindowRef or a HIViewRef, so test.
bool referenceIsValid = false;
// Check if it's a valid WindowRef
//WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
//WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
if (IsValidWindowPtr(*windowRef))
{
_ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
referenceIsValid = true;
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
}
else
{
HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
if (HIViewIsValid(*hiviewRef))
{
_ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
referenceIsValid = true;
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, hiviewRef);
}
}
if(!referenceIsValid)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
return -1;
}
    if(!_ptrCarbonRender)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
        return -1;
    }
int retVal = _ptrCarbonRender->Init();
if (retVal == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
return -1;
}
return 0;
}
int32_t
VideoRenderMacCarbonImpl::ChangeWindow(void* window)
{
    // Window changes are not supported on Carbon yet; the code below is
    // intentionally unreachable.
    return -1;
    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing window to %p", __FUNCTION__, window);
    if (window == NULL)
    {
        return -1;
    }
    _ptrWindow = window;
return 0;
}
VideoRenderCallback*
VideoRenderMacCarbonImpl::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_renderMacCarbonCritsect);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
    VideoChannelAGL* AGLChannel = NULL;
    if (_ptrWindow && _ptrCarbonRender)
    {
        AGLChannel = _ptrCarbonRender->CreateAGLChannel(streamId, zOrder, left,
                                                        top, right, bottom);
    }
    return AGLChannel;
}
int32_t
VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const uint32_t streamId)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
CriticalSectionScoped cs(&_renderMacCarbonCritsect);
_ptrCarbonRender->DeleteAGLChannel(streamId);
return 0;
}
int32_t
VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
    // Not supported on Carbon yet; the delegation below is intentionally
    // unreachable.
    return -1;
    return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
}
int32_t
VideoRenderMacCarbonImpl::StartRender()
{
return _ptrCarbonRender->StartRender();
}
int32_t
VideoRenderMacCarbonImpl::StopRender()
{
return _ptrCarbonRender->StopRender();
}
VideoRenderType
VideoRenderMacCarbonImpl::RenderType()
{
return kRenderCarbon;
}
RawVideoType
VideoRenderMacCarbonImpl::PerferedVideoType()
{
return kVideoI420;
}
bool
VideoRenderMacCarbonImpl::FullScreen()
{
return false;
}
int32_t
VideoRenderMacCarbonImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const
{
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return 0;
}
int32_t
VideoRenderMacCarbonImpl::GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const
{
CriticalSectionScoped cs(&_renderMacCarbonCritsect);
//NSScreen* mainScreen = [NSScreen mainScreen];
//NSRect frame = [mainScreen frame];
//screenWidth = frame.size.width;
//screenHeight = frame.size.height;
return 0;
}
uint32_t
VideoRenderMacCarbonImpl::RenderFrameRate(const uint32_t streamId)
{
CriticalSectionScoped cs(&_renderMacCarbonCritsect);
return 0;
}
int32_t
VideoRenderMacCarbonImpl::SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
int32_t VideoRenderMacCarbonImpl::ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
int32_t
VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
{
return 0;
}
int32_t VideoRenderMacCarbonImpl::SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
int32_t VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
} // namespace webrtc
#endif // CARBON_RENDERING

@@ -0,0 +1,146 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(CARBON_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
#include "webrtc/modules/video_render/i_video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoRenderAGL;
// Class definitions
class VideoRenderMacCarbonImpl : IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderMacCarbonImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderMacCarbonImpl();
virtual int32_t Init();
virtual int32_t ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender();
virtual int32_t StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual int32_t GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t FullScreenRender(void* window, const bool enable)
{
// not supported in Carbon at this time
return -1;
}
private:
int32_t _id;
CriticalSectionWrapper& _renderMacCarbonCritsect;
bool _fullScreen;
void* _ptrWindow;
VideoRenderAGL* _ptrCarbonRender;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
#endif // CARBON_RENDERING

@@ -0,0 +1,141 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
#include "webrtc/modules/video_render/i_video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoRenderNSOpenGL;
// Class definitions
class VideoRenderMacCocoaImpl : IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderMacCocoaImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderMacCocoaImpl();
virtual int32_t Init();
virtual int32_t ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender();
virtual int32_t StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual int32_t GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t FullScreenRender(void* window, const bool enable);
private:
int32_t _id;
CriticalSectionWrapper& _renderMacCocoaCritsect;
bool _fullScreen;
void* _ptrWindow;
VideoRenderNSOpenGL* _ptrCocoaRender;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
#endif // COCOA_RENDERING

View File

@ -0,0 +1,253 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)
#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id),
_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_fullScreen(fullscreen),
_ptrWindow(window),
_ptrCocoaRender(NULL)  // Created in Init().
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
}
VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
delete &_renderMacCocoaCritsect;
if (_ptrCocoaRender)
{
delete _ptrCocoaRender;
_ptrCocoaRender = NULL;
}
}
int32_t
VideoRenderMacCocoaImpl::Init()
{
CriticalSectionScoped cs(&_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
// Cast _ptrWindow from void* to CocoaRenderView (a subclass of NSOpenGLView).
if (!_ptrWindow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "No window %s:%d", __FUNCTION__, __LINE__);
return -1;
}
_ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
int retVal = _ptrCocoaRender->Init();
if (retVal == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
return -1;
}
return 0;
}
int32_t
VideoRenderMacCocoaImpl::ChangeWindow(void* window)
{
CriticalSectionScoped cs(&_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing window to %p", __FUNCTION__, window);
if (window == NULL)
{
return -1;
}
_ptrWindow = window;
_ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
return 0;
}
VideoRenderCallback*
VideoRenderMacCocoaImpl::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
VideoChannelNSOpenGL* nsOpenGLChannel = NULL;
if(!_ptrWindow)
{
return NULL;
}
nsOpenGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
return nsOpenGLChannel;
}
int32_t
VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const uint32_t streamId)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
CriticalSectionScoped cs(&_renderMacCocoaCritsect);
_ptrCocoaRender->DeleteNSGLChannel(streamId);
return 0;
}
int32_t
VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
}
int32_t
VideoRenderMacCocoaImpl::StartRender()
{
return _ptrCocoaRender->StartRender();
}
int32_t
VideoRenderMacCocoaImpl::StopRender()
{
return _ptrCocoaRender->StopRender();
}
VideoRenderType
VideoRenderMacCocoaImpl::RenderType()
{
return kRenderCocoa;
}
RawVideoType
VideoRenderMacCocoaImpl::PerferedVideoType()
{
return kVideoI420;
}
bool
VideoRenderMacCocoaImpl::FullScreen()
{
return false;
}
int32_t
VideoRenderMacCocoaImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const
{
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return 0;
}
int32_t
VideoRenderMacCocoaImpl::GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const
{
CriticalSectionScoped cs(&_renderMacCocoaCritsect);
NSScreen* mainScreen = [NSScreen mainScreen];
NSRect frame = [mainScreen frame];
screenWidth = frame.size.width;
screenHeight = frame.size.height;
return 0;
}
uint32_t
VideoRenderMacCocoaImpl::RenderFrameRate(const uint32_t streamId)
{
CriticalSectionScoped cs(&_renderMacCocoaCritsect);
return 0;
}
int32_t
VideoRenderMacCocoaImpl::SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
int32_t VideoRenderMacCocoaImpl::ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
int32_t
VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
{
return 0;
}
int32_t VideoRenderMacCocoaImpl::SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
}
int32_t VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
int32_t VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
{
return -1;
}
} // namespace webrtc
#endif // COCOA_RENDERING

View File

@ -0,0 +1,192 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#if defined(COCOA_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
#import <Cocoa/Cocoa.h>
#import <OpenGL/OpenGL.h>
#import <OpenGL/glext.h>
#import <OpenGL/glu.h>
#include <QuickTime/QuickTime.h>
#include <list>
#include <map>
#include <memory>
#include "webrtc/base/thread_annotations.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#import "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
class Trace;
namespace rtc {
class PlatformThread;
} // namespace rtc
namespace webrtc {
class EventTimerWrapper;
class VideoRenderNSOpenGL;
class CriticalSectionWrapper;
class VideoChannelNSOpenGL : public VideoRenderCallback {
public:
VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
virtual ~VideoChannelNSOpenGL();
// A new frame is delivered
virtual int DeliverFrame(const VideoFrame& videoFrame);
// Called when the incoming frame size and/or number of streams in mix
// changes.
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
virtual int UpdateSize(int width, int height);
// Setup
int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
// Called when it's time to render the last frame for the channel
int RenderOffScreenBuffer();
// Sets |isUpdated| to true if a new buffer has been delivered to the texture
int IsUpdated(bool& isUpdated);
virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
// ********** new module functions ************ //
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
// ********** new module helper functions ***** //
int ChangeContext(NSOpenGLContext *nsglContext);
int32_t GetChannelProperties(float& left,
float& top,
float& right,
float& bottom);
private:
NSOpenGLContext* _nsglContext;
const int _id;
VideoRenderNSOpenGL* _owner;
int32_t _width;
int32_t _height;
float _startWidth;
float _startHeight;
float _stopWidth;
float _stopHeight;
int _stretchedWidth;
int _stretchedHeight;
int _oldStretchedHeight;
int _oldStretchedWidth;
unsigned char* _buffer;
size_t _bufferSize;
size_t _incomingBufferSize;
bool _bufferIsUpdated;
int _numberOfStreams;
GLenum _pixelFormat;
GLenum _pixelDataType;
unsigned int _texture;
};
class VideoRenderNSOpenGL
{
public: // methods
VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
~VideoRenderNSOpenGL();
static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
// Allocates textures
int Init();
VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
int DeleteNSGLChannel(int channel);
int DeleteAllNSGLChannels();
int StopThread();
bool IsFullScreen();
bool HasChannels();
bool HasChannel(int channel);
int GetChannels(std::list<int>& channelList);
void LockAGLCntx() EXCLUSIVE_LOCK_FUNCTION(_nsglContextCritSec);
void UnlockAGLCntx() UNLOCK_FUNCTION(_nsglContextCritSec);
// ********** new module functions ************ //
int ChangeWindow(CocoaRenderView* newWindowRef);
int32_t StartRender();
int32_t StopRender();
int32_t DeleteNSGLChannel(const uint32_t streamID);
int32_t GetChannelProperties(const uint16_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom);
int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom);
// ********** new module helper functions ***** //
int configureNSOpenGLEngine();
int configureNSOpenGLView();
int setRenderTargetWindow();
int setRenderTargetFullScreen();
protected: // methods
static bool ScreenUpdateThreadProc(void* obj);
bool ScreenUpdateProcess();
int GetWindowRect(Rect& rect);
private: // methods
int CreateMixingContext();
int RenderOffScreenBuffers();
int DisplayBuffers();
private: // variables
CocoaRenderView* _windowRef;
bool _fullScreen;
int _id;
CriticalSectionWrapper& _nsglContextCritSec;
// TODO(pbos): Remove unique_ptr and use PlatformThread directly.
std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
EventTimerWrapper* _screenUpdateEvent;
NSOpenGLContext* _nsglContext;
NSOpenGLContext* _nsglFullScreenContext;
CocoaFullScreenWindow* _fullScreenWindow;
Rect _windowRect; // The size of the window
int _windowWidth;
int _windowHeight;
std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
std::multimap<int, int> _zOrderToChannel;
bool _renderingIsPaused;
NSView* _windowRefSuperView;
NSRect _windowRefSuperViewFrame;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
#endif // COCOA_RENDERING

File diff suppressed because it is too large.

Binary file not shown (image, 297 KiB).

View File

@ -0,0 +1,645 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_render/test/testAPI/testAPI.h"
#include <stdio.h>
#if defined(_WIN32)
#include <tchar.h>
#include <windows.h>
#include <assert.h>
#include <fstream>
#include <iostream>
#include <string>
#include <windows.h>
#include <ddraw.h>
#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <iostream>
#include <sys/time.h>
#endif
#include "webrtc/common_types.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_render/video_render.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor);
int TestSingleStream(VideoRender* renderModule);
int TestFullscreenStream(VideoRender* &renderModule,
void* window,
const VideoRenderType videoRenderType);
int TestBitmapText(VideoRender* renderModule);
int TestMultipleStreams(VideoRender* renderModule);
int TestExternalRender(VideoRender* renderModule);
#define TEST_FRAME_RATE 30
#define TEST_TIME_SECOND 5
#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
#define TEST_STREAM0_START_COLOR 0
#define TEST_STREAM1_START_COLOR 64
#define TEST_STREAM2_START_COLOR 128
#define TEST_STREAM3_START_COLOR 192
#if defined(WEBRTC_LINUX)
#define GET_TIME_IN_MS timeGetTime()
unsigned long timeGetTime()
{
struct timeval tv;
struct timezone tz;
unsigned long val;
gettimeofday(&tv, &tz);
val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
return(val);
}
#elif defined(WEBRTC_MAC)
#include <unistd.h>
#define GET_TIME_IN_MS timeGetTime()
unsigned long timeGetTime()
{
return 0;
}
#else
#define GET_TIME_IN_MS ::timeGetTime()
#endif
using namespace std;
#if defined(_WIN32)
LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
{
switch(uMsg)
{
case WM_DESTROY:
break;
case WM_COMMAND:
break;
}
return DefWindowProc(hWnd,uMsg,wParam,lParam);
}
int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
{
HINSTANCE hinst = GetModuleHandle(0);
WNDCLASSEX wcx;
wcx.hInstance = hinst;
wcx.lpszClassName = TEXT("VideoRenderTest");
wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc;
wcx.style = CS_DBLCLKS;
wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
wcx.lpszMenuName = NULL;
wcx.cbSize = sizeof (WNDCLASSEX);
wcx.cbClsExtra = 0;
wcx.cbWndExtra = 0;
wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
// Register our window class with the operating system.
// If there is an error, exit program.
if ( !RegisterClassEx (&wcx) )
{
MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR );
return 0;
}
// Create the main window.
hwndMain = CreateWindowEx(
0, // no extended styles
TEXT("VideoRenderTest"), // class name
TEXT("VideoRenderTest Window"), // window name
WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
800, // horizontal position
0, // vertical position
width, // width
height, // height
(HWND) NULL, // no parent or owner window
(HMENU) NULL, // class menu used
hinst, // instance handle
NULL); // no window creation data
if (!hwndMain)
return -1;
// Show the window using the flag specified by the program
// that started the application, and send the application
// a WM_PAINT message.
ShowWindow(hwndMain, SW_SHOWDEFAULT);
UpdateWindow(hwndMain);
return 0;
}
#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int width, int height) // unsigned char* title, int titleLength)
{
int screen, xpos = 10, ypos = 10;
XEvent evnt;
XSetWindowAttributes xswa; // window attribute struct
XVisualInfo vinfo; // screen visual info struct
unsigned long mask; // attribute mask
// get connection handle to xserver
Display* _display = XOpenDisplay( NULL );
// get screen number
screen = DefaultScreen(_display);
// put desired visual info for the screen in vinfo
if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 )
{
//printf( "Screen visual info match!\n" );
}
// set window attributes
xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone);
xswa.event_mask = StructureNotifyMask | ExposureMask;
xswa.background_pixel = 0;
xswa.border_pixel = 0;
// value mask for attributes
mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;
switch( winNum )
{
case 0:
xpos = 200;
ypos = 200;
break;
case 1:
xpos = 300;
ypos = 200;
break;
default:
break;
}
// create a subwindow for parent (defroot)
Window _window = XCreateWindow(_display, DefaultRootWindow(_display),
xpos, ypos,
width,
height,
0, vinfo.depth,
InputOutput,
vinfo.visual,
mask, &xswa);
// Set window name
if( winNum == 0 )
{
XStoreName(_display, _window, "VE MM Local Window");
XSetIconName(_display, _window, "VE MM Local Window");
}
else if( winNum == 1 )
{
XStoreName(_display, _window, "VE MM Remote Window");
XSetIconName(_display, _window, "VE MM Remote Window");
}
// make x report events for mask
XSelectInput(_display, _window, StructureNotifyMask);
// map the window to the display
XMapWindow(_display, _window);
// wait for map event
do
{
XNextEvent(_display, &evnt);
}
while (evnt.type != MapNotify || evnt.xmap.event != _window);
*outWindow = _window;
*outDisplay = _display;
return 0;
}
#endif // WEBRTC_LINUX
// Note: Mac code is in testApi_mac.mm.
class MyRenderCallback: public VideoRenderCallback
{
public:
MyRenderCallback() : _cnt(0) {}
~MyRenderCallback() {}
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame) {
_cnt++;
if (_cnt % 100 == 0)
{
printf("Render callback %d \n",_cnt);
}
return 0;
}
int32_t _cnt;
};
void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor) {
// changing color
static uint8_t color = startColor;
memset(frame->buffer(kYPlane), color, frame->allocated_size(kYPlane));
memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane));
memset(frame->buffer(kVPlane), color, frame->allocated_size(kVPlane));
++color;
}
int TestSingleStream(VideoRender* renderModule) {
int error = 0;
// Add settings for a stream to render
printf("Add stream 0 to entire window\n");
const int streamId0 = 0;
VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
assert(renderCallback0 != NULL);
printf("Start render\n");
error = renderModule->StartRender(streamId0);
if (error != 0) {
// TODO(phoglund): This test will not work if compiled in release mode.
// This rather silly construct here is to avoid compilation errors when
// compiling in release. Release => no asserts => unused 'error' variable.
assert(false);
}
// Loop through an I420 file and render each frame
const int width = 352;
const int half_width = (width + 1) / 2;
const int height = 288;
VideoFrame videoFrame0;
videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
const uint32_t renderDelayMs = 500;
for (int i=0; i<TEST_FRAME_NUM; i++) {
GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
// Render this frame with the specified delay
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp()
+ renderDelayMs);
renderCallback0->RenderFrame(streamId0, videoFrame0);
SleepMs(1000/TEST_FRAME_RATE);
}
// Shut down
printf("Closing...\n");
error = renderModule->StopRender(streamId0);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId0);
assert(error == 0);
return 0;
}
int TestFullscreenStream(VideoRender* &renderModule,
void* window,
const VideoRenderType videoRenderType) {
VideoRender::DestroyVideoRender(renderModule);
renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
TestSingleStream(renderModule);
VideoRender::DestroyVideoRender(renderModule);
renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
return 0;
}
int TestBitmapText(VideoRender* renderModule) {
#if defined(WIN32)
int error = 0;
// Add settings for a stream to render
printf("Add stream 0 to entire window\n");
const int streamId0 = 0;
VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
assert(renderCallback0 != NULL);
printf("Adding Bitmap\n");
DDCOLORKEY ColorKey; // black
ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
HBITMAP hbm = (HBITMAP)LoadImage(NULL,
(LPCTSTR)_T("renderStartImage.bmp"),
IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
0.3f);
printf("Adding Text\n");
renderModule->SetText(1, (uint8_t*) "WebRtc Render Demo App", 20,
RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
1.0f);
printf("Start render\n");
error = renderModule->StartRender(streamId0);
assert(error == 0);
// Loop through an I420 file and render each frame
const int width = 352;
const int half_width = (width + 1) / 2;
const int height = 288;
VideoFrame videoFrame0;
videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
const uint32_t renderDelayMs = 500;
for (int i=0; i<TEST_FRAME_NUM; i++) {
GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
// Render this frame with the specified delay
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback0->RenderFrame(streamId0, videoFrame0);
SleepMs(1000/TEST_FRAME_RATE);
}
// Sleep and let all frames be rendered before closing
SleepMs(renderDelayMs*2);
// Shut down
printf("Closing...\n");
ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
renderModule->SetText(1, NULL, 20, RGB(255,255,255),
RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
error = renderModule->StopRender(streamId0);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId0);
assert(error == 0);
#endif
return 0;
}
int TestMultipleStreams(VideoRender* renderModule) {
int error = 0;
// Add settings for a stream to render
printf("Add stream 0\n");
const int streamId0 = 0;
VideoRenderCallback* renderCallback0 =
renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 0.45f, 0.45f);
assert(renderCallback0 != NULL);
printf("Add stream 1\n");
const int streamId1 = 1;
VideoRenderCallback* renderCallback1 =
renderModule->AddIncomingRenderStream(streamId1, 0, 0.55f, 0.0f, 1.0f, 0.45f);
assert(renderCallback1 != NULL);
printf("Add stream 2\n");
const int streamId2 = 2;
VideoRenderCallback* renderCallback2 =
renderModule->AddIncomingRenderStream(streamId2, 0, 0.0f, 0.55f, 0.45f, 1.0f);
assert(renderCallback2 != NULL);
printf("Add stream 3\n");
const int streamId3 = 3;
VideoRenderCallback* renderCallback3 =
renderModule->AddIncomingRenderStream(streamId3, 0, 0.55f, 0.55f, 1.0f, 1.0f);
assert(renderCallback3 != NULL);
error = renderModule->StartRender(streamId0);
if (error != 0) {
// TODO(phoglund): This test will not work if compiled in release mode.
// This rather silly construct here is to avoid compilation errors when
// compiling in release. Release => no asserts => unused 'error' variable.
assert(false);
}
error = renderModule->StartRender(streamId1);
assert(error == 0);
error = renderModule->StartRender(streamId2);
assert(error == 0);
error = renderModule->StartRender(streamId3);
assert(error == 0);
// Loop through an I420 file and render each frame
const int width = 352;
const int half_width = (width + 1) / 2;
const int height = 288;
VideoFrame videoFrame0;
videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
VideoFrame videoFrame1;
videoFrame1.CreateEmptyFrame(width, height, width, half_width, half_width);
VideoFrame videoFrame2;
videoFrame2.CreateEmptyFrame(width, height, width, half_width, half_width);
VideoFrame videoFrame3;
videoFrame3.CreateEmptyFrame(width, height, width, half_width, half_width);
const uint32_t renderDelayMs = 500;
// Render frames with the specified delay.
for (int i=0; i<TEST_FRAME_NUM; i++) {
GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback0->RenderFrame(streamId0, videoFrame0);
GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback1->RenderFrame(streamId1, videoFrame1);
GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR);
videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback2->RenderFrame(streamId2, videoFrame2);
GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback3->RenderFrame(streamId3, videoFrame3);
SleepMs(1000/TEST_FRAME_RATE);
}
// Shut down
printf("Closing...\n");
error = renderModule->StopRender(streamId0);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId0);
assert(error == 0);
error = renderModule->StopRender(streamId1);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId1);
assert(error == 0);
error = renderModule->StopRender(streamId2);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId2);
assert(error == 0);
error = renderModule->StopRender(streamId3);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId3);
assert(error == 0);
return 0;
}
int TestExternalRender(VideoRender* renderModule) {
int error = 0;
MyRenderCallback *externalRender = new MyRenderCallback();
const int streamId0 = 0;
VideoRenderCallback* renderCallback0 =
renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
1.0f, 1.0f);
assert(renderCallback0 != NULL);
error = renderModule->AddExternalRenderCallback(streamId0, externalRender);
if (error != 0) {
// TODO(phoglund): This test will not work if compiled in release mode.
// This rather silly construct here is to avoid compilation errors when
// compiling in release. Release => no asserts => unused 'error' variable.
assert(false);
}
error = renderModule->StartRender(streamId0);
assert(error == 0);
const int width = 352;
const int half_width = (width + 1) / 2;
const int height = 288;
VideoFrame videoFrame0;
videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
const uint32_t renderDelayMs = 500;
int frameCount = TEST_FRAME_NUM;
for (int i=0; i<frameCount; i++) {
videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
renderDelayMs);
renderCallback0->RenderFrame(streamId0, videoFrame0);
SleepMs(33);
}
// Sleep and let all frames be rendered before closing
SleepMs(2*renderDelayMs);
// Shut down
printf("Closing...\n");
error = renderModule->StopRender(streamId0);
assert(error == 0);
error = renderModule->DeleteIncomingRenderStream(streamId0);
assert(error == 0);
assert(frameCount == externalRender->_cnt);
delete externalRender;
externalRender = NULL;
return 0;
}
void RunVideoRenderTests(void* window, VideoRenderType windowType) {
int myId = 12345;
// Create the render module
printf("Create render module\n");
VideoRender* renderModule = NULL;
renderModule = VideoRender::CreateVideoRender(myId,
window,
false,
windowType);
assert(renderModule != NULL);
// ##### Test single stream rendering ####
printf("#### TestSingleStream ####\n");
if (TestSingleStream(renderModule) != 0) {
printf ("TestSingleStream failed\n");
}
// ##### Test fullscreen rendering ####
printf("#### TestFullscreenStream ####\n");
if (TestFullscreenStream(renderModule, window, windowType) != 0) {
printf ("TestFullscreenStream failed\n");
}
// ##### Test bitmap and text ####
printf("#### TestBitmapText ####\n");
if (TestBitmapText(renderModule) != 0) {
printf ("TestBitmapText failed\n");
}
// ##### Test multiple streams ####
printf("#### TestMultipleStreams ####\n");
if (TestMultipleStreams(renderModule) != 0) {
printf ("TestMultipleStreams failed\n");
}
// ##### Test external render ####
printf("#### TestExternalRender ####\n");
if (TestExternalRender(renderModule) != 0) {
printf ("TestExternalRender failed\n");
}
VideoRender::DestroyVideoRender(renderModule);
renderModule = NULL;
printf("VideoRender unit tests passed.\n");
}
// Note: The Mac main is implemented in testApi_mac.mm.
#if defined(_WIN32)
int _tmain(int argc, _TCHAR* argv[])
#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
int main(int argc, char* argv[])
#endif
#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
{
// Create a window for testing.
void* window = NULL;
#if defined (_WIN32)
HWND testHwnd;
WebRtcCreateWindow(testHwnd, 0, 352, 288);
window = (void*)testHwnd;
VideoRenderType windowType = kRenderWindows;
#elif defined(WEBRTC_LINUX)
Window testWindow;
Display* display;
WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
VideoRenderType windowType = kRenderX11;
window = (void*)testWindow;
#endif // WEBRTC_LINUX
RunVideoRenderTests(window, windowType);
return 0;
}
#endif // !WEBRTC_MAC

View File

@ -0,0 +1,18 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
#include "webrtc/modules/video_render/video_render_defines.h"
void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H

View File

@ -0,0 +1,15 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
int main(int argc, char* argv[]) {
// TODO(leozwang): The video render test app is not ready on Android;
// make it a dummy test for now and add Android-specific tests later.
return 0;
}

View File

@ -0,0 +1,69 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "testAPI.h"
#include <iostream>
#import <Foundation/Foundation.h>
#import <Cocoa/Cocoa.h>
#import <AppKit/AppKit.h>
#import <QTKit/QTKit.h>
#include <sys/time.h>
#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_render/video_render.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/system_wrappers/include/trace.h"
using namespace webrtc;
int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
{
// In Cocoa, rendering is not done directly to a window like in Windows and Linux.
// It is rendered to a subclass of NSOpenGLView.
// create cocoa container window
NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame
styleMask:NSTitledWindowMask
backing:NSBackingStoreBuffered
defer:NO];
[outWindow orderOut:nil];
[outWindow setTitle:@"Cocoa Renderer"];
[outWindow setBackgroundColor:[NSColor blueColor]];
// create renderer and attach to window
NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
[[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
[outWindow makeKeyAndOrderFront:NSApp];
return 0;
}
int main (int argc, const char * argv[]) {
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
[NSApplication sharedApplication];
CocoaRenderView* testWindow;
WebRtcCreateWindow(testWindow, 0, 352, 288);
VideoRenderType windowType = kRenderCocoa;
void* window = (void*)testWindow;
RunVideoRenderTests(window, windowType);
[pool release];
return 0;
}

View File

@ -0,0 +1,218 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'targets': [
{
# Note: this library is missing an implementation for the video render.
# Targets must also link with 'video_render', or with
# 'video_render_module_internal_impl' if they want to compile and use
# the internal render as the default renderer.
'target_name': 'video_render_module',
'type': 'static_library',
'dependencies': [
'webrtc_utility',
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
],
'sources': [
'external/video_render_external_impl.cc',
'external/video_render_external_impl.h',
'i_video_render.h',
'video_render.h',
'video_render_defines.h',
'video_render_impl.h',
],
},
{
# Default video_render_module implementation that only supports external
# renders.
'target_name': 'video_render',
'type': 'static_library',
'dependencies': [
'video_render_module',
],
'sources': [
'video_render_impl.cc',
],
},
], # targets
'conditions': [
['build_with_chromium==0', {
'targets': [
{
# video_render_module implementation that supports the internal
# video_render implementation.
'target_name': 'video_render_module_internal_impl',
'type': 'static_library',
'dependencies': [
'<(webrtc_root)/common.gyp:webrtc_common',
'video_render_module',
],
'sources': [
'video_render_internal_impl.cc',
],
# TODO(andrew): with the proper suffix, these files will be excluded
# automatically.
'conditions': [
['OS=="android"', {
'sources': [
'android/video_render_android_impl.h',
'android/video_render_android_native_opengl2.h',
'android/video_render_android_surface_view.h',
'android/video_render_opengles20.h',
'android/video_render_android_impl.cc',
'android/video_render_android_native_opengl2.cc',
'android/video_render_android_surface_view.cc',
'android/video_render_opengles20.cc',
],
'link_settings': {
'libraries': [
'-lGLESv2',
],
},
}],
['OS=="ios"', {
'sources': [
# iOS
'ios/open_gles20.h',
'ios/open_gles20.mm',
'ios/video_render_ios_channel.h',
'ios/video_render_ios_channel.mm',
'ios/video_render_ios_gles20.h',
'ios/video_render_ios_gles20.mm',
'ios/video_render_ios_impl.h',
'ios/video_render_ios_impl.mm',
'ios/video_render_ios_view.h',
'ios/video_render_ios_view.mm',
],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',
},
'all_dependent_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-framework OpenGLES',
'-framework QuartzCore',
'-framework UIKit',
],
},
},
}],
['OS=="linux"', {
'sources': [
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.h',
'linux/video_x11_render.h',
'linux/video_render_linux_impl.cc',
'linux/video_x11_channel.cc',
'linux/video_x11_render.cc',
],
'link_settings': {
'libraries': [
'-lXext',
],
},
}],
['OS=="mac"', {
'sources': [
'mac/cocoa_full_screen_window.h',
'mac/cocoa_render_view.h',
'mac/video_render_agl.h',
'mac/video_render_mac_carbon_impl.h',
'mac/video_render_mac_cocoa_impl.h',
'mac/video_render_nsopengl.h',
'mac/video_render_nsopengl.mm',
'mac/video_render_mac_cocoa_impl.mm',
'mac/video_render_agl.cc',
'mac/video_render_mac_carbon_impl.cc',
'mac/cocoa_render_view.mm',
'mac/cocoa_full_screen_window.mm',
],
}],
['OS=="win"', {
'sources': [
'windows/i_video_render_win.h',
'windows/video_render_direct3d9.h',
'windows/video_render_windows_impl.h',
'windows/video_render_direct3d9.cc',
'windows/video_render_windows_impl.cc',
],
'include_dirs': [
'<(directx_sdk_path)/Include',
],
}],
['OS=="win" and clang==1', {
'msvs_settings': {
'VCCLCompilerTool': {
'AdditionalOptions': [
# Disable warnings failing when compiling with Clang on Windows.
# https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
'-Wno-comment',
'-Wno-reorder',
'-Wno-unused-value',
'-Wno-unused-private-field',
],
},
},
}],
] # conditions
},
],
}], # build_with_chromium==0
['include_tests==1 and OS!="ios"', {
'targets': [
{
# Does not compile on iOS: webrtc:4755.
'target_name': 'video_render_tests',
'type': 'executable',
'dependencies': [
'video_render_module_internal_impl',
'webrtc_utility',
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
],
'sources': [
'test/testAPI/testAPI.cc',
'test/testAPI/testAPI.h',
'test/testAPI/testAPI_android.cc',
'test/testAPI/testAPI_mac.mm',
],
'conditions': [
['OS=="mac" or OS=="linux"', {
'cflags': [
'-Wno-write-strings',
],
'ldflags': [
'-lpthread -lm',
],
}],
['OS=="linux"', {
'link_settings': {
'libraries': [
'-lX11',
],
},
}],
['OS=="mac"', {
'xcode_settings': {
'OTHER_LDFLAGS': [
'-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
],
},
}],
] # conditions
}, # video_render_module_test
], # targets
}], # include_tests==1 and OS!=ios
], # conditions
}

View File

@ -0,0 +1,255 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
/*
* video_render.h
*
* This header file together with module.h and module_common_types.h
* contains all of the APIs that are needed for using the video render
* module class.
*
*/
#include "webrtc/modules/include/module.h"
#include "webrtc/modules/video_render/video_render_defines.h"
namespace webrtc {
// Class definitions
class VideoRender: public Module
{
public:
/*
* Create a video render module object
*
* id - unique identifier of this video render module object
* window - pointer to the window to render to
* fullscreen - true if this is a fullscreen renderer
* videoRenderType - type of renderer to create
*/
static VideoRender
* CreateVideoRender(
const int32_t id,
void* window,
const bool fullscreen,
const VideoRenderType videoRenderType =
kRenderDefault);
/*
* Destroy a video render module object
*
* module - object to destroy
*/
static void DestroyVideoRender(VideoRender* module);
int64_t TimeUntilNextProcess() override = 0;
void Process() override = 0;
/**************************************************************************
*
* Window functions
*
***************************************************************************/
/*
* Get window for this renderer
*/
virtual void* Window() = 0;
/*
* Change render window
*
* window - the new render window, assuming same type as originally created.
*/
virtual int32_t ChangeWindow(void* window) = 0;
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
/*
* Add incoming render stream
*
* streamID - id of the stream to add
* zOrder - relative render order for the streams, 0 = on top
* left - position of the stream in the window, [0.0f, 1.0f]
* top - position of the stream in the window, [0.0f, 1.0f]
* right - position of the stream in the window, [0.0f, 1.0f]
* bottom - position of the stream in the window, [0.0f, 1.0f]
*
* Return - callback class to use for delivering new frames to render.
*/
virtual VideoRenderCallback
* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom) = 0;
/*
* Delete incoming render stream
*
* streamID - id of the stream to delete
*/
virtual int32_t
DeleteIncomingRenderStream(const uint32_t streamId) = 0;
/*
* Add incoming render callback, used for external rendering
*
* streamID - id of the stream the callback is used for
* renderObject - the VideoRenderCallback to use for this stream, NULL to remove
*
* Return - 0 on success, -1 on failure.
*/
virtual int32_t
AddExternalRenderCallback(const uint32_t streamId,
VideoRenderCallback* renderObject) = 0;
/*
* Get the properties for an incoming render stream
*
* streamID - [in] id of the stream to get properties for
* zOrder - [out] relative render order for the streams, 0 = on top
* left - [out] position of the stream in the window, [0.0f, 1.0f]
* top - [out] position of the stream in the window, [0.0f, 1.0f]
* right - [out] position of the stream in the window, [0.0f, 1.0f]
* bottom - [out] position of the stream in the window, [0.0f, 1.0f]
*/
virtual int32_t
GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const = 0;
/*
* The incoming frame rate to the module, not the rate rendered in the window.
*/
virtual uint32_t
GetIncomingFrameRate(const uint32_t streamId) = 0;
/*
* Returns the number of incoming streams added to this render module
*/
virtual uint32_t GetNumIncomingRenderStreams() const = 0;
/*
* Returns true if this render module has the streamId added, false otherwise.
*/
virtual bool
HasIncomingRenderStream(const uint32_t streamId) const = 0;
/*
* Registers a callback to get raw images in the same time as sent
* to the renderer. To be used for external rendering.
*/
virtual int32_t
RegisterRawFrameCallback(const uint32_t streamId,
VideoRenderCallback* callbackObj) = 0;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
/*
* Starts rendering the specified stream
*/
virtual int32_t StartRender(const uint32_t streamId) = 0;
/*
* Stops rendering the specified stream
*/
virtual int32_t StopRender(const uint32_t streamId) = 0;
/*
* Resets the renderer
* No streams are removed. The state should be as after AddIncomingRenderStream was called.
*/
virtual int32_t ResetRender() = 0;
/**************************************************************************
*
* Properties
*
***************************************************************************/
/*
* Returns the preferred render video type
*/
virtual RawVideoType PreferredVideoType() const = 0;
/*
* Returns true if the renderer is in fullscreen mode, otherwise false.
*/
virtual bool IsFullScreen() = 0;
/*
* Gets screen resolution in pixels
*/
virtual int32_t
GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const = 0;
/*
* Get the actual render rate for this stream. I.e rendered frame rate,
* not frames delivered to the renderer.
*/
virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
/*
* Set cropping of incoming stream
*/
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom) = 0;
/*
* re-configure renderer
*/
// Set the expected time needed by the graphics card or external renderer,
// i.e. frames will be released for rendering |delay_ms| before set render
// time in the video frame.
virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
int32_t delay_ms) = 0;
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t SetTransparentBackground(const bool enable) = 0;
virtual int32_t FullScreenRender(void* window, const bool enable) = 0;
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom) = 0;
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float right, const float bottom) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
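For orientation, the sequence below condenses how this API is exercised by TestSingleStream() in test/testAPI/testAPI.cc above: create the module, add a stream with normalized [0.0, 1.0] coordinates, start rendering, deliver frames through the returned callback, then tear down. This is a minimal sketch assuming the caller already has a platform window handle and render type (e.g. from the test's WebRtcCreateWindow()); RenderOneStream is an illustrative name, not part of this CL.
#include "webrtc/modules/video_render/video_render.h"

void RenderOneStream(void* window, webrtc::VideoRenderType type) {
  // Arbitrary module id; false = not fullscreen.
  webrtc::VideoRender* module =
      webrtc::VideoRender::CreateVideoRender(12345, window, false, type);
  // Stream 0 on top (zOrder 0), covering the whole window: left/top 0.0,
  // right/bottom 1.0 in normalized window coordinates.
  webrtc::VideoRenderCallback* callback =
      module->AddIncomingRenderStream(0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
  module->StartRender(0);
  // Frames are delivered through the returned callback, e.g.:
  //   callback->RenderFrame(0, videoFrame);
  (void)callback;
  module->StopRender(0);
  module->DeleteIncomingRenderStream(0);
  webrtc::VideoRender::DestroyVideoRender(module);
}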

View File

@ -0,0 +1,70 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
#include "webrtc/common_types.h"
#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/modules/include/module_common_types.h"
namespace webrtc
{
// Defines
#ifndef NULL
#define NULL 0
#endif
// Enums
enum VideoRenderType
{
kRenderExternal = 0, // External
kRenderWindows = 1, // Windows
kRenderCocoa = 2, // Mac
kRenderCarbon = 3,
kRenderiOS = 4, // iPhone
kRenderAndroid = 5, // Android
kRenderX11 = 6, // Linux
kRenderDefault
};
// Runtime errors
enum VideoRenderError
{
kRenderShutDown = 0,
kRenderPerformanceAlarm = 1
};
// Feedback class to be implemented by module user
class VideoRenderFeedback
{
public:
virtual void OnRenderError(const int32_t streamId,
const VideoRenderError error) = 0;
protected:
virtual ~VideoRenderFeedback()
{
}
};
// Mobile enums
enum StretchMode
{
kStretchToInsideEdge = 1,
kStretchToOutsideEdge = 2,
kStretchMatchWidth = 3,
kStretchMatchHeight = 4,
kStretchNone = 5
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
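As a usage illustration of the feedback interface above, here is a minimal sketch of a module-user implementation that just logs the two runtime errors; the class name and the printf logging are assumptions for the example, not part of this CL.
#include <stdio.h>

#include "webrtc/modules/video_render/video_render_defines.h"

class LoggingRenderFeedback : public webrtc::VideoRenderFeedback {
 public:
  virtual void OnRenderError(const int32_t streamId,
                             const webrtc::VideoRenderError error) {
    // error is kRenderShutDown or kRenderPerformanceAlarm (see enum above).
    printf("Render error %d on stream %d\n",
           static_cast<int>(error), static_cast<int>(streamId));
  }
};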

View File

@ -0,0 +1,550 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/external/video_render_external_impl.h"
#include "webrtc/modules/video_render/i_video_render.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_render/video_render_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
VideoRender*
VideoRender::CreateVideoRender(const int32_t id,
void* window,
const bool fullscreen,
const VideoRenderType videoRenderType/*=kRenderDefault*/)
{
VideoRenderType resultVideoRenderType = videoRenderType;
if (videoRenderType == kRenderDefault)
{
resultVideoRenderType = kRenderExternal;
}
return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
fullscreen);
}
void VideoRender::DestroyVideoRender(
VideoRender* module)
{
if (module)
{
delete module;
}
}
ModuleVideoRenderImpl::ModuleVideoRenderImpl(
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
{
// Create platform specific renderer
switch (videoRenderType)
{
case kRenderExternal:
{
VideoRenderExternalImpl* ptrRenderer(NULL);
ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
window, _fullScreen);
if (ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
}
}
break;
default:
// Error...
break;
}
if (_ptrRenderer)
{
if (_ptrRenderer->Init() == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: failed to init renderer", __FUNCTION__);
}
}
}
ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
{
delete &_moduleCrit;
for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
it != _streamRenderMap.end();
++it) {
delete it->second;
}
// Delete platform specific renderer
if (_ptrRenderer)
{
VideoRenderType videoRenderType = _ptrRenderer->RenderType();
switch (videoRenderType)
{
case kRenderExternal:
{
VideoRenderExternalImpl
* ptrRenderer =
reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
default:
// Error...
break;
}
}
}
int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
{
// Not used
return 50;
}
void ModuleVideoRenderImpl::Process() {}
void*
ModuleVideoRenderImpl::Window()
{
CriticalSectionScoped cs(&_moduleCrit);
return _ptrWindow;
}
int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
{
return -1;
}
int32_t ModuleVideoRenderImpl::Id()
{
CriticalSectionScoped cs(&_moduleCrit);
return _id;
}
uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
CriticalSectionScoped cs(&_moduleCrit);
IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
if (it == _streamRenderMap.end()) {
// This stream doesn't exist
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: stream doesn't exist",
__FUNCTION__);
return 0;
}
assert(it->second != NULL);
return it->second->IncomingRate();
}
VideoRenderCallback*
ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return NULL;
}
if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
// The stream already exists...
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream already exists", __FUNCTION__);
return NULL;
}
VideoRenderCallback* ptrRenderCallback =
_ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
right, bottom);
if (ptrRenderCallback == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Can't create incoming stream in renderer",
__FUNCTION__);
return NULL;
}
// Create platform independent code
IncomingVideoStream* ptrIncomingStream =
new IncomingVideoStream(streamId, false);
ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
// Store the stream
_streamRenderMap[streamId] = ptrIncomingStream;
return moduleCallback;
}
int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
delete item->second;
_ptrRenderer->DeleteIncomingRenderStream(streamId);
_streamRenderMap.erase(item);
return 0;
}
int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
const uint32_t streamId,
VideoRenderCallback* renderObject) {
CriticalSectionScoped cs(&_moduleCrit);
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
if (item->second == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get stream", __FUNCTION__);
return -1;
}
item->second->SetExternalCallback(renderObject);
return 0;
}
int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const {
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
left, top, right,
bottom);
}
uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
{
CriticalSectionScoped cs(&_moduleCrit);
return static_cast<uint32_t>(_streamRenderMap.size());
}
bool ModuleVideoRenderImpl::HasIncomingRenderStream(
const uint32_t streamId) const {
CriticalSectionScoped cs(&_moduleCrit);
return _streamRenderMap.find(streamId) != _streamRenderMap.end();
}
int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
const uint32_t streamId,
VideoRenderCallback* callbackObj) {
return -1;
}
int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
// Start the stream
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
return -1;
}
if (item->second->Start() == -1)
{
return -1;
}
// Start the HW renderer
if (_ptrRenderer->StartRender() == -1)
{
return -1;
}
return 0;
}
int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s(%d): No renderer", __FUNCTION__, streamId);
return -1;
}
// Stop the incoming stream
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
return -1;
}
if (item->second->Stop() == -1)
{
return -1;
}
return 0;
}
int32_t ModuleVideoRenderImpl::ResetRender()
{
CriticalSectionScoped cs(&_moduleCrit);
int32_t ret = 0;
// Loop through all incoming streams and reset them
for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
it != _streamRenderMap.end();
++it) {
if (it->second->Reset() == -1)
ret = -1;
}
return ret;
}
RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
{
CriticalSectionScoped cs(&_moduleCrit);
if (_ptrRenderer == NULL)
{
return kVideoI420;
}
return _ptrRenderer->PerferedVideoType();
}
bool ModuleVideoRenderImpl::IsFullScreen()
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return false;
}
return _ptrRenderer->FullScreen();
}
int32_t ModuleVideoRenderImpl::GetScreenResolution(
uint32_t& screenWidth,
uint32_t& screenHeight) const
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
}
uint32_t ModuleVideoRenderImpl::RenderFrameRate(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return 0;
}
return _ptrRenderer->RenderFrameRate(streamId);
}
int32_t ModuleVideoRenderImpl::SetStreamCropping(
const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
}
int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetTransparentBackground(enable);
}
int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
{
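// Toggling full-screen mode after creation is not supported.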
return -1;
}
int32_t ModuleVideoRenderImpl::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
backgroundColorRef, left, top, right, bottom);
}
int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
right, bottom);
}
int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
uint32_t stream_id, int32_t delay_ms) {
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
IncomingVideoStreamMap::const_iterator item =
_streamRenderMap.find(stream_id);
if (item == _streamRenderMap.end()) {
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
delay_ms);
return -1;
}
assert(item->second != NULL);
return item->second->SetExpectedRenderDelay(delay_ms);
}
int32_t ModuleVideoRenderImpl::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
bottom);
}
} // namespace webrtc

View File

@ -0,0 +1,208 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
#include <map>
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class IncomingVideoStream;
class IVideoRender;
// Class definitions
class ModuleVideoRenderImpl: public VideoRender
{
public:
/*
* VideoRenderer constructor/destructor
*/
ModuleVideoRenderImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~ModuleVideoRenderImpl();
virtual int64_t TimeUntilNextProcess();
virtual void Process();
/*
* Returns the render window
*/
virtual void* Window();
/*
* Change render window
*/
virtual int32_t ChangeWindow(void* window);
/*
* Returns module id
*/
int32_t Id();
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
/*
* Add incoming render stream
*/
virtual VideoRenderCallback
* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom);
/*
* Delete incoming render stream
*/
virtual int32_t
DeleteIncomingRenderStream(const uint32_t streamId);
/*
* Add incoming render callback, used for external rendering
*/
virtual int32_t
AddExternalRenderCallback(const uint32_t streamId,
VideoRenderCallback* renderObject);
/*
* Get the properties of an incoming render stream
*/
virtual int32_t
GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/*
* Incoming frame rate for the specified stream.
*/
virtual uint32_t GetIncomingFrameRate(const uint32_t streamId);
/*
* Returns the number of incoming streams added to this render module
*/
virtual uint32_t GetNumIncomingRenderStreams() const;
/*
* Returns true if this render module has the streamId added, false otherwise.
*/
virtual bool HasIncomingRenderStream(const uint32_t streamId) const;
/*
* Register a raw frame callback. Not supported by this implementation.
*/
virtual int32_t
RegisterRawFrameCallback(const uint32_t streamId,
VideoRenderCallback* callbackObj);
virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
int32_t delay_ms);
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
/*
* Starts rendering the specified stream
*/
virtual int32_t StartRender(const uint32_t streamId);
/*
* Stops the renderer
*/
virtual int32_t StopRender(const uint32_t streamId);
/*
* Resets the renderer to its start state without removing any streams.
*/
virtual int32_t ResetRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
/*
* Returns the preferred render video type
*/
virtual RawVideoType PreferredVideoType() const;
/*
* Returns true if the renderer is in fullscreen mode, otherwise false.
*/
virtual bool IsFullScreen();
/*
* Gets screen resolution in pixels
*/
virtual int32_t
GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const;
/*
* Get the actual render rate for this stream, i.e. the rendered frame
* rate, not the rate of frames delivered to the renderer.
*/
virtual uint32_t RenderFrameRate(const uint32_t streamId);
/*
* Set cropping of incoming stream
*/
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t FullScreenRender(void* window, const bool enable);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
private:
int32_t _id;
CriticalSectionWrapper& _moduleCrit;
void* _ptrWindow;
bool _fullScreen;
IVideoRender* _ptrRenderer;
typedef std::map<uint32_t, IncomingVideoStream*> IncomingVideoStreamMap;
IncomingVideoStreamMap _streamRenderMap;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_

View File

@ -0,0 +1,27 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
#ifdef ANDROID
#include <jni.h>
namespace webrtc {
// In order to be able to use the internal webrtc video render
// for android, the jvm objects must be set via this method.
int32_t SetRenderAndroidVM(JavaVM* javaVM);
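// A minimal sketch of how an Android embedder might hand the JVM to the
// internal renderer; the JNI_OnLoad wiring below is illustrative, and only
// SetRenderAndroidVM() itself is declared in this header.
//
// extern "C" jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
//   if (webrtc::SetRenderAndroidVM(vm) != 0)
//     return -1;  // The render module could not store the VM.
//   return JNI_VERSION_1_6;
// }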
} // namespace webrtc
#endif // ANDROID
#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_

View File

@ -0,0 +1,773 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <assert.h>
#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/i_video_render.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_render/video_render_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#if defined (_WIN32)
#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
#define STANDARD_RENDERING kRenderWindows
// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
// gets defined if WEBRTC_IOS is defined
#elif defined(WEBRTC_IOS)
#define STANDARD_RENDERING kRenderiOS
#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
#elif defined(WEBRTC_MAC)
#if defined(COCOA_RENDERING)
#define STANDARD_RENDERING kRenderCocoa
#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
#elif defined(CARBON_RENDERING)
#define STANDARD_RENDERING kRenderCarbon
#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
#endif
#elif defined(WEBRTC_ANDROID)
#include "webrtc/modules/video_render/android/video_render_android_impl.h"
#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
#define STANDARD_RENDERING kRenderAndroid
#elif defined(WEBRTC_LINUX)
#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
#define STANDARD_RENDERING kRenderX11
#else
// Other platforms
#endif
// For external rendering
#include "webrtc/modules/video_render/external/video_render_external_impl.h"
#ifndef STANDARD_RENDERING
#define STANDARD_RENDERING kRenderExternal
#endif // STANDARD_RENDERING
namespace webrtc {
VideoRender*
VideoRender::CreateVideoRender(const int32_t id,
void* window,
const bool fullscreen,
const VideoRenderType videoRenderType/*=kRenderDefault*/)
{
VideoRenderType resultVideoRenderType = videoRenderType;
if (videoRenderType == kRenderDefault)
{
resultVideoRenderType = STANDARD_RENDERING;
}
return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
fullscreen);
}
void VideoRender::DestroyVideoRender(
VideoRender* module)
{
if (module)
{
delete module;
}
}
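// A minimal usage sketch of the factory pair above, assuming a valid native
// window handle; the stream id, placement rectangle and GetNativeWindow()
// helper are placeholders.
//
// void* window = GetNativeWindow();  // hypothetical platform helper
// VideoRender* render =
//     VideoRender::CreateVideoRender(0, window, false, kRenderDefault);
// VideoRenderCallback* sink =
//     render->AddIncomingRenderStream(1, 0, 0.0f, 0.0f, 1.0f, 1.0f);
// // Decoded frames are pushed through sink->RenderFrame(1, frame).
// render->StartRender(1);
// ...
// render->StopRender(1);
// render->DeleteIncomingRenderStream(1);
// VideoRender::DestroyVideoRender(render);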
ModuleVideoRenderImpl::ModuleVideoRenderImpl(
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
{
// Create platform specific renderer
switch (videoRenderType)
{
#if defined(_WIN32)
case kRenderWindows:
{
VideoRenderWindowsImpl* ptrRenderer;
ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
if (ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
}
}
break;
#elif defined(WEBRTC_IOS)
case kRenderiOS:
{
VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
if(ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
}
}
break;
#elif defined(WEBRTC_MAC)
#if defined(COCOA_RENDERING)
case kRenderCocoa:
{
VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
if(ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
}
}
break;
#elif defined(CARBON_RENDERING)
case kRenderCarbon:
{
VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
if(ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
}
}
break;
#endif
#elif defined(WEBRTC_ANDROID)
case kRenderAndroid:
{
if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
{
AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
if (ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
}
}
else
{
AndroidSurfaceViewRenderer* ptrRenderer = NULL;
ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
if (ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
}
}
}
break;
#elif defined(WEBRTC_LINUX)
case kRenderX11:
{
VideoRenderLinuxImpl* ptrRenderer = NULL;
ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
if ( ptrRenderer )
{
_ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
}
}
break;
#else
// Other platforms
#endif
case kRenderExternal:
{
VideoRenderExternalImpl* ptrRenderer(NULL);
ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
window, _fullScreen);
if (ptrRenderer)
{
_ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
}
}
break;
default:
// Unsupported render type; _ptrRenderer stays NULL.
break;
}
if (_ptrRenderer)
{
if (_ptrRenderer->Init() == -1)
{
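// NOTE: an Init() failure is silently ignored here.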
}
}
}
ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
{
delete &_moduleCrit;
for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
it != _streamRenderMap.end();
++it) {
delete it->second;
}
// Delete platform specific renderer
if (_ptrRenderer)
{
VideoRenderType videoRenderType = _ptrRenderer->RenderType();
switch (videoRenderType)
{
case kRenderExternal:
{
VideoRenderExternalImpl
* ptrRenderer =
reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#if defined(_WIN32)
case kRenderWindows:
{
VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#elif defined(WEBRTC_IOS)
case kRenderiOS:
{
VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#elif defined(WEBRTC_MAC)
#if defined(COCOA_RENDERING)
case kRenderCocoa:
{
VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#elif defined(CARBON_RENDERING)
case kRenderCarbon:
{
VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#endif
#elif defined(WEBRTC_ANDROID)
case kRenderAndroid:
{
VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#elif defined(WEBRTC_LINUX)
case kRenderX11:
{
VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
_ptrRenderer = NULL;
delete ptrRenderer;
}
break;
#else
// Other platforms
#endif
default:
// Unknown renderer type; cannot delete it safely.
break;
}
}
}
int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
{
// Not used
return 50;
}
void ModuleVideoRenderImpl::Process() {}
void*
ModuleVideoRenderImpl::Window()
{
CriticalSectionScoped cs(&_moduleCrit);
return _ptrWindow;
}
int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
{
CriticalSectionScoped cs(&_moduleCrit);
#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
delete _ptrRenderer;
_ptrRenderer = NULL;
VideoRenderIosImpl* ptrRenderer;
ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
if (!ptrRenderer)
{
return -1;
}
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
return _ptrRenderer->ChangeWindow(window);
#elif defined(WEBRTC_MAC)
delete _ptrRenderer;
_ptrRenderer = NULL;
#if defined(COCOA_RENDERING)
VideoRenderMacCocoaImpl* ptrRenderer;
ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
#elif defined(CARBON_RENDERING)
VideoRenderMacCarbonImpl* ptrRenderer;
ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
#endif
if (!ptrRenderer)
{
return -1;
}
_ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
return _ptrRenderer->ChangeWindow(window);
#else
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->ChangeWindow(window);
#endif
}
int32_t ModuleVideoRenderImpl::Id()
{
CriticalSectionScoped cs(&_moduleCrit);
return _id;
}
uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
CriticalSectionScoped cs(&_moduleCrit);
IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
if (it == _streamRenderMap.end()) {
// This stream doesn't exist
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: stream doesn't exist",
__FUNCTION__);
return 0;
}
assert(it->second != NULL);
return it->second->IncomingRate();
}
VideoRenderCallback*
ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return NULL;
}
if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
// The stream already exists...
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream already exists", __FUNCTION__);
return NULL;
}
VideoRenderCallback* ptrRenderCallback =
_ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
right, bottom);
if (ptrRenderCallback == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Can't create incoming stream in renderer",
__FUNCTION__);
return NULL;
}
// Create the platform-independent incoming stream.
IncomingVideoStream* ptrIncomingStream =
new IncomingVideoStream(streamId, false);
ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
// Store the stream
_streamRenderMap[streamId] = ptrIncomingStream;
return moduleCallback;
}
int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
delete item->second;
_ptrRenderer->DeleteIncomingRenderStream(streamId);
_streamRenderMap.erase(item);
return 0;
}
int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
const uint32_t streamId,
VideoRenderCallback* renderObject) {
CriticalSectionScoped cs(&_moduleCrit);
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: stream doesn't exist", __FUNCTION__);
return -1;
}
if (item->second == NULL) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get stream", __FUNCTION__);
return -1;
}
item->second->SetExternalCallback(renderObject);
return 0;
}
int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const {
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
left, top, right,
bottom);
}
uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
{
CriticalSectionScoped cs(&_moduleCrit);
return static_cast<uint32_t>(_streamRenderMap.size());
}
bool ModuleVideoRenderImpl::HasIncomingRenderStream(
const uint32_t streamId) const {
CriticalSectionScoped cs(&_moduleCrit);
return _streamRenderMap.find(streamId) != _streamRenderMap.end();
}
int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
const uint32_t streamId,
VideoRenderCallback* callbackObj) {
return -1;
}
int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
// Start the stream
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
return -1;
}
if (item->second->Start() == -1)
{
return -1;
}
// Start the HW renderer
if (_ptrRenderer->StartRender() == -1)
{
return -1;
}
return 0;
}
int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s(%d): No renderer", __FUNCTION__, streamId);
return -1;
}
// Stop the incoming stream
IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
if (item == _streamRenderMap.end())
{
return -1;
}
if (item->second->Stop() == -1)
{
return -1;
}
return 0;
}
int32_t ModuleVideoRenderImpl::ResetRender()
{
CriticalSectionScoped cs(&_moduleCrit);
int32_t ret = 0;
// Loop through all incoming streams and reset them
for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
it != _streamRenderMap.end();
++it) {
if (it->second->Reset() == -1)
ret = -1;
}
return ret;
}
RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
{
CriticalSectionScoped cs(&_moduleCrit);
if (_ptrRenderer == NULL)
{
return kVideoI420;
}
return _ptrRenderer->PerferedVideoType();
}
bool ModuleVideoRenderImpl::IsFullScreen()
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return false;
}
return _ptrRenderer->FullScreen();
}
int32_t ModuleVideoRenderImpl::GetScreenResolution(
uint32_t& screenWidth,
uint32_t& screenHeight) const
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
}
uint32_t ModuleVideoRenderImpl::RenderFrameRate(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return 0;
}
return _ptrRenderer->RenderFrameRate(streamId);
}
int32_t ModuleVideoRenderImpl::SetStreamCropping(
const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
}
int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetTransparentBackground(enable);
}
int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
{
return -1;
}
int32_t ModuleVideoRenderImpl::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
backgroundColorRef, left, top, right, bottom);
}
int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
right, bottom);
}
int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
uint32_t stream_id, int32_t delay_ms) {
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
IncomingVideoStreamMap::const_iterator item =
_streamRenderMap.find(stream_id);
if (item == _streamRenderMap.end()) {
// This stream doesn't exist
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
delay_ms);
return -1;
}
assert(item->second != NULL);
return item->second->SetExpectedRenderDelay(delay_ms);
}
int32_t ModuleVideoRenderImpl::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_moduleCrit);
if (!_ptrRenderer)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No renderer", __FUNCTION__);
return -1;
}
return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
bottom);
}
} // namespace webrtc

View File

@ -0,0 +1,110 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
#include "webrtc/modules/video_render/video_render.h"
namespace webrtc {
// Class definitions
class IVideoRenderWin
{
public:
/**************************************************************************
*
* Constructor/destructor
*
***************************************************************************/
virtual ~IVideoRenderWin()
{
};
virtual int32_t Init() = 0;
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* CreateChannel(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t DeleteChannel(const uint32_t streamId) = 0;
virtual int32_t GetStreamSettings(const uint32_t channel,
const uint16_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) = 0;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender() = 0;
virtual int32_t StopRender() = 0;
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual bool IsFullScreen() = 0;
virtual int32_t SetCropping(const uint32_t channel,
const uint16_t streamId,
const float left, const float top,
const float right, const float bottom) = 0;
virtual int32_t ConfigureRenderer(const uint32_t channel,
const uint16_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual int32_t SetTransparentBackground(const bool enable) = 0;
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t colorText,
const uint32_t colorBg,
const float left, const float top,
const float right, const float bottom) = 0;
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom) = 0;
virtual int32_t ChangeWindow(void* window) = 0;
virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
uint64_t& availableMemory) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_

File diff suppressed because it is too large

View File

@ -0,0 +1,256 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
#include <memory>
#include "webrtc/modules/video_render/windows/i_video_render_win.h"
#include <d3d9.h>
#include <ddraw.h>
#include <map>
#include "webrtc/base/platform_thread.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
namespace webrtc {
class CriticalSectionWrapper;
class EventTimerWrapper;
class Trace;
class D3D9Channel: public VideoRenderCallback
{
public:
D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
CriticalSectionWrapper* critSect, Trace* trace);
virtual ~D3D9Channel();
// Inherited from VideoRenderCallback; called from the VideoAPI class.
// Called when the incoming frame size and/or the number of streams in the mix changes.
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
// A new frame is delivered.
virtual int DeliverFrame(const VideoFrame& videoFrame);
virtual int32_t RenderFrame(const uint32_t streamId,
const VideoFrame& videoFrame);
// Called to check if the video frame is updated.
int IsUpdated(bool& isUpdated);
// Called after the video frame has been rendered to the screen
int RenderOffFrame();
// Called to get the texture that contains the video frame
LPDIRECT3DTEXTURE9 GetTexture();
// Called to get the texture(video frame) size
int GetTextureWidth();
int GetTextureHeight();
// Stream placement settings (z-order and normalized rectangle).
void SetStreamSettings(uint16_t streamId,
uint32_t zOrder,
float startWidth,
float startHeight,
float stopWidth,
float stopHeight);
int GetStreamSettings(uint16_t streamId,
uint32_t& zOrder,
float& startWidth,
float& startHeight,
float& stopWidth,
float& stopHeight);
int ReleaseTexture();
int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
protected:
private:
// Critical section passed from the owner
CriticalSectionWrapper* _critSect;
LPDIRECT3DDEVICE9 _pd3dDevice;
LPDIRECT3DTEXTURE9 _pTexture;
bool _bufferIsUpdated;
// the frame size
int _width;
int _height;
// Stream settings.
// TODO: support multiple streams in one channel.
uint16_t _streamId;
uint32_t _zOrder;
float _startWidth;
float _startHeight;
float _stopWidth;
float _stopHeight;
};
class VideoRenderDirect3D9: IVideoRenderWin
{
public:
VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
~VideoRenderDirect3D9();
public:
//IVideoRenderWin
/**************************************************************************
*
* Init
*
***************************************************************************/
virtual int32_t Init();
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* CreateChannel(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual int32_t DeleteChannel(const uint32_t streamId);
virtual int32_t GetStreamSettings(const uint32_t channel,
const uint16_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom);
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender();
virtual int32_t StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual bool IsFullScreen();
virtual int32_t SetCropping(const uint32_t channel,
const uint16_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t channel,
const uint16_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t ChangeWindow(void* window);
virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
uint64_t& availableMemory);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t colorText,
const uint32_t colorBg,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom);
public:
// Get a channel by channel id
D3D9Channel* GetD3DChannel(int channel);
int UpdateRenderSurface();
protected:
// The thread rendering the screen
static bool ScreenUpdateThreadProc(void* obj);
bool ScreenUpdateProcess();
private:
// Init/close the d3d device
int InitDevice();
int CloseDevice();
// Transparent related functions
int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
DDCOLORKEY* transparentColorKey,
DWORD width,
DWORD height);
CriticalSectionWrapper& _refD3DCritsect;
Trace* _trace;
// TODO(pbos): Remove unique_ptr and use PlatformThread directly.
std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
EventTimerWrapper* _screenUpdateEvent;
HWND _hWnd;
bool _fullScreen;
RECT _originalHwndRect;
// FIXME: we probably don't need this since all the information can be obtained from _d3dChannels.
int _channel;
//Window size
UINT _winWidth;
UINT _winHeight;
// Device
LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
LPDIRECT3DTEXTURE9 _pTextureLogo;
std::map<int, D3D9Channel*> _d3dChannels;
std::multimap<int, unsigned int> _d3dZorder;
// The position where the logo will be placed
float _logoLeft;
float _logoTop;
float _logoRight;
float _logoBottom;
typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
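// Matches the signature of Direct3DCreate9Ex(), typically resolved at
// runtime via GetProcAddress() on d3d9.dll.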
LPDIRECT3DSURFACE9 _pd3dSurface;
DWORD GetVertexProcessingCaps();
int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
D3DPRESENT_PARAMETERS _d3dpp;
int ResetDevice();
int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
float startWidth, float startHeight,
float stopWidth, float stopHeight);
// Cached graphics memory figures reported via GetGraphicsMemory().
DWORD _totalMemory;
DWORD _availableMemory;
};
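// The screen-update members above follow the PlatformThread pattern of this
// era; a sketch of the typical wiring, with construction details assumed:
//
//   _screenUpdateThread.reset(new rtc::PlatformThread(
//       ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
//   _screenUpdateThread->Start();
//
// ScreenUpdateThreadProc() returns true to keep the loop alive; each pass
// waits on _screenUpdateEvent and then redraws via ScreenUpdateProcess().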
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_

View File

@ -0,0 +1,337 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/engine_configurations.h"
#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/trace.h"
#ifdef DIRECT3D9_RENDERING
#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
#endif
#include <tchar.h>
namespace webrtc {
VideoRenderWindowsImpl::VideoRenderWindowsImpl(const int32_t id,
const VideoRenderType videoRenderType, void* window, const bool fullscreen)
: _renderWindowsCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_prtWindow(window),
_fullscreen(fullscreen),
_renderMethod(kVideoRenderWinD3D9),
_ptrRendererWin(NULL) {
}
VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
{
delete &_renderWindowsCritsect;
if (_ptrRendererWin)
{
delete _ptrRendererWin;
_ptrRendererWin = NULL;
}
}
int32_t VideoRenderWindowsImpl::Init()
{
// Create the win renderer
switch (_renderMethod)
{
case kVideoRenderWinD3D9:
{
#ifdef DIRECT3D9_RENDERING
VideoRenderDirect3D9* ptrRenderer;
ptrRenderer = new VideoRenderDirect3D9(NULL, (HWND) _prtWindow, _fullscreen);
if (ptrRenderer == NULL)
{
break;
}
_ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
#else
return -1;  // Direct3D9 rendering is not compiled in.
#endif //DIRECT3D9_RENDERING
}
break;
default:
break;
}
// Init the renderer
if (_ptrRendererWin)
return _ptrRendererWin->Init();
else
return -1;
}
int32_t VideoRenderWindowsImpl::ChangeWindow(void* window)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
if (!_ptrRendererWin)
{
return -1;
}
else
{
return _ptrRendererWin->ChangeWindow(window);
}
}
VideoRenderCallback*
VideoRenderWindowsImpl::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
VideoRenderCallback* renderCallback = NULL;
if (_ptrRendererWin)
{
renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
top, right, bottom);
}
return renderCallback;
}
int32_t VideoRenderWindowsImpl::DeleteIncomingRenderStream(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->DeleteChannel(streamId);
}
return error;
}
int32_t VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
zOrder = 0;
left = 0;
top = 0;
right = 0;
bottom = 0;
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
top, right, bottom);
}
return error;
}
int32_t VideoRenderWindowsImpl::StartRender()
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->StartRender();
}
return error;
}
int32_t VideoRenderWindowsImpl::StopRender()
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->StopRender();
}
return error;
}
VideoRenderType VideoRenderWindowsImpl::RenderType()
{
return kRenderWindows;
}
RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
{
return kVideoI420;
}
bool VideoRenderWindowsImpl::FullScreen()
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
bool fullscreen = false;
if (_ptrRendererWin)
{
fullscreen = _ptrRendererWin->IsFullScreen();
}
return fullscreen;
}
int32_t VideoRenderWindowsImpl::GetGraphicsMemory(
uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const
{
if (_ptrRendererWin)
{
return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
availableGraphicsMemory);
}
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return -1;
}
int32_t VideoRenderWindowsImpl::GetScreenResolution(
uint32_t& screenWidth,
uint32_t& screenHeight) const
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
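// Screen resolution querying is not implemented; 0x0 is reported.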
screenWidth = 0;
screenHeight = 0;
return 0;
}
uint32_t VideoRenderWindowsImpl::RenderFrameRate(
const uint32_t streamId)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
return 0;
}
int32_t VideoRenderWindowsImpl::SetStreamCropping(
const uint32_t streamId,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
bottom);
}
return error;
}
int32_t VideoRenderWindowsImpl::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
top, right, bottom);
}
return error;
}
int32_t VideoRenderWindowsImpl::SetTransparentBackground(
const bool enable)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->SetTransparentBackground(enable);
}
return error;
}
int32_t VideoRenderWindowsImpl::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->SetText(textId, text, textLength,
textColorRef, backgroundColorRef,
left, top, right, bottom);
}
return error;
}
int32_t VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom)
{
CriticalSectionScoped cs(&_renderWindowsCritsect);
int32_t error = -1;
if (_ptrRendererWin)
{
error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
top, right, bottom);
}
return error;
}
} // namespace webrtc

View File

@ -0,0 +1,137 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
#include <Winerror.h>
#include <dxdiag.h>
#include "webrtc/modules/video_render/i_video_render.h"
#include "webrtc/modules/video_render/windows/i_video_render_win.h"
namespace webrtc {
class CriticalSectionWrapper;
#define EXPAND(x) x, sizeof(x)/sizeof(TCHAR)
enum VideoRenderWinMethod {
kVideoRenderWinD3D9 = 0,
};
// Class definitions
class VideoRenderWindowsImpl: IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderWindowsImpl(const int32_t id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~VideoRenderWindowsImpl();
virtual int32_t Init();
virtual int32_t ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t
DeleteIncomingRenderStream(const uint32_t streamId);
virtual int32_t
GetIncomingRenderStreamProperties(const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual int32_t StartRender();
virtual int32_t StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual int32_t
GetGraphicsMemory(uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual int32_t
GetScreenResolution(uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetTransparentBackground(const bool enable);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right, const float bottom);
private:
CriticalSectionWrapper& _renderWindowsCritsect;
void* _prtWindow;
bool _fullscreen;
VideoRenderWinMethod _renderMethod;
IVideoRenderWin* _ptrRendererWin;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_

View File

@ -304,6 +304,7 @@
'<(webrtc_root)/base/base.gyp:rtc_base_approved',
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/modules/modules.gyp:media_file',
'<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/webrtc.gyp:webrtc',
'rtp_test_utils',
'test_support',

View File

@ -71,6 +71,7 @@ source_set("video") {
"../modules/video_capture:video_capture_module",
"../modules/video_coding",
"../modules/video_processing",
"../modules/video_render:video_render_module",
"../system_wrappers",
"../voice_engine",
]

View File

@ -12,6 +12,7 @@ include_rules = [
"+webrtc/modules/video_coding",
"+webrtc/modules/video_capture",
"+webrtc/modules/video_processing",
"+webrtc/modules/video_render",
"+webrtc/system_wrappers",
"+webrtc/voice_engine",
]

View File

@ -16,6 +16,7 @@
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/video/overuse_frame_detector.h"
#include "webrtc/video/send_statistics_proxy.h"
#include "webrtc/video/vie_encoder.h"

View File

@ -18,6 +18,7 @@
#include "webrtc/call/transport_adapter.h"
#include "webrtc/common_video/include/incoming_video_stream.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "webrtc/video/encoded_frame_callback_adapter.h"

View File

@ -25,6 +25,7 @@
#include "webrtc/modules/utility/include/process_thread.h"
#include "webrtc/modules/video_coding/video_coding_impl.h"
#include "webrtc/modules/video_processing/include/video_processing.h"
#include "webrtc/modules/video_render/video_render_defines.h"
#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/video/call_stats.h"
#include "webrtc/video/payload_router.h"

View File

@ -17,6 +17,7 @@
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:video_capture_module',
'<(webrtc_root)/modules/modules.gyp:video_processing',
'<(webrtc_root)/modules/modules.gyp:video_render_module',
'<(webrtc_root)/modules/modules.gyp:webrtc_utility',
'<(webrtc_root)/modules/modules.gyp:webrtc_video_coding',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',

View File

@ -148,6 +148,7 @@
['build_with_chromium==1', {
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:video_capture',
'<(webrtc_root)/modules/modules.gyp:video_render',
],
}],
],

View File

@ -57,6 +57,7 @@
],
'dependencies': [
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
'webrtc',
@ -143,6 +144,7 @@
'test/test.gyp:test_common',
'test/test.gyp:test_renderer',
'<(webrtc_root)/modules/modules.gyp:video_capture',
'<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
'webrtc',
],
@ -182,6 +184,7 @@
'<(webrtc_root)/common.gyp:webrtc_common',
'<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
'<(webrtc_root)/modules/modules.gyp:video_capture',
'<(webrtc_root)/modules/modules.gyp:video_render',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
'test/metrics.gyp:metrics',