From 0190367cea6f94fc0bff470cfbf9313d9ac0ce94 Mon Sep 17 00:00:00 2001 From: kjellander Date: Wed, 27 Apr 2016 08:56:50 -0700 Subject: [PATCH] Revert of Delete video_render module. (patchset #12 id:220001 of https://codereview.webrtc.org/1912143002/ ) Reason for revert: This breaks every buildbot in chromium.webrtc.fyi and I don't see any roll in progress to address this (and I don't see how that would be possible either). Usage in Chrome: https://code.google.com/p/chromium/codesearch#search/&q=modules.gyp%3Avideo_render&sq=package:chromium&type=cs Example failures: https://build.chromium.org/p/chromium.webrtc.fyi/builders/Linux%20Builder/builds/5420 https://build.chromium.org/p/chromium.webrtc.fyi/builders/Win%20Builder/builds/4526 I think it's fine to delete our video_render_module_internal_impl target and those files, but video_render target needs to remain. Original issue's description: > Delete video_render module. > > BUG=webrtc:5817 > > Committed: https://crrev.com/97cfd1ec05d07ef233356e57f7aa4b028b74ffba > Cr-Commit-Position: refs/heads/master@{#12526} TBR=mflodman@webrtc.org,pbos@webrtc.org,nisse@webrtc.org # Skipping CQ checks because original CL landed less than 1 days ago. NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true BUG=webrtc:5817 Review-Url: https://codereview.webrtc.org/1923613003 Cr-Commit-Position: refs/heads/master@{#12534} --- talk/app/webrtc/legacy_objc_api.gyp | 2 - webrtc/BUILD.gn | 6 +- webrtc/api/api.gyp | 2 +- webrtc/api/java/jni/peerconnection_jni.cc | 1 + webrtc/media/media.gyp | 3 + webrtc/modules/modules.gyp | 14 + webrtc/modules/modules_java.gyp | 9 + webrtc/modules/modules_java_chromium.gyp | 8 + webrtc/modules/video_render/BUILD.gn | 178 ++ webrtc/modules/video_render/DEPS | 5 + webrtc/modules/video_render/OWNERS | 12 + .../webrtc/videoengine/ViEAndroidGLES20.java | 371 +++ .../org/webrtc/videoengine/ViERenderer.java | 29 + .../videoengine/ViESurfaceRenderer.java | 185 ++ .../android/video_render_android_impl.cc | 316 +++ .../android/video_render_android_impl.h | 154 ++ .../video_render_android_native_opengl2.cc | 450 ++++ .../video_render_android_native_opengl2.h | 95 + .../video_render_android_surface_view.cc | 474 ++++ .../video_render_android_surface_view.h | 83 + .../android/video_render_opengles20.cc | 397 ++++ .../android/video_render_opengles20.h | 57 + .../external/video_render_external_impl.cc | 195 ++ .../external/video_render_external_impl.h | 128 ++ webrtc/modules/video_render/i_video_render.h | 129 ++ webrtc/modules/video_render/ios/open_gles20.h | 64 + .../modules/video_render/ios/open_gles20.mm | 330 +++ .../ios/video_render_ios_channel.h | 45 + .../ios/video_render_ios_channel.mm | 61 + .../ios/video_render_ios_gles20.h | 87 + .../ios/video_render_ios_gles20.mm | 285 +++ .../video_render/ios/video_render_ios_impl.h | 105 + .../video_render/ios/video_render_ios_impl.mm | 170 ++ .../video_render/ios/video_render_ios_view.h | 34 + .../video_render/ios/video_render_ios_view.mm | 163 ++ .../linux/video_render_linux_impl.cc | 261 +++ .../linux/video_render_linux_impl.h | 128 ++ .../video_render/linux/video_x11_channel.cc | 315 +++ .../video_render/linux/video_x11_channel.h | 96 + .../video_render/linux/video_x11_render.cc | 153 ++ .../video_render/linux/video_x11_render.h | 58 + .../mac/cocoa_full_screen_window.h | 33 + .../mac/cocoa_full_screen_window.mm | 87 + .../video_render/mac/cocoa_render_view.h | 32 + .../video_render/mac/cocoa_render_view.mm | 55 + .../video_render/mac/video_render_agl.cc | 1987 +++++++++++++++++ 
.../video_render/mac/video_render_agl.h | 178 ++ .../mac/video_render_mac_carbon_impl.cc | 280 +++ .../mac/video_render_mac_carbon_impl.h | 146 ++ .../mac/video_render_mac_cocoa_impl.h | 141 ++ .../mac/video_render_mac_cocoa_impl.mm | 253 +++ .../video_render/mac/video_render_nsopengl.h | 192 ++ .../video_render/mac/video_render_nsopengl.mm | 1247 +++++++++++ .../test/testAPI/renderStartImage.bmp | Bin 0 -> 304182 bytes .../video_render/test/testAPI/testAPI.cc | 645 ++++++ .../video_render/test/testAPI/testAPI.h | 18 + .../test/testAPI/testAPI_android.cc | 15 + .../video_render/test/testAPI/testAPI_mac.mm | 69 + webrtc/modules/video_render/video_render.gypi | 218 ++ webrtc/modules/video_render/video_render.h | 255 +++ .../video_render/video_render_defines.h | 70 + .../modules/video_render/video_render_impl.cc | 550 +++++ .../modules/video_render/video_render_impl.h | 208 ++ .../video_render/video_render_internal.h | 27 + .../video_render_internal_impl.cc | 773 +++++++ .../video_render/windows/i_video_render_win.h | 110 + .../windows/video_render_direct3d9.cc | 1160 ++++++++++ .../windows/video_render_direct3d9.h | 256 +++ .../windows/video_render_windows_impl.cc | 337 +++ .../windows/video_render_windows_impl.h | 137 ++ webrtc/test/test.gyp | 1 + webrtc/video/BUILD.gn | 1 + webrtc/video/DEPS | 1 + webrtc/video/video_capture_input.cc | 1 + webrtc/video/video_receive_stream.h | 1 + webrtc/video/vie_channel.cc | 1 + webrtc/video/webrtc_video.gypi | 1 + webrtc/webrtc.gyp | 1 + webrtc/webrtc_tests.gypi | 3 + 79 files changed, 15144 insertions(+), 4 deletions(-) create mode 100644 webrtc/modules/video_render/BUILD.gn create mode 100644 webrtc/modules/video_render/DEPS create mode 100644 webrtc/modules/video_render/OWNERS create mode 100644 webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java create mode 100644 webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java create mode 100644 webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java create mode 100644 webrtc/modules/video_render/android/video_render_android_impl.cc create mode 100644 webrtc/modules/video_render/android/video_render_android_impl.h create mode 100644 webrtc/modules/video_render/android/video_render_android_native_opengl2.cc create mode 100644 webrtc/modules/video_render/android/video_render_android_native_opengl2.h create mode 100644 webrtc/modules/video_render/android/video_render_android_surface_view.cc create mode 100644 webrtc/modules/video_render/android/video_render_android_surface_view.h create mode 100644 webrtc/modules/video_render/android/video_render_opengles20.cc create mode 100644 webrtc/modules/video_render/android/video_render_opengles20.h create mode 100644 webrtc/modules/video_render/external/video_render_external_impl.cc create mode 100644 webrtc/modules/video_render/external/video_render_external_impl.h create mode 100644 webrtc/modules/video_render/i_video_render.h create mode 100644 webrtc/modules/video_render/ios/open_gles20.h create mode 100644 webrtc/modules/video_render/ios/open_gles20.mm create mode 100644 webrtc/modules/video_render/ios/video_render_ios_channel.h create mode 100644 webrtc/modules/video_render/ios/video_render_ios_channel.mm create mode 100644 webrtc/modules/video_render/ios/video_render_ios_gles20.h create mode 100644 webrtc/modules/video_render/ios/video_render_ios_gles20.mm create mode 100644 webrtc/modules/video_render/ios/video_render_ios_impl.h create mode 100644 
webrtc/modules/video_render/ios/video_render_ios_impl.mm create mode 100644 webrtc/modules/video_render/ios/video_render_ios_view.h create mode 100644 webrtc/modules/video_render/ios/video_render_ios_view.mm create mode 100644 webrtc/modules/video_render/linux/video_render_linux_impl.cc create mode 100644 webrtc/modules/video_render/linux/video_render_linux_impl.h create mode 100644 webrtc/modules/video_render/linux/video_x11_channel.cc create mode 100644 webrtc/modules/video_render/linux/video_x11_channel.h create mode 100644 webrtc/modules/video_render/linux/video_x11_render.cc create mode 100644 webrtc/modules/video_render/linux/video_x11_render.h create mode 100644 webrtc/modules/video_render/mac/cocoa_full_screen_window.h create mode 100644 webrtc/modules/video_render/mac/cocoa_full_screen_window.mm create mode 100644 webrtc/modules/video_render/mac/cocoa_render_view.h create mode 100644 webrtc/modules/video_render/mac/cocoa_render_view.mm create mode 100644 webrtc/modules/video_render/mac/video_render_agl.cc create mode 100644 webrtc/modules/video_render/mac/video_render_agl.h create mode 100644 webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc create mode 100644 webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h create mode 100644 webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h create mode 100644 webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm create mode 100644 webrtc/modules/video_render/mac/video_render_nsopengl.h create mode 100644 webrtc/modules/video_render/mac/video_render_nsopengl.mm create mode 100644 webrtc/modules/video_render/test/testAPI/renderStartImage.bmp create mode 100644 webrtc/modules/video_render/test/testAPI/testAPI.cc create mode 100644 webrtc/modules/video_render/test/testAPI/testAPI.h create mode 100644 webrtc/modules/video_render/test/testAPI/testAPI_android.cc create mode 100644 webrtc/modules/video_render/test/testAPI/testAPI_mac.mm create mode 100644 webrtc/modules/video_render/video_render.gypi create mode 100644 webrtc/modules/video_render/video_render.h create mode 100644 webrtc/modules/video_render/video_render_defines.h create mode 100644 webrtc/modules/video_render/video_render_impl.cc create mode 100644 webrtc/modules/video_render/video_render_impl.h create mode 100644 webrtc/modules/video_render/video_render_internal.h create mode 100644 webrtc/modules/video_render/video_render_internal_impl.cc create mode 100644 webrtc/modules/video_render/windows/i_video_render_win.h create mode 100644 webrtc/modules/video_render/windows/video_render_direct3d9.cc create mode 100644 webrtc/modules/video_render/windows/video_render_direct3d9.h create mode 100644 webrtc/modules/video_render/windows/video_render_windows_impl.cc create mode 100644 webrtc/modules/video_render/windows/video_render_windows_impl.h diff --git a/talk/app/webrtc/legacy_objc_api.gyp b/talk/app/webrtc/legacy_objc_api.gyp index c55e74c29d..f681906c9f 100755 --- a/talk/app/webrtc/legacy_objc_api.gyp +++ b/talk/app/webrtc/legacy_objc_api.gyp @@ -157,8 +157,6 @@ 'OTHER_LDFLAGS': [ '-framework CoreGraphics', '-framework GLKit', - '-framework OpenGLES', - '-framework QuartzCore', ], }, }, diff --git a/webrtc/BUILD.gn b/webrtc/BUILD.gn index 3ae09da2c9..9be2aa0c01 100644 --- a/webrtc/BUILD.gn +++ b/webrtc/BUILD.gn @@ -204,7 +204,10 @@ source_set("webrtc") { ] if (build_with_chromium) { - deps += [ "modules/video_capture" ] + deps += [ + "modules/video_capture", + "modules/video_render", + ] } if (rtc_enable_protobuf) { @@ -219,6 +222,7 @@ if 
(!build_with_chromium) { deps = [ ":webrtc", "modules/video_capture:video_capture_internal_impl", + "modules/video_render:video_render_internal_impl", "test", ] } diff --git a/webrtc/api/api.gyp b/webrtc/api/api.gyp index 1267595327..0341741e72 100644 --- a/webrtc/api/api.gyp +++ b/webrtc/api/api.gyp @@ -115,7 +115,7 @@ 'java/android', '<(webrtc_base_dir)/java/src', '<(webrtc_modules_dir)/audio_device/android/java/src', - + '<(webrtc_modules_dir)/video_render/android/java/src', ], }, 'includes': ['../../build/java.gypi'], diff --git a/webrtc/api/java/jni/peerconnection_jni.cc b/webrtc/api/java/jni/peerconnection_jni.cc index 228e5e1ab1..c5154c95d2 100644 --- a/webrtc/api/java/jni/peerconnection_jni.cc +++ b/webrtc/api/java/jni/peerconnection_jni.cc @@ -70,6 +70,7 @@ #include "webrtc/media/devices/videorendererfactory.h" #include "webrtc/media/engine/webrtcvideodecoderfactory.h" #include "webrtc/media/engine/webrtcvideoencoderfactory.h" +#include "webrtc/modules/video_render/video_render_internal.h" #include "webrtc/system_wrappers/include/field_trial_default.h" #include "webrtc/system_wrappers/include/logcat_trace_context.h" #include "webrtc/system_wrappers/include/trace.h" diff --git a/webrtc/media/media.gyp b/webrtc/media/media.gyp index 230d33ab38..5c4a24891f 100644 --- a/webrtc/media/media.gyp +++ b/webrtc/media/media.gyp @@ -15,6 +15,7 @@ 'dependencies': [ '<(webrtc_root)/base/base.gyp:rtc_base_approved', '<(webrtc_root)/common.gyp:webrtc_common', + '<(webrtc_root)/modules/modules.gyp:video_render_module', '<(webrtc_root)/webrtc.gyp:webrtc', '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine', '<(webrtc_root)/system_wrappers/system_wrappers.gyp:metrics_default', @@ -132,6 +133,7 @@ ['build_with_chromium==1', { 'dependencies': [ '<(webrtc_root)/modules/modules.gyp:video_capture', + '<(webrtc_root)/modules/modules.gyp:video_render', ], }, { 'defines': [ @@ -146,6 +148,7 @@ }, 'dependencies': [ '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl', + '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl', ], }], ['OS=="linux" and use_gtk==1', { diff --git a/webrtc/modules/modules.gyp b/webrtc/modules/modules.gyp index de472720da..502454cef8 100644 --- a/webrtc/modules/modules.gyp +++ b/webrtc/modules/modules.gyp @@ -26,6 +26,7 @@ 'video_coding/video_coding.gypi', 'video_capture/video_capture.gypi', 'video_processing/video_processing.gypi', + 'video_render/video_render.gypi', ], 'conditions': [ ['include_tests==1', { @@ -780,6 +781,19 @@ 'modules_unittests.isolate', ], }, + { + 'target_name': 'video_render_tests_run', + 'type': 'none', + 'dependencies': [ + 'video_render_tests', + ], + 'includes': [ + '../build/isolate.gypi', + ], + 'sources': [ + 'video_render_tests.isolate', + ], + }, ], }], ], diff --git a/webrtc/modules/modules_java.gyp b/webrtc/modules/modules_java.gyp index 2a72fb30a4..060de2a067 100644 --- a/webrtc/modules/modules_java.gyp +++ b/webrtc/modules/modules_java.gyp @@ -18,5 +18,14 @@ 'includes': [ '../../build/java.gypi' ], }, # audio_device_module_java + { + 'target_name': 'video_render_module_java', + 'type': 'none', + 'variables': { + 'java_in_dir': 'video_render/android/java', + 'additional_src_dirs': [ '../base/java/src', ], + }, + 'includes': [ '../../build/java.gypi' ], + }, # video_render_module_java ], } diff --git a/webrtc/modules/modules_java_chromium.gyp b/webrtc/modules/modules_java_chromium.gyp index ebc53d60ff..32d2d8d24e 100644 --- a/webrtc/modules/modules_java_chromium.gyp +++ 
b/webrtc/modules/modules_java_chromium.gyp @@ -16,5 +16,13 @@ }, 'includes': [ '../../../build/java.gypi' ], }, # audio_device_module_java + { + 'target_name': 'video_render_module_java', + 'type': 'none', + 'variables': { + 'java_in_dir': 'video_render/android/java', + }, + 'includes': [ '../../../build/java.gypi' ], + }, # video_render_module_java ], } diff --git a/webrtc/modules/video_render/BUILD.gn b/webrtc/modules/video_render/BUILD.gn new file mode 100644 index 0000000000..0771bd7080 --- /dev/null +++ b/webrtc/modules/video_render/BUILD.gn @@ -0,0 +1,178 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../build/webrtc.gni") + +source_set("video_render_module") { + sources = [ + "external/video_render_external_impl.cc", + "external/video_render_external_impl.h", + "i_video_render.h", + "video_render.h", + "video_render_defines.h", + "video_render_impl.h", + ] + + deps = [ + "../..:webrtc_common", + "../../common_video", + "../../system_wrappers", + "../utility", + ] + + configs += [ "../..:common_config" ] + public_configs = [ "../..:common_inherited_config" ] + + if (is_clang) { + # Suppress warnings from Chrome's Clang plugins. + # See http://code.google.com/p/webrtc/issues/detail?id=163 for details. + configs -= [ "//build/config/clang:find_bad_constructs" ] + } +} + +source_set("video_render") { + sources = [ + "video_render_impl.cc", + ] + deps = [ + ":video_render_module", + "../../system_wrappers", + ] + + configs += [ "../..:common_config" ] + public_configs = [ "../..:common_inherited_config" ] + + if (is_clang) { + # Suppress warnings from Chrome's Clang plugins. + # See http://code.google.com/p/webrtc/issues/detail?id=163 for details. 
+ configs -= [ "//build/config/clang:find_bad_constructs" ] + } +} + +if (!build_with_chromium) { + config("video_render_internal_impl_config") { + if (is_ios) { + libs = [ + "OpenGLES.framework", + "QuartzCore.framework", + ] + } + } + + source_set("video_render_internal_impl") { + libs = [] + sources = [ + "video_render_internal_impl.cc", + ] + deps = [ + ":video_render_module", + "../../system_wrappers", + ] + + if (is_linux) { + sources += [ + "linux/video_render_linux_impl.cc", + "linux/video_render_linux_impl.h", + "linux/video_x11_channel.cc", + "linux/video_x11_channel.h", + "linux/video_x11_render.cc", + "linux/video_x11_render.h", + ] + + deps += [ "../..:webrtc_common" ] + + libs += [ "Xext" ] + } + if (is_mac) { + sources += [ + "mac/cocoa_full_screen_window.h", + "mac/cocoa_full_screen_window.mm", + "mac/cocoa_render_view.h", + "mac/cocoa_render_view.mm", + "mac/video_render_agl.cc", + "mac/video_render_agl.h", + "mac/video_render_mac_carbon_impl.cc", + "mac/video_render_mac_carbon_impl.h", + "mac/video_render_mac_cocoa_impl.h", + "mac/video_render_mac_cocoa_impl.mm", + "mac/video_render_nsopengl.h", + "mac/video_render_nsopengl.mm", + ] + + libs += [ + "CoreVideo.framework", + "QTKit.framework", + ] + } + if (is_win) { + sources += [ + "windows/i_video_render_win.h", + "windows/video_render_direct3d9.cc", + "windows/video_render_direct3d9.h", + "windows/video_render_windows_impl.cc", + "windows/video_render_windows_impl.h", + ] + + directxsdk_exists = + exec_script("//build/dir_exists.py", + [ rebase_path("//third_party/directxsdk/files", + root_build_dir) ], + "trim string") == "True" + if (directxsdk_exists) { + directxsdk_path = "//third_party/directxsdk/files" + } else { + directxsdk_path = + exec_script("../../build/find_directx_sdk.py", [], "trim string") + } + include_dirs = [ directxsdk_path + "/Include" ] + } + if (is_android) { + sources += [ + "android/video_render_android_impl.cc", + "android/video_render_android_impl.h", + "android/video_render_android_native_opengl2.cc", + "android/video_render_android_native_opengl2.h", + "android/video_render_android_surface_view.cc", + "android/video_render_android_surface_view.h", + "android/video_render_opengles20.cc", + "android/video_render_opengles20.h", + ] + + libs += [ "GLESv2" ] + } + if (is_ios) { + sources += [ + "ios/open_gles20.h", + "ios/open_gles20.mm", + "ios/video_render_ios_channel.h", + "ios/video_render_ios_channel.mm", + "ios/video_render_ios_gles20.h", + "ios/video_render_ios_gles20.mm", + "ios/video_render_ios_impl.h", + "ios/video_render_ios_impl.mm", + "ios/video_render_ios_view.h", + "ios/video_render_ios_view.mm", + ] + + deps += [ "../..:webrtc_common" ] + + cflags = [ "-fobjc-arc" ] # CLANG_ENABLE_OBJC_ARC = YES. + } + + all_dependent_configs = [ ":video_render_internal_impl_config" ] + + configs += [ "../..:common_config" ] + public_configs = [ "../..:common_inherited_config" ] + + if (is_clang) { + # Suppress warnings from Chrome's Clang plugins. + # See http://code.google.com/p/webrtc/issues/detail?id=163 for details. 
+ configs -= [ "//build/config/clang:find_bad_constructs" ] + } + } +} diff --git a/webrtc/modules/video_render/DEPS b/webrtc/modules/video_render/DEPS new file mode 100644 index 0000000000..58ae9fe714 --- /dev/null +++ b/webrtc/modules/video_render/DEPS @@ -0,0 +1,5 @@ +include_rules = [ + "+webrtc/base", + "+webrtc/common_video", + "+webrtc/system_wrappers", +] diff --git a/webrtc/modules/video_render/OWNERS b/webrtc/modules/video_render/OWNERS new file mode 100644 index 0000000000..3aaa5328f5 --- /dev/null +++ b/webrtc/modules/video_render/OWNERS @@ -0,0 +1,12 @@ +mflodman@webrtc.org +perkj@webrtc.org +tkchin@webrtc.org + +per-file *.isolate=kjellander@webrtc.org + +# These are for the common case of adding or renaming files. If you're doing +# structural changes, please get a review from a reviewer in this file. +per-file *.gyp=* +per-file *.gypi=* + +per-file BUILD.gn=kjellander@webrtc.org diff --git a/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java new file mode 100644 index 0000000000..fa756ba67f --- /dev/null +++ b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java @@ -0,0 +1,371 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.videoengine; + +import java.util.concurrent.locks.ReentrantLock; + +import javax.microedition.khronos.egl.EGL10; +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.egl.EGLContext; +import javax.microedition.khronos.egl.EGLDisplay; +import javax.microedition.khronos.opengles.GL10; + +import android.app.ActivityManager; +import android.content.Context; +import android.content.pm.ConfigurationInfo; +import android.graphics.PixelFormat; +import android.opengl.GLSurfaceView; + +import org.webrtc.Logging; + +public class ViEAndroidGLES20 extends GLSurfaceView + implements GLSurfaceView.Renderer { + private static String TAG = "WEBRTC-JR"; + private static final boolean DEBUG = false; + // True if onSurfaceCreated has been called. + private boolean surfaceCreated = false; + private boolean openGLCreated = false; + // True if NativeFunctionsRegistered has been called. + private boolean nativeFunctionsRegisted = false; + private ReentrantLock nativeFunctionLock = new ReentrantLock(); + // Address of Native object that will do the drawing. + private long nativeObject = 0; + private int viewWidth = 0; + private int viewHeight = 0; + + public static boolean UseOpenGL2(Object renderWindow) { + return ViEAndroidGLES20.class.isInstance(renderWindow); + } + + public ViEAndroidGLES20(Context context) { + super(context); + init(false, 0, 0); + } + + public ViEAndroidGLES20(Context context, boolean translucent, + int depth, int stencil) { + super(context); + init(translucent, depth, stencil); + } + + private void init(boolean translucent, int depth, int stencil) { + + // By default, GLSurfaceView() creates a RGB_565 opaque surface. 
+ // If we want a translucent one, we should change the surface's + // format here, using PixelFormat.TRANSLUCENT for GL Surfaces + // is interpreted as any 32-bit surface with alpha by SurfaceFlinger. + if (translucent) { + this.getHolder().setFormat(PixelFormat.TRANSLUCENT); + } + + // Setup the context factory for 2.0 rendering. + // See ContextFactory class definition below + setEGLContextFactory(new ContextFactory()); + + // We need to choose an EGLConfig that matches the format of + // our surface exactly. This is going to be done in our + // custom config chooser. See ConfigChooser class definition + // below. + setEGLConfigChooser( translucent ? + new ConfigChooser(8, 8, 8, 8, depth, stencil) : + new ConfigChooser(5, 6, 5, 0, depth, stencil) ); + + // Set the renderer responsible for frame rendering + this.setRenderer(this); + this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); + } + + private static class ContextFactory implements GLSurfaceView.EGLContextFactory { + private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; + public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) { + Logging.w(TAG, "creating OpenGL ES 2.0 context"); + checkEglError("Before eglCreateContext", egl); + int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; + EGLContext context = egl.eglCreateContext(display, eglConfig, + EGL10.EGL_NO_CONTEXT, attrib_list); + checkEglError("After eglCreateContext", egl); + return context; + } + + public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) { + egl.eglDestroyContext(display, context); + } + } + + private static void checkEglError(String prompt, EGL10 egl) { + int error; + while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) { + Logging.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error)); + } + } + + private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser { + + public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) { + mRedSize = r; + mGreenSize = g; + mBlueSize = b; + mAlphaSize = a; + mDepthSize = depth; + mStencilSize = stencil; + } + + // This EGL config specification is used to specify 2.0 rendering. + // We use a minimum size of 4 bits for red/green/blue, but will + // perform actual matching in chooseConfig() below. 
+ private static int EGL_OPENGL_ES2_BIT = 4; + private static int[] s_configAttribs2 = + { + EGL10.EGL_RED_SIZE, 4, + EGL10.EGL_GREEN_SIZE, 4, + EGL10.EGL_BLUE_SIZE, 4, + EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, + EGL10.EGL_NONE + }; + + public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) { + + // Get the number of minimally matching EGL configurations + int[] num_config = new int[1]; + egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config); + + int numConfigs = num_config[0]; + + if (numConfigs <= 0) { + throw new IllegalArgumentException("No configs match configSpec"); + } + + // Allocate then read the array of minimally matching EGL configs + EGLConfig[] configs = new EGLConfig[numConfigs]; + egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config); + + if (DEBUG) { + printConfigs(egl, display, configs); + } + // Now return the "best" one + return chooseConfig(egl, display, configs); + } + + public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, + EGLConfig[] configs) { + for(EGLConfig config : configs) { + int d = findConfigAttrib(egl, display, config, + EGL10.EGL_DEPTH_SIZE, 0); + int s = findConfigAttrib(egl, display, config, + EGL10.EGL_STENCIL_SIZE, 0); + + // We need at least mDepthSize and mStencilSize bits + if (d < mDepthSize || s < mStencilSize) + continue; + + // We want an *exact* match for red/green/blue/alpha + int r = findConfigAttrib(egl, display, config, + EGL10.EGL_RED_SIZE, 0); + int g = findConfigAttrib(egl, display, config, + EGL10.EGL_GREEN_SIZE, 0); + int b = findConfigAttrib(egl, display, config, + EGL10.EGL_BLUE_SIZE, 0); + int a = findConfigAttrib(egl, display, config, + EGL10.EGL_ALPHA_SIZE, 0); + + if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize) + return config; + } + return null; + } + + private int findConfigAttrib(EGL10 egl, EGLDisplay display, + EGLConfig config, int attribute, int defaultValue) { + + if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) { + return mValue[0]; + } + return defaultValue; + } + + private void printConfigs(EGL10 egl, EGLDisplay display, + EGLConfig[] configs) { + int numConfigs = configs.length; + Logging.w(TAG, String.format("%d configurations", numConfigs)); + for (int i = 0; i < numConfigs; i++) { + Logging.w(TAG, String.format("Configuration %d:\n", i)); + printConfig(egl, display, configs[i]); + } + } + + private void printConfig(EGL10 egl, EGLDisplay display, + EGLConfig config) { + int[] attributes = { + EGL10.EGL_BUFFER_SIZE, + EGL10.EGL_ALPHA_SIZE, + EGL10.EGL_BLUE_SIZE, + EGL10.EGL_GREEN_SIZE, + EGL10.EGL_RED_SIZE, + EGL10.EGL_DEPTH_SIZE, + EGL10.EGL_STENCIL_SIZE, + EGL10.EGL_CONFIG_CAVEAT, + EGL10.EGL_CONFIG_ID, + EGL10.EGL_LEVEL, + EGL10.EGL_MAX_PBUFFER_HEIGHT, + EGL10.EGL_MAX_PBUFFER_PIXELS, + EGL10.EGL_MAX_PBUFFER_WIDTH, + EGL10.EGL_NATIVE_RENDERABLE, + EGL10.EGL_NATIVE_VISUAL_ID, + EGL10.EGL_NATIVE_VISUAL_TYPE, + 0x3030, // EGL10.EGL_PRESERVED_RESOURCES, + EGL10.EGL_SAMPLES, + EGL10.EGL_SAMPLE_BUFFERS, + EGL10.EGL_SURFACE_TYPE, + EGL10.EGL_TRANSPARENT_TYPE, + EGL10.EGL_TRANSPARENT_RED_VALUE, + EGL10.EGL_TRANSPARENT_GREEN_VALUE, + EGL10.EGL_TRANSPARENT_BLUE_VALUE, + 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB, + 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA, + 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL, + 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL, + EGL10.EGL_LUMINANCE_SIZE, + EGL10.EGL_ALPHA_MASK_SIZE, + EGL10.EGL_COLOR_BUFFER_TYPE, + EGL10.EGL_RENDERABLE_TYPE, + 0x3042 // EGL10.EGL_CONFORMANT + }; + String[] names = { + 
"EGL_BUFFER_SIZE", + "EGL_ALPHA_SIZE", + "EGL_BLUE_SIZE", + "EGL_GREEN_SIZE", + "EGL_RED_SIZE", + "EGL_DEPTH_SIZE", + "EGL_STENCIL_SIZE", + "EGL_CONFIG_CAVEAT", + "EGL_CONFIG_ID", + "EGL_LEVEL", + "EGL_MAX_PBUFFER_HEIGHT", + "EGL_MAX_PBUFFER_PIXELS", + "EGL_MAX_PBUFFER_WIDTH", + "EGL_NATIVE_RENDERABLE", + "EGL_NATIVE_VISUAL_ID", + "EGL_NATIVE_VISUAL_TYPE", + "EGL_PRESERVED_RESOURCES", + "EGL_SAMPLES", + "EGL_SAMPLE_BUFFERS", + "EGL_SURFACE_TYPE", + "EGL_TRANSPARENT_TYPE", + "EGL_TRANSPARENT_RED_VALUE", + "EGL_TRANSPARENT_GREEN_VALUE", + "EGL_TRANSPARENT_BLUE_VALUE", + "EGL_BIND_TO_TEXTURE_RGB", + "EGL_BIND_TO_TEXTURE_RGBA", + "EGL_MIN_SWAP_INTERVAL", + "EGL_MAX_SWAP_INTERVAL", + "EGL_LUMINANCE_SIZE", + "EGL_ALPHA_MASK_SIZE", + "EGL_COLOR_BUFFER_TYPE", + "EGL_RENDERABLE_TYPE", + "EGL_CONFORMANT" + }; + int[] value = new int[1]; + for (int i = 0; i < attributes.length; i++) { + int attribute = attributes[i]; + String name = names[i]; + if (egl.eglGetConfigAttrib(display, config, attribute, value)) { + Logging.w(TAG, String.format(" %s: %d\n", name, value[0])); + } else { + // Logging.w(TAG, String.format(" %s: failed\n", name)); + while (egl.eglGetError() != EGL10.EGL_SUCCESS); + } + } + } + + // Subclasses can adjust these values: + protected int mRedSize; + protected int mGreenSize; + protected int mBlueSize; + protected int mAlphaSize; + protected int mDepthSize; + protected int mStencilSize; + private int[] mValue = new int[1]; + } + + // IsSupported + // Return true if this device support Open GL ES 2.0 rendering. + public static boolean IsSupported(Context context) { + ActivityManager am = + (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); + ConfigurationInfo info = am.getDeviceConfigurationInfo(); + if(info.reqGlEsVersion >= 0x20000) { + // Open GL ES 2.0 is supported. + return true; + } + return false; + } + + public void onDrawFrame(GL10 gl) { + nativeFunctionLock.lock(); + if(!nativeFunctionsRegisted || !surfaceCreated) { + nativeFunctionLock.unlock(); + return; + } + + if(!openGLCreated) { + if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) { + return; // Failed to create OpenGL + } + openGLCreated = true; // Created OpenGL successfully + } + DrawNative(nativeObject); // Draw the new frame + nativeFunctionLock.unlock(); + } + + public void onSurfaceChanged(GL10 gl, int width, int height) { + surfaceCreated = true; + viewWidth = width; + viewHeight = height; + + nativeFunctionLock.lock(); + if(nativeFunctionsRegisted) { + if(CreateOpenGLNative(nativeObject,width,height) == 0) + openGLCreated = true; + } + nativeFunctionLock.unlock(); + } + + public void onSurfaceCreated(GL10 gl, EGLConfig config) { + } + + public void RegisterNativeObject(long nativeObject) { + nativeFunctionLock.lock(); + this.nativeObject = nativeObject; + nativeFunctionsRegisted = true; + nativeFunctionLock.unlock(); + } + + public void DeRegisterNativeObject() { + nativeFunctionLock.lock(); + nativeFunctionsRegisted = false; + openGLCreated = false; + this.nativeObject = 0; + nativeFunctionLock.unlock(); + } + + public void ReDraw() { + if(surfaceCreated) { + // Request the renderer to redraw using the render thread context. 
+ this.requestRender(); + } + } + + private native int CreateOpenGLNative(long nativeObject, + int width, int height); + private native void DrawNative(long nativeObject); + +} diff --git a/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java new file mode 100644 index 0000000000..50b1a595cc --- /dev/null +++ b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.videoengine; + +import android.content.Context; +import android.view.SurfaceHolder; +import android.view.SurfaceView; + +public class ViERenderer { + public static SurfaceView CreateRenderer(Context context) { + return CreateRenderer(context, false); + } + + public static SurfaceView CreateRenderer(Context context, + boolean useOpenGLES2) { + if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context)) + return new ViEAndroidGLES20(context); + else + return new SurfaceView(context); + } +} diff --git a/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java new file mode 100644 index 0000000000..71f26c259f --- /dev/null +++ b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java @@ -0,0 +1,185 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc.videoengine; + +// The following four imports are needed saveBitmapToJPEG which +// is for debug only +import java.io.ByteArrayOutputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; + +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Rect; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.SurfaceHolder.Callback; + +import org.webrtc.Logging; + +public class ViESurfaceRenderer implements Callback { + + private final static String TAG = "WEBRTC"; + + // the bitmap used for drawing. 
+ private Bitmap bitmap = null; + private ByteBuffer byteBuffer = null; + private SurfaceHolder surfaceHolder; + // Rect of the source bitmap to draw + private Rect srcRect = new Rect(); + // Rect of the destination canvas to draw to + private Rect dstRect = new Rect(); + private float dstTopScale = 0; + private float dstBottomScale = 1; + private float dstLeftScale = 0; + private float dstRightScale = 1; + + public ViESurfaceRenderer(SurfaceView view) { + surfaceHolder = view.getHolder(); + if(surfaceHolder == null) + return; + surfaceHolder.addCallback(this); + } + + // surfaceChanged and surfaceCreated share this function + private void changeDestRect(int dstWidth, int dstHeight) { + dstRect.right = (int)(dstRect.left + dstRightScale * dstWidth); + dstRect.bottom = (int)(dstRect.top + dstBottomScale * dstHeight); + } + + public void surfaceChanged(SurfaceHolder holder, int format, + int in_width, int in_height) { + Logging.d(TAG, "ViESurfaceRender::surfaceChanged"); + + changeDestRect(in_width, in_height); + + Logging.d(TAG, "ViESurfaceRender::surfaceChanged" + + " in_width:" + in_width + " in_height:" + in_height + + " srcRect.left:" + srcRect.left + + " srcRect.top:" + srcRect.top + + " srcRect.right:" + srcRect.right + + " srcRect.bottom:" + srcRect.bottom + + " dstRect.left:" + dstRect.left + + " dstRect.top:" + dstRect.top + + " dstRect.right:" + dstRect.right + + " dstRect.bottom:" + dstRect.bottom); + } + + public void surfaceCreated(SurfaceHolder holder) { + Canvas canvas = surfaceHolder.lockCanvas(); + if(canvas != null) { + Rect dst = surfaceHolder.getSurfaceFrame(); + if(dst != null) { + changeDestRect(dst.right - dst.left, dst.bottom - dst.top); + Logging.d(TAG, "ViESurfaceRender::surfaceCreated" + + " dst.left:" + dst.left + + " dst.top:" + dst.top + + " dst.right:" + dst.right + + " dst.bottom:" + dst.bottom + + " srcRect.left:" + srcRect.left + + " srcRect.top:" + srcRect.top + + " srcRect.right:" + srcRect.right + + " srcRect.bottom:" + srcRect.bottom + + " dstRect.left:" + dstRect.left + + " dstRect.top:" + dstRect.top + + " dstRect.right:" + dstRect.right + + " dstRect.bottom:" + dstRect.bottom); + } + surfaceHolder.unlockCanvasAndPost(canvas); + } + } + + public void surfaceDestroyed(SurfaceHolder holder) { + Logging.d(TAG, "ViESurfaceRenderer::surfaceDestroyed"); + bitmap = null; + byteBuffer = null; + } + + public Bitmap CreateBitmap(int width, int height) { + Logging.d(TAG, "CreateByteBitmap " + width + ":" + height); + if (bitmap == null) { + try { + android.os.Process.setThreadPriority( + android.os.Process.THREAD_PRIORITY_DISPLAY); + } + catch (Exception e) { + } + } + bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565); + srcRect.left = 0; + srcRect.top = 0; + srcRect.bottom = height; + srcRect.right = width; + return bitmap; + } + + public ByteBuffer CreateByteBuffer(int width, int height) { + Logging.d(TAG, "CreateByteBuffer " + width + ":" + height); + if (bitmap == null) { + bitmap = CreateBitmap(width, height); + byteBuffer = ByteBuffer.allocateDirect(width * height * 2); + } + return byteBuffer; + } + + public void SetCoordinates(float left, float top, + float right, float bottom) { + Logging.d(TAG, "SetCoordinates " + left + "," + top + ":" + + right + "," + bottom); + dstLeftScale = left; + dstTopScale = top; + dstRightScale = right; + dstBottomScale = bottom; + } + + // It saves bitmap data to a JPEG picture, this function is for debug only. 
+ private void saveBitmapToJPEG(int width, int height) { + ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream(); + bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteOutStream); + + try{ + FileOutputStream output = new FileOutputStream(String.format( + "/sdcard/render_%d.jpg", System.currentTimeMillis())); + output.write(byteOutStream.toByteArray()); + output.flush(); + output.close(); + } + catch (FileNotFoundException e) { + } + catch (IOException e) { + } + } + + public void DrawByteBuffer() { + if(byteBuffer == null) + return; + byteBuffer.rewind(); + bitmap.copyPixelsFromBuffer(byteBuffer); + DrawBitmap(); + } + + public void DrawBitmap() { + if(bitmap == null) + return; + + Canvas canvas = surfaceHolder.lockCanvas(); + if(canvas != null) { + // The follow line is for debug only + // saveBitmapToJPEG(srcRect.right - srcRect.left, + // srcRect.bottom - srcRect.top); + canvas.drawBitmap(bitmap, srcRect, dstRect, null); + surfaceHolder.unlockCanvasAndPost(canvas); + } + } + +} diff --git a/webrtc/modules/video_render/android/video_render_android_impl.cc b/webrtc/modules/video_render/android/video_render_android_impl.cc new file mode 100644 index 0000000000..9affb23d99 --- /dev/null +++ b/webrtc/modules/video_render/android/video_render_android_impl.cc @@ -0,0 +1,316 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/video_render/android/video_render_android_impl.h" + +#include "webrtc/modules/video_render/video_render_internal.h" +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" +#include "webrtc/system_wrappers/include/event_wrapper.h" +#include "webrtc/system_wrappers/include/tick_util.h" + +#ifdef ANDROID +#include <android/log.h> +#include <stdio.h> + +#undef WEBRTC_TRACE +#define WEBRTC_TRACE(a,b,c,...)
__android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) +#else +#include "webrtc/system_wrappers/include/trace.h" +#endif + +namespace webrtc { + +JavaVM* VideoRenderAndroid::g_jvm = NULL; + +int32_t SetRenderAndroidVM(JavaVM* javaVM) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__); + VideoRenderAndroid::g_jvm = javaVM; + return 0; +} + +VideoRenderAndroid::VideoRenderAndroid( + const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool /*fullscreen*/): + _id(id), + _critSect(*CriticalSectionWrapper::CreateCriticalSection()), + _renderType(videoRenderType), + _ptrWindow((jobject)(window)), + _javaShutDownFlag(false), + _javaShutdownEvent(*EventWrapper::Create()), + _javaRenderEvent(*EventWrapper::Create()), + _lastJavaRenderEvent(0), + _javaRenderJniEnv(NULL) { +} + +VideoRenderAndroid::~VideoRenderAndroid() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "VideoRenderAndroid dtor"); + + if (_javaRenderThread) + StopRender(); + + for (AndroidStreamMap::iterator it = _streamsMap.begin(); + it != _streamsMap.end(); + ++it) { + delete it->second; + } + delete &_javaShutdownEvent; + delete &_javaRenderEvent; + delete &_critSect; +} + +int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) { + return -1; +} + +VideoRenderCallback* +VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, const float top, + const float right, + const float bottom) { + CriticalSectionScoped cs(&_critSect); + + AndroidStream* renderStream = NULL; + AndroidStreamMap::iterator item = _streamsMap.find(streamId); + if (item != _streamsMap.end() && item->second != NULL) { + WEBRTC_TRACE(kTraceInfo, + kTraceVideoRenderer, + -1, + "%s: Render stream already exists", + __FUNCTION__); + return renderStream; + } + + renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top, + right, bottom, *this); + if (renderStream) { + _streamsMap[streamId] = renderStream; + } + else { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__); + return NULL; + } + return renderStream; +} + +int32_t VideoRenderAndroid::DeleteIncomingRenderStream( + const uint32_t streamId) { + CriticalSectionScoped cs(&_critSect); + + AndroidStreamMap::iterator item = _streamsMap.find(streamId); + if (item == _streamsMap.end()) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__); + return -1; + } + delete item->second; + _streamsMap.erase(item); + return 0; +} + +int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties( + const uint32_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom) const { + return -1; +} + +int32_t VideoRenderAndroid::StartRender() { + CriticalSectionScoped cs(&_critSect); + + if (_javaRenderThread) { + // StartRender is called when this stream should start render. + // However StopRender is not called when the streams stop rendering. + // Thus the the thread is only deleted when the renderer is removed. 
+ WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s, Render thread already exist", __FUNCTION__); + return 0; + } + + _javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this, + "AndroidRenderThread")); + + _javaRenderThread->Start(); + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started", + __FUNCTION__); + _javaRenderThread->SetPriority(rtc::kRealtimePriority); + return 0; +} + +int32_t VideoRenderAndroid::StopRender() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__); + { + CriticalSectionScoped cs(&_critSect); + if (!_javaRenderThread) + { + return -1; + } + _javaShutDownFlag = true; + _javaRenderEvent.Set(); + } + + _javaShutdownEvent.Wait(3000); + CriticalSectionScoped cs(&_critSect); + _javaRenderThread->Stop(); + _javaRenderThread.reset(); + + return 0; +} + +void VideoRenderAndroid::ReDraw() { + CriticalSectionScoped cs(&_critSect); + // Allow redraw if it was more than 20ms since last. + if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) { + _lastJavaRenderEvent = TickTime::MillisecondTimestamp(); + _javaRenderEvent.Set(); + } +} + +bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) { + return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess(); +} + +bool VideoRenderAndroid::JavaRenderThreadProcess() +{ + _javaRenderEvent.Wait(1000); + + CriticalSectionScoped cs(&_critSect); + if (!_javaRenderJniEnv) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !_javaRenderJniEnv) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, _javaRenderJniEnv); + return false; + } + } + + for (AndroidStreamMap::iterator it = _streamsMap.begin(); + it != _streamsMap.end(); + ++it) { + it->second->DeliverFrame(_javaRenderJniEnv); + } + + if (_javaShutDownFlag) { + if (g_jvm->DetachCurrentThread() < 0) + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + else { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "%s: Java thread detached", __FUNCTION__); + } + _javaRenderJniEnv = NULL; + _javaShutDownFlag = false; + _javaShutdownEvent.Set(); + return false; // Do not run this thread again.
+ } + return true; +} + +VideoRenderType VideoRenderAndroid::RenderType() { + return _renderType; +} + +RawVideoType VideoRenderAndroid::PerferedVideoType() { + return kVideoI420; +} + +bool VideoRenderAndroid::FullScreen() { + return false; +} + +int32_t VideoRenderAndroid::GetGraphicsMemory( + uint64_t& /*totalGraphicsMemory*/, + uint64_t& /*availableGraphicsMemory*/) const { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +int32_t VideoRenderAndroid::GetScreenResolution( + uint32_t& /*screenWidth*/, + uint32_t& /*screenHeight*/) const { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +uint32_t VideoRenderAndroid::RenderFrameRate( + const uint32_t /*streamId*/) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +int32_t VideoRenderAndroid::SetStreamCropping( + const uint32_t /*streamId*/, + const float /*left*/, + const float /*top*/, + const float /*right*/, + const float /*bottom*/) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +int32_t VideoRenderAndroid::ConfigureRenderer( + const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +int32_t VideoRenderAndroid::SetText( + const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, const float top, + const float rigth, const float bottom) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +int32_t VideoRenderAndroid::SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, const float top, + const float right, + const float bottom) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; +} + +} // namespace webrtc diff --git a/webrtc/modules/video_render/android/video_render_android_impl.h b/webrtc/modules/video_render/android/video_render_android_impl.h new file mode 100644 index 0000000000..06fd7a1c7c --- /dev/null +++ b/webrtc/modules/video_render/android/video_render_android_impl.h @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
*/ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ + +#include <jni.h> + +#include <map> +#include <memory> + +#include "webrtc/base/platform_thread.h" +#include "webrtc/modules/video_render/i_video_render.h" + + +namespace webrtc { + +//#define ANDROID_LOG + +class CriticalSectionWrapper; +class EventWrapper; + +// The object a module user uses to send new frames to the java renderer +// Base class for android render streams. + +class AndroidStream : public VideoRenderCallback { + public: + // DeliverFrame is called from a thread connected to the Java VM. + // Used for Delivering frame for rendering. + virtual void DeliverFrame(JNIEnv* jniEnv)=0; + + virtual ~AndroidStream() {}; +}; + +class VideoRenderAndroid: IVideoRender { + public: + VideoRenderAndroid(const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen); + + virtual ~VideoRenderAndroid(); + + virtual int32_t Init()=0; + + virtual int32_t ChangeWindow(void* window); + + virtual VideoRenderCallback* AddIncomingRenderStream( + const uint32_t streamId, + const uint32_t zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t DeleteIncomingRenderStream( + const uint32_t streamId); + + virtual int32_t GetIncomingRenderStreamProperties( + const uint32_t streamId, + uint32_t& zOrder, + float& left, float& top, + float& right, float& bottom) const; + + virtual int32_t StartRender(); + + virtual int32_t StopRender(); + + virtual void ReDraw(); + + // Properties + + virtual VideoRenderType RenderType(); + + virtual RawVideoType PerferedVideoType(); + + virtual bool FullScreen(); + + virtual int32_t GetGraphicsMemory( + uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const; + + virtual int32_t GetScreenResolution( + uint32_t& screenWidth, + uint32_t& screenHeight) const; + + virtual uint32_t RenderFrameRate(const uint32_t streamId); + + virtual int32_t SetStreamCropping(const uint32_t streamId, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t SetTransparentBackground(const bool enable); + + virtual int32_t ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, const float top, + const float rigth, const float bottom); + + virtual int32_t SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, const float left, + const float top, const float right, + const float bottom); + static JavaVM* g_jvm; + + protected: + virtual AndroidStream* CreateAndroidRenderChannel( + int32_t streamId, + int32_t zOrder, + const float left, + const float top, + const float right, + const float bottom, + VideoRenderAndroid& renderer) = 0; + + int32_t _id; + CriticalSectionWrapper& _critSect; + VideoRenderType _renderType; + jobject _ptrWindow; + + private: + static bool JavaRenderThreadFun(void* obj); + bool JavaRenderThreadProcess(); + + // Map with streams to render. + typedef std::map<uint32_t, AndroidStream*> AndroidStreamMap; + AndroidStreamMap _streamsMap; + // True if the _javaRenderThread thread shall be detached from the JVM. + bool _javaShutDownFlag; + EventWrapper& _javaShutdownEvent; + EventWrapper& _javaRenderEvent; + int64_t _lastJavaRenderEvent; + JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread. + // TODO(pbos): Remove unique_ptr and use the member directly. + std::unique_ptr<rtc::PlatformThread> _javaRenderThread; +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc new file mode 100644 index 0000000000..286776e317 --- /dev/null +++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc @@ -0,0 +1,450 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h" +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" +#include "webrtc/system_wrappers/include/tick_util.h" + +#ifdef ANDROID_LOG +#include <android/log.h> +#include <stdio.h> + +#undef WEBRTC_TRACE +#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__) +#else +#include "webrtc/system_wrappers/include/trace.h" +#endif + +namespace webrtc { + +AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer( + const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) : + VideoRenderAndroid(id, videoRenderType, window, fullscreen), + _javaRenderObj(NULL), + _javaRenderClass(NULL) { +} + +bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) { + if (!g_jvm) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "RendererAndroid():UseOpenGL No JVM set."); + return false; + } + bool isAttached = false; + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE( + kTraceError, + kTraceVideoRenderer, + -1, + "RendererAndroid(): Could not attach thread to JVM (%d, %p)", + res, env); + return false; + } + isAttached = true; + } + + // get the renderer class + jclass javaRenderClassLocal = + env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); + if (!javaRenderClassLocal) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "%s: could not find ViEAndroidRenderer class", + __FUNCTION__); + return false; + } + + // get the method ID for UseOpenGL + jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal, + "UseOpenGL2", + "(Ljava/lang/Object;)Z"); + if (cidUseOpenGL == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "%s: could not get UseOpenGL ID", __FUNCTION__); + return false; + } + jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal, + cidUseOpenGL, (jobject) window); + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + return res; +} +
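The GetEnv / AttachCurrentThread / DetachCurrentThread sequence in UseOpenGL2() above is repeated by hand in the destructor, in Init(), and again in the channel code further down. A minimal RAII sketch of that pattern, assuming a valid JavaVM* and using a hypothetical ScopedJvmAttach helper that is not part of this patch:

    #include <jni.h>

    // Hypothetical helper (not in this CL): pairs AttachCurrentThread with
    // DetachCurrentThread so a call site cannot forget the detach step.
    class ScopedJvmAttach {
     public:
      explicit ScopedJvmAttach(JavaVM* jvm) : jvm_(jvm) {
        if (jvm_->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4) !=
            JNI_OK) {
          // The calling thread is not attached yet: attach it now and
          // remember to detach it again in the destructor.
          attached_ = (jvm_->AttachCurrentThread(&env_, NULL) == 0);
          if (!attached_)
            env_ = NULL;
        }
      }
      ~ScopedJvmAttach() {
        if (attached_)
          jvm_->DetachCurrentThread();
      }
      // NULL if the attach failed; callers must check before use.
      JNIEnv* env() const { return env_; }

     private:
      JavaVM* jvm_;
      JNIEnv* env_ = NULL;
      bool attached_ = false;
    };

With such a helper, UseOpenGL2() would shrink to constructing ScopedJvmAttach attach(g_jvm), checking attach.env(), and doing the FindClass/CallStaticBooleanMethod work.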
+AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "AndroidNativeOpenGl2Renderer dtor"); + if (g_jvm) { + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + env = NULL; + } + else { + isAttached = true; + } + } + + env->DeleteGlobalRef(_javaRenderObj); + env->DeleteGlobalRef(_javaRenderClass); + + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + } +} + +int32_t AndroidNativeOpenGl2Renderer::Init() { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__); + if (!g_jvm) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "(%s): Not a valid Java VM pointer.", __FUNCTION__); + return -1; + } + if (!_ptrWindow) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "(%s): No window have been provided.", __FUNCTION__); + return -1; + } + + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + // get the ViEAndroidGLES20 class + jclass javaRenderClassLocal = + env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); + if (!javaRenderClassLocal) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not find ViEAndroidGLES20", __FUNCTION__); + return -1; + } + + // create a global reference to the class (to tell JNI that + // we are referencing it after this function has returned) + _javaRenderClass = + reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal)); + if (!_javaRenderClass) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not create Java SurfaceHolder class reference", + __FUNCTION__); + return -1; + } + + // Delete local class ref, we only use the global ref + env->DeleteLocalRef(javaRenderClassLocal); + + // create a reference to the object (to tell JNI that we are referencing it + // after this function has returned) + _javaRenderObj = env->NewGlobalRef(_ptrWindow); + if (!_javaRenderObj) { + WEBRTC_TRACE( + kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not create Java SurfaceRender object reference", + __FUNCTION__); + return -1; + } + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", + __FUNCTION__); + return 0; + +} +AndroidStream* +AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel( + int32_t streamId, + int32_t zOrder, + const float left, + const float top, + const float right, +
const float bottom, + VideoRenderAndroid& renderer) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", + __FUNCTION__, streamId); + AndroidNativeOpenGl2Channel* stream = + new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer, + _javaRenderObj); + if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) + return stream; + else { + delete stream; + } + return NULL; +} + +AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel( + uint32_t streamId, + JavaVM* jvm, + VideoRenderAndroid& renderer,jobject javaRenderObj): + _id(streamId), + _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()), + _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj), + _registerNativeCID(NULL), _deRegisterNativeCID(NULL), + _openGLRenderer(streamId) { + +} +AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "AndroidNativeOpenGl2Channel dtor"); + if (_jvm) { + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + env = NULL; + } else { + isAttached = true; + } + } + if (env && _deRegisterNativeCID) { + env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID); + } + + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + } + + delete &_renderCritSect; +} + +int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: AndroidNativeOpenGl2Channel", __FUNCTION__); + if (!_jvm) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Not a valid Java VM pointer", __FUNCTION__); + return -1; + } + + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + isAttached = true; + } + + jclass javaRenderClass = + env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); + if (!javaRenderClass) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not find ViESurfaceRenderer", __FUNCTION__); + return -1; + } + + // get the method ID for the ReDraw function + _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V"); + if (_redrawCid == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not get ReDraw ID", __FUNCTION__); + return -1; + } + + _registerNativeCID = env->GetMethodID(javaRenderClass, + "RegisterNativeObject", "(J)V"); + if (_registerNativeCID == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not get RegisterNativeObject ID", __FUNCTION__); + return -1; + } + + _deRegisterNativeCID = 
env->GetMethodID(javaRenderClass,
+                                          "DeRegisterNativeObject", "()V");
+  if (_deRegisterNativeCID == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get DeRegisterNativeObject ID",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  JNINativeMethod nativeFunctions[2] = {
+    { "DrawNative",
+      "(J)V",
+      (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
+    { "CreateOpenGLNative",
+      "(JII)I",
+      (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
+  };
+  if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
+                 "%s: Registered native functions", __FUNCTION__);
+  }
+  else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: Failed to register native functions", __FUNCTION__);
+    return -1;
+  }
+
+  env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
+  return 0;
+}
+
+int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
+                                                 const VideoFrame& videoFrame) {
+  //  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
+  _renderCritSect.Enter();
+  _bufferToRender = videoFrame;
+  _renderCritSect.Leave();
+  _renderer.ReDraw();
+  return 0;
+}
+
+/* Implements AndroidStream.
+ * Calls the Java object and renders the buffer in _bufferToRender.
+ */
+void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
+  // TickTime timeNow = TickTime::Now();
+
+  // Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
+
+  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+  //              "%s: time to deliver %lld", __FUNCTION__,
+  //              (TickTime::Now() - timeNow).Milliseconds());
+}
+
+/*
+ * JNI callback from Java class. Called when the renderer
+ * wants to render a frame. Called from the GLRenderThread.
+ * Method:    DrawNative
+ * Signature: (J)V
+ */
+void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
+    JNIEnv* env, jobject, jlong context) {
+  AndroidNativeOpenGl2Channel* renderChannel =
+      reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
+  renderChannel->DrawNative();
+}
+
+void AndroidNativeOpenGl2Channel::DrawNative() {
+  _renderCritSect.Enter();
+  _openGLRenderer.Render(_bufferToRender);
+  _renderCritSect.Leave();
+}
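RenderFrame() and DrawNative() above form a classic producer/consumer pair: the incoming video thread only copies the frame into _bufferToRender under _renderCritSect and pokes the Java render thread via ReDraw(), while the GL work happens later in DrawNative() on that thread, under the same lock. A minimal sketch of the hand-off, using std::mutex in place of CriticalSectionWrapper (PendingFrame is an invented name; VideoFrame copy-assignment is assumed, as this module already relies on it):

#include <mutex>

// Sketch of AndroidNativeOpenGl2Channel's producer/consumer hand-off.
struct PendingFrame {
  std::mutex lock;
  VideoFrame frame;

  void Publish(const VideoFrame& f) {  // incoming video thread
    std::lock_guard<std::mutex> guard(lock);
    frame = f;  // copy under the lock, then signal the render thread
  }
  void Draw(VideoRenderOpenGles20* gl) {  // Java/GL render thread
    std::lock_guard<std::mutex> guard(lock);
    gl->Render(frame);
  }
};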
+
+/*
+ * JNI callback from Java class. Called when the GLSurfaceView
+ * has created a surface. Called from the GLRenderThread.
+ * Method:    CreateOpenGLNativeStatic
+ * Signature: (JII)I
+ */
+jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
+    JNIEnv* env,
+    jobject,
+    jlong context,
+    jint width,
+    jint height) {
+  AndroidNativeOpenGl2Channel* renderChannel =
+      reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
+  return renderChannel->CreateOpenGLNative(width, height);
+}
+
+jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
+    int width, int height) {
+  return _openGLRenderer.Setup(width, height);
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.h b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
new file mode 100644
index 0000000000..8be247b834
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+
+#include <jni.h>
+
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/android/video_render_opengles20.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidNativeOpenGl2Channel: public AndroidStream {
+ public:
+  AndroidNativeOpenGl2Channel(
+      uint32_t streamId,
+      JavaVM* jvm,
+      VideoRenderAndroid& renderer, jobject javaRenderObj);
+  ~AndroidNativeOpenGl2Channel();
+
+  int32_t Init(int32_t zOrder, const float left, const float top,
+               const float right, const float bottom);
+
+  // Implements VideoRenderCallback
+  virtual int32_t RenderFrame(const uint32_t streamId,
+                              const VideoFrame& videoFrame);
+
+  // Implements AndroidStream
+  virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+  static jint JNICALL CreateOpenGLNativeStatic(
+      JNIEnv* env,
+      jobject,
+      jlong context,
+      jint width,
+      jint height);
+  jint CreateOpenGLNative(int width, int height);
+
+  static void JNICALL DrawNativeStatic(JNIEnv* env, jobject, jlong context);
+  void DrawNative();
+
+  uint32_t _id;
+  CriticalSectionWrapper& _renderCritSect;
+
+  VideoFrame _bufferToRender;
+  VideoRenderAndroid& _renderer;
+  JavaVM* _jvm;
+  jobject _javaRenderObj;
+
+  jmethodID _redrawCid;
+  jmethodID _registerNativeCID;
+  jmethodID _deRegisterNativeCID;
+  VideoRenderOpenGles20 _openGLRenderer;
+};
+
+class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
+ public:
+  AndroidNativeOpenGl2Renderer(const int32_t id,
+                               const VideoRenderType videoRenderType,
+                               void* window,
+                               const bool fullscreen);
+
+  ~AndroidNativeOpenGl2Renderer();
+  static bool UseOpenGL2(void* window);
+
+  int32_t Init();
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      int32_t streamId,
+      int32_t zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer);
+
+ private:
+  jobject _javaRenderObj;
+  jclass _javaRenderClass;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
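Worth noting in the channel above is the context round trip: Init() registers static callbacks with RegisterNatives() and hands the C++ this pointer to Java as a jlong, which Java later passes back into DrawNative()/CreateOpenGLNative() so the static trampoline can recover the object. A condensed sketch of the idea (class and method names shortened for illustration; the real tables live in the .cc above):

// Sketch: jlong-context trampoline as used by AndroidNativeOpenGl2Channel.
class Channel {
 public:
  static void JNICALL DrawNativeStatic(JNIEnv*, jobject, jlong context) {
    // The jlong is the pointer registered in Register() below.
    reinterpret_cast<Channel*>(context)->Draw();
  }
  void Draw() { /* render the pending frame */ }

  bool Register(JNIEnv* env, jclass cls, jobject render_obj) {
    JNINativeMethod methods[1] = {
        { "DrawNative", "(J)V", (void*) &Channel::DrawNativeStatic },
    };
    if (env->RegisterNatives(cls, methods, 1) != 0)
      return false;
    // Let the Java renderer remember our this pointer as a jlong.
    jmethodID register_cid = env->GetMethodID(cls, "RegisterNativeObject", "(J)V");
    if (register_cid == NULL)
      return false;
    env->CallVoidMethod(render_obj, register_cid, (jlong) this);
    return true;
  }
};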
diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.cc b/webrtc/modules/video_render/android/video_render_android_surface_view.cc
new file mode 100644
index 0000000000..ea3b106b1e
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_surface_view.cc
@@ -0,0 +1,474 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "webrtc/system_wrappers/include/trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
+    _javaRenderObj(NULL),
+    _javaRenderClass(NULL) {
+}
+
+AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidSurfaceViewRenderer dtor");
+  if (g_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__,
+                     res,
+                     env);
+        env = NULL;
+      }
+      else {
+        isAttached = true;
+      }
+    }
+    env->DeleteGlobalRef(_javaRenderObj);
+    env->DeleteGlobalRef(_javaRenderClass);
+
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+int32_t AndroidSurfaceViewRenderer::Init() {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): Not a valid Java VM pointer.",
+                 __FUNCTION__);
+    return -1;
+  }
+  if (!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): No window has been provided.",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__,
+                   res,
+                   env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  // get the ViESurfaceRender class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not find ViESurfaceRenderer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // create a global reference to the class (to tell JNI that
+  // we are referencing it after this function has returned)
+  _javaRenderClass =
+      reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
+  if (!_javaRenderClass) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java ViESurfaceRenderer class reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Delete local class ref, we only use the global ref
+  env->DeleteLocalRef(javaRenderClassLocal);
+
+  // get the method ID for the constructor
+  jmethodID cid = env->GetMethodID(_javaRenderClass,
+                                   "<init>",
+                                   "(Landroid/view/SurfaceView;)V");
+  if (cid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get constructor ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // construct the object
+  jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
+                                              cid,
+                                              _ptrWindow);
+  if (!javaRenderObjLocal) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java Render",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // create a reference to the object (to tell JNI that we are referencing it
+  // after this function has returned)
+  _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
+  if (!_javaRenderObj) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java SurfaceRender object reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
+  return 0;
+}
+
+AndroidStream*
+AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
+    int32_t streamId,
+    int32_t zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom,
+    VideoRenderAndroid& renderer) {
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: Id %d",
+               __FUNCTION__,
+               streamId);
+  AndroidSurfaceViewChannel* stream =
+      new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
+  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
+    return stream;
+  else
+    delete stream;
+  return NULL;
+}
+
+AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
+    uint32_t streamId,
+    JavaVM* jvm,
+    VideoRenderAndroid& renderer,
+    jobject javaRenderObj) :
+    _id(streamId),
+    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderer(renderer),
+    _jvm(jvm),
+    _javaRenderObj(javaRenderObj),
+#ifndef ANDROID_NDK_8_OR_ABOVE
+    _javaByteBufferObj(NULL),
+    _directBuffer(NULL),
+#endif
+    _bitmapWidth(0),
+    _bitmapHeight(0) {
+}
+
+AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
+  WEBRTC_TRACE(kTraceInfo,
+               kTraceVideoRenderer,
+               _id,
+               "AndroidSurfaceViewChannel dtor");
+  delete &_renderCritSect;
+  if (_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res =
_jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, + res, + env); + env=NULL; + } + else { + isAttached = true; + } + } + + env->DeleteGlobalRef(_javaByteBufferObj); + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + } +} + +int32_t AndroidSurfaceViewChannel::Init( + int32_t /*zOrder*/, + const float left, + const float top, + const float right, + const float bottom) { + + WEBRTC_TRACE(kTraceDebug, + kTraceVideoRenderer, + _id, + "%s: AndroidSurfaceViewChannel", + __FUNCTION__); + if (!_jvm) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Not a valid Java VM pointer", + __FUNCTION__); + return -1; + } + + if( (top > 1 || top < 0) || + (right > 1 || right < 0) || + (bottom > 1 || bottom < 0) || + (left > 1 || left < 0)) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Wrong coordinates", __FUNCTION__); + return -1; + } + + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, + res, + env); + return -1; + } + isAttached = true; + } + + jclass javaRenderClass = + env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer"); + if (!javaRenderClass) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not find ViESurfaceRenderer", + __FUNCTION__); + return -1; + } + + // get the method ID for the CreateIntArray + _createByteBufferCid = + env->GetMethodID(javaRenderClass, + "CreateByteBuffer", + "(II)Ljava/nio/ByteBuffer;"); + if (_createByteBufferCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get CreateByteBuffer ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + + // get the method ID for the DrawByteBuffer function + _drawByteBufferCid = env->GetMethodID(javaRenderClass, + "DrawByteBuffer", + "()V"); + if (_drawByteBufferCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get DrawByteBuffer ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + + // get the method ID for the SetCoordinates function + _setCoordinatesCid = env->GetMethodID(javaRenderClass, + "SetCoordinates", + "(FFFF)V"); + if (_setCoordinatesCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get SetCoordinates ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + + env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid, + left, top, right, bottom); + + // Detach this thread if it was attached + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + + WEBRTC_TRACE(kTraceDebug, + kTraceVideoRenderer, + _id, + "%s: AndroidSurfaceViewChannel done", + __FUNCTION__); + return 0; +} + +int32_t AndroidSurfaceViewChannel::RenderFrame(const uint32_t /*streamId*/, + const VideoFrame& 
videoFrame) {
+  //  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
+  _renderCritSect.Enter();
+  _bufferToRender = videoFrame;
+  _renderCritSect.Leave();
+  _renderer.ReDraw();
+  return 0;
+}
+
+/* Implements AndroidStream.
+ * Calls the Java object and renders the buffer in _bufferToRender.
+ */
+void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
+  _renderCritSect.Enter();
+
+  if (_bitmapWidth != _bufferToRender.width() ||
+      _bitmapHeight != _bufferToRender.height()) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
+                 "%d", __FUNCTION__,
+                 _bufferToRender.width(), _bufferToRender.height());
+    if (_javaByteBufferObj) {
+      jniEnv->DeleteGlobalRef(_javaByteBufferObj);
+      _javaByteBufferObj = NULL;
+      _directBuffer = NULL;
+    }
+
+    jobject javaByteBufferObj =
+        jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
+                                 _bufferToRender.width(),
+                                 _bufferToRender.height());
+    _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
+    if (!_javaByteBufferObj) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
+                   "create Java ByteBuffer object reference", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    } else {
+      _directBuffer = static_cast<unsigned char*>(
+          jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
+      _bitmapWidth = _bufferToRender.width();
+      _bitmapHeight = _bufferToRender.height();
+    }
+  }
+
+  if (_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
+    const int conversionResult =
+        ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer);
+
+    if (conversionResult < 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
+                   " failed.", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    }
+  }
+  _renderCritSect.Leave();
+  // Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
+}
+
+} // namespace webrtc
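DeliverFrame() above recreates the shared direct ByteBuffer whenever the resolution changes and then converts the pending I420 frame into it as RGB565, so the Java side must size the buffer at two bytes per pixel. A trivial sanity-check sketch (Rgb565BufferSize is an invented helper; the 16-bits-per-pixel figure follows from the RGB565 format itself):

#include <cstddef>

// Sketch: capacity the CreateByteBuffer(width, height) call must provide
// for AndroidSurfaceViewChannel. RGB565 packs each pixel into 16 bits
// (5 red, 6 green, 5 blue).
inline size_t Rgb565BufferSize(int width, int height) {
  return static_cast<size_t>(width) * static_cast<size_t>(height) * 2;
}
// Example: a 640x480 frame needs 640 * 480 * 2 = 614400 bytes.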
diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.h b/webrtc/modules/video_render/android/video_render_android_surface_view.h
new file mode 100644
index 0000000000..0f029b54f3
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_surface_view.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+
+#include <jni.h>
+
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidSurfaceViewChannel : public AndroidStream {
+ public:
+  AndroidSurfaceViewChannel(uint32_t streamId,
+                            JavaVM* jvm,
+                            VideoRenderAndroid& renderer,
+                            jobject javaRenderObj);
+  ~AndroidSurfaceViewChannel();
+
+  int32_t Init(int32_t zOrder, const float left, const float top,
+               const float right, const float bottom);
+
+  // Implements VideoRenderCallback
+  virtual int32_t RenderFrame(const uint32_t streamId,
+                              const VideoFrame& videoFrame);
+
+  // Implements AndroidStream
+  virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+  uint32_t _id;
+  CriticalSectionWrapper& _renderCritSect;
+
+  VideoFrame _bufferToRender;
+  VideoRenderAndroid& _renderer;
+  JavaVM* _jvm;
+  jobject _javaRenderObj;
+
+  jobject _javaByteBufferObj;
+  unsigned char* _directBuffer;
+  jmethodID _createByteBufferCid;
+  jmethodID _drawByteBufferCid;
+
+  jmethodID _setCoordinatesCid;
+  int _bitmapWidth;
+  int _bitmapHeight;
+};
+
+class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
+ public:
+  AndroidSurfaceViewRenderer(const int32_t id,
+                             const VideoRenderType videoRenderType,
+                             void* window,
+                             const bool fullscreen);
+  ~AndroidSurfaceViewRenderer();
+  int32_t Init();
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      int32_t streamId,
+      int32_t zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer);
+ private:
+  jobject _javaRenderObj;
+  jclass _javaRenderClass;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
diff --git a/webrtc/modules/video_render/android/video_render_opengles20.cc b/webrtc/modules/video_render/android/video_render_opengles20.cc
new file mode 100644
index 0000000000..45db56a4f6
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_opengles20.cc
@@ -0,0 +1,397 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "webrtc/modules/video_render/android/video_render_opengles20.h"
+
+//#define ANDROID_LOG
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "webrtc/system_wrappers/include/trace.h"
+#endif
+
+namespace webrtc {
+
+const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
+
+const char VideoRenderOpenGles20::g_vertextShader[] = {
+  "attribute vec4 aPosition;\n"
+  "attribute vec2 aTextureCoord;\n"
+  "varying vec2 vTextureCoord;\n"
+  "void main() {\n"
+  "  gl_Position = aPosition;\n"
+  "  vTextureCoord = aTextureCoord;\n"
+  "}\n" };
+
+// The fragment shader.
+// Do YUV to RGB565 conversion.
+const char VideoRenderOpenGles20::g_fragmentShader[] = { + "precision mediump float;\n" + "uniform sampler2D Ytex;\n" + "uniform sampler2D Utex,Vtex;\n" + "varying vec2 vTextureCoord;\n" + "void main(void) {\n" + " float nx,ny,r,g,b,y,u,v;\n" + " mediump vec4 txl,ux,vx;" + " nx=vTextureCoord[0];\n" + " ny=vTextureCoord[1];\n" + " y=texture2D(Ytex,vec2(nx,ny)).r;\n" + " u=texture2D(Utex,vec2(nx,ny)).r;\n" + " v=texture2D(Vtex,vec2(nx,ny)).r;\n" + + //" y = v;\n"+ + " y=1.1643*(y-0.0625);\n" + " u=u-0.5;\n" + " v=v-0.5;\n" + + " r=y+1.5958*v;\n" + " g=y-0.39173*u-0.81290*v;\n" + " b=y+2.017*u;\n" + " gl_FragColor=vec4(r,g,b,1.0);\n" + "}\n" }; + +VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) : + _id(id), + _textureWidth(-1), + _textureHeight(-1) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", + __FUNCTION__, (int) _id); + + const GLfloat vertices[20] = { + // X, Y, Z, U, V + -1, -1, 0, 0, 1, // Bottom Left + 1, -1, 0, 1, 1, //Bottom Right + 1, 1, 0, 1, 0, //Top Right + -1, 1, 0, 0, 0 }; //Top Left + + memcpy(_vertices, vertices, sizeof(_vertices)); +} + +VideoRenderOpenGles20::~VideoRenderOpenGles20() { +} + +int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: width %d, height %d", __FUNCTION__, (int) width, + (int) height); + + printGLString("Version", GL_VERSION); + printGLString("Vendor", GL_VENDOR); + printGLString("Renderer", GL_RENDERER); + printGLString("Extensions", GL_EXTENSIONS); + + int maxTextureImageUnits[2]; + int maxTextureSize[2]; + glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits); + glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize); + + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: number of textures %d, size %d", __FUNCTION__, + (int) maxTextureImageUnits[0], (int) maxTextureSize[0]); + + _program = createProgram(g_vertextShader, g_fragmentShader); + if (!_program) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not create program", __FUNCTION__); + return -1; + } + + int positionHandle = glGetAttribLocation(_program, "aPosition"); + checkGlError("glGetAttribLocation aPosition"); + if (positionHandle == -1) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not get aPosition handle", __FUNCTION__); + return -1; + } + + int textureHandle = glGetAttribLocation(_program, "aTextureCoord"); + checkGlError("glGetAttribLocation aTextureCoord"); + if (textureHandle == -1) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not get aTextureCoord handle", __FUNCTION__); + return -1; + } + + // set the vertices array in the shader + // _vertices contains 4 vertices with 5 coordinates. + // 3 for (xyz) for the vertices and 2 for the texture + glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, + 5 * sizeof(GLfloat), _vertices); + checkGlError("glVertexAttribPointer aPosition"); + + glEnableVertexAttribArray(positionHandle); + checkGlError("glEnableVertexAttribArray positionHandle"); + + // set the texture coordinate array in the shader + // _vertices contains 4 vertices with 5 coordinates. 
+ // 3 for (xyz) for the vertices and 2 for the texture + glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5 + * sizeof(GLfloat), &_vertices[3]); + checkGlError("glVertexAttribPointer maTextureHandle"); + glEnableVertexAttribArray(textureHandle); + checkGlError("glEnableVertexAttribArray textureHandle"); + + glUseProgram(_program); + int i = glGetUniformLocation(_program, "Ytex"); + checkGlError("glGetUniformLocation"); + glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */ + checkGlError("glUniform1i Ytex"); + + i = glGetUniformLocation(_program, "Utex"); + checkGlError("glGetUniformLocation Utex"); + glUniform1i(i, 1); /* Bind Utex to texture unit 1 */ + checkGlError("glUniform1i Utex"); + + i = glGetUniformLocation(_program, "Vtex"); + checkGlError("glGetUniformLocation"); + glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */ + checkGlError("glUniform1i"); + + glViewport(0, 0, width, height); + checkGlError("glViewport"); + return 0; +} + +// SetCoordinates +// Sets the coordinates where the stream shall be rendered. +// Values must be between 0 and 1. +int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder, + const float left, + const float top, + const float right, + const float bottom) { + if ((top > 1 || top < 0) || (right > 1 || right < 0) || + (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Wrong coordinates", __FUNCTION__); + return -1; + } + + // X, Y, Z, U, V + // -1, -1, 0, 0, 1, // Bottom Left + // 1, -1, 0, 1, 1, //Bottom Right + // 1, 1, 0, 1, 0, //Top Right + // -1, 1, 0, 0, 0 //Top Left + + // Bottom Left + _vertices[0] = (left * 2) - 1; + _vertices[1] = -1 * (2 * bottom) + 1; + _vertices[2] = zOrder; + + //Bottom Right + _vertices[5] = (right * 2) - 1; + _vertices[6] = -1 * (2 * bottom) + 1; + _vertices[7] = zOrder; + + //Top Right + _vertices[10] = (right * 2) - 1; + _vertices[11] = -1 * (2 * top) + 1; + _vertices[12] = zOrder; + + //Top Left + _vertices[15] = (left * 2) - 1; + _vertices[16] = -1 * (2 * top) + 1; + _vertices[17] = zOrder; + + return 0; +} + +int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) { + if (frameToRender.IsZeroSize()) { + return -1; + } + + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", + __FUNCTION__, (int) _id); + + glUseProgram(_program); + checkGlError("glUseProgram"); + + if (_textureWidth != (GLsizei) frameToRender.width() || + _textureHeight != (GLsizei) frameToRender.height()) { + SetupTextures(frameToRender); + } + UpdateTextures(frameToRender); + + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices); + checkGlError("glDrawArrays"); + + return 0; +} + +GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType, + const char* pSource) { + GLuint shader = glCreateShader(shaderType); + if (shader) { + glShaderSource(shader, 1, &pSource, NULL); + glCompileShader(shader); + GLint compiled = 0; + glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); + if (!compiled) { + GLint infoLen = 0; + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); + if (infoLen) { + char* buf = (char*) malloc(infoLen); + if (buf) { + glGetShaderInfoLog(shader, infoLen, NULL, buf); + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not compile shader %d: %s", + __FUNCTION__, shaderType, buf); + free(buf); + } + glDeleteShader(shader); + shader = 0; + } + } + } + return shader; +} + +GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource, + const char* pFragmentSource) { + GLuint vertexShader = 
loadShader(GL_VERTEX_SHADER, pVertexSource);
+  if (!vertexShader) {
+    return 0;
+  }
+
+  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
+  if (!pixelShader) {
+    return 0;
+  }
+
+  GLuint program = glCreateProgram();
+  if (program) {
+    glAttachShader(program, vertexShader);
+    checkGlError("glAttachShader");
+    glAttachShader(program, pixelShader);
+    checkGlError("glAttachShader");
+    glLinkProgram(program);
+    GLint linkStatus = GL_FALSE;
+    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+    if (linkStatus != GL_TRUE) {
+      GLint bufLength = 0;
+      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+      if (bufLength) {
+        char* buf = (char*) malloc(bufLength);
+        if (buf) {
+          glGetProgramInfoLog(program, bufLength, NULL, buf);
+          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                       "%s: Could not link program: %s",
+                       __FUNCTION__, buf);
+          free(buf);
+        }
+      }
+      glDeleteProgram(program);
+      program = 0;
+    }
+  }
+  return program;
+}
+
+void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
+  const char *v = (const char *) glGetString(s);
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
+               name, v);
+}
+
+void VideoRenderOpenGles20::checkGlError(const char* op) {
+#ifdef ANDROID_LOG
+  for (GLint error = glGetError(); error; error = glGetError()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "after %s() glError (0x%x)\n", op, error);
+  }
+#else
+  return;
+#endif
+}
+
+static void InitializeTexture(int name, int id, int width, int height) {
+  glActiveTexture(name);
+  glBindTexture(GL_TEXTURE_2D, id);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
+               GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
+}
+
+void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: width %d, height %d", __FUNCTION__,
+               frameToRender.width(), frameToRender.height());
+
+  const GLsizei width = frameToRender.width();
+  const GLsizei height = frameToRender.height();
+
+  glGenTextures(3, _textureIds); // Generate the Y, U and V texture
+  InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height);
+  InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2);
+  InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2);
+
+  checkGlError("SetupTextures");
+
+  _textureWidth = width;
+  _textureHeight = height;
+}
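SetupTextures() above allocates one full-resolution luminance texture for Y and two quarter-area ones for U and V, which mirrors the I420 (4:2:0) layout: chroma is subsampled by two in each dimension. The arithmetic, as a sketch (I420FrameSize is an invented helper, valid for even dimensions and stride == width):

#include <cstddef>

// Sketch: total bytes in an I420 frame, matching the three textures above.
inline size_t I420FrameSize(size_t width, size_t height) {
  const size_t y = width * height;               // full-resolution luma
  const size_t uv = (width / 2) * (height / 2);  // one chroma plane
  return y + 2 * uv;  // == width * height * 3 / 2
}
// Example: 640x480 -> 307200 + 2 * 76800 = 460800 bytes.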
+
+// Uploads a plane of pixel data, accounting for stride != width*bpp.
+static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
+                            const uint8_t* plane) {
+  if (stride == width) {
+    // Yay! We can upload the entire plane in a single GL call.
+    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
+                    GL_UNSIGNED_BYTE,
+                    static_cast<const GLvoid*>(plane));
+  } else {
+    // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
+    // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
+    for (int row = 0; row < height; ++row) {
+      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
+                      GL_UNSIGNED_BYTE,
+                      static_cast<const GLvoid*>(plane + (row * stride)));
+    }
+  }
+}
+
+void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
+  const GLsizei width = frameToRender.width();
+  const GLsizei height = frameToRender.height();
+
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
+  GlTexSubImage2D(width, height, frameToRender.stride(kYPlane),
+                  frameToRender.buffer(kYPlane));
+
+  glActiveTexture(GL_TEXTURE1);
+  glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
+  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
+                  frameToRender.buffer(kUPlane));
+
+  glActiveTexture(GL_TEXTURE2);
+  glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
+  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane),
+                  frameToRender.buffer(kVPlane));
+
+  checkGlError("UpdateTextures");
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_opengles20.h b/webrtc/modules/video_render/android/video_render_opengles20.h
new file mode 100644
index 0000000000..57e2a10d42
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_opengles20.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace webrtc
+{
+
+class VideoRenderOpenGles20 {
+ public:
+  VideoRenderOpenGles20(int32_t id);
+  ~VideoRenderOpenGles20();
+
+  int32_t Setup(int32_t width, int32_t height);
+  int32_t Render(const VideoFrame& frameToRender);
+  int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
+                         const float right, const float bottom);
+
+ private:
+  void printGLString(const char *name, GLenum s);
+  void checkGlError(const char* op);
+  GLuint loadShader(GLenum shaderType, const char* pSource);
+  GLuint createProgram(const char* pVertexSource,
+                       const char* pFragmentSource);
+  void SetupTextures(const VideoFrame& frameToRender);
+  void UpdateTextures(const VideoFrame& frameToRender);
+
+  int32_t _id;
+  GLuint _textureIds[3]; // Texture id of Y, U and V texture.
+  GLuint _program;
+  GLsizei _textureWidth;
+  GLsizei _textureHeight;
+
+  GLfloat _vertices[20];
+  static const char g_indices[];
+
+  static const char g_vertextShader[];
+  static const char g_fragmentShader[];
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
diff --git a/webrtc/modules/video_render/external/video_render_external_impl.cc b/webrtc/modules/video_render/external/video_render_external_impl.cc
new file mode 100644
index 0000000000..58df07875e
--- /dev/null
+++ b/webrtc/modules/video_render/external/video_render_external_impl.cc
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/video_render/external/video_render_external_impl.h" + +namespace webrtc { + +VideoRenderExternalImpl::VideoRenderExternalImpl( + const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) : + _critSect(*CriticalSectionWrapper::CreateCriticalSection()), + _fullscreen(fullscreen) +{ +} + +VideoRenderExternalImpl::~VideoRenderExternalImpl() +{ + delete &_critSect; +} + +int32_t VideoRenderExternalImpl::Init() +{ + return 0; +} + +int32_t VideoRenderExternalImpl::ChangeWindow(void* window) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +VideoRenderCallback* +VideoRenderExternalImpl::AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + CriticalSectionScoped cs(&_critSect); + return this; +} + +int32_t VideoRenderExternalImpl::DeleteIncomingRenderStream( + const uint32_t streamId) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::GetIncomingRenderStreamProperties( + const uint32_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom) const +{ + CriticalSectionScoped cs(&_critSect); + + zOrder = 0; + left = 0; + top = 0; + right = 0; + bottom = 0; + + return 0; +} + +int32_t VideoRenderExternalImpl::StartRender() +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::StopRender() +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +VideoRenderType VideoRenderExternalImpl::RenderType() +{ + return kRenderExternal; +} + +RawVideoType VideoRenderExternalImpl::PerferedVideoType() +{ + return kVideoI420; +} + +bool VideoRenderExternalImpl::FullScreen() +{ + CriticalSectionScoped cs(&_critSect); + return _fullscreen; +} + +int32_t VideoRenderExternalImpl::GetGraphicsMemory( + uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const +{ + totalGraphicsMemory = 0; + availableGraphicsMemory = 0; + return -1; +} + +int32_t VideoRenderExternalImpl::GetScreenResolution( + uint32_t& screenWidth, + uint32_t& screenHeight) const +{ + CriticalSectionScoped cs(&_critSect); + screenWidth = 0; + screenHeight = 0; + return 0; +} + +uint32_t VideoRenderExternalImpl::RenderFrameRate( + const uint32_t streamId) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::SetStreamCropping( + const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::ConfigureRenderer( + const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::SetTransparentBackground( + const bool enable) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::SetText( + const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + 
const float left, + const float top, + const float right, + const float bottom) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, + const float top, + const float right, + const float bottom) +{ + CriticalSectionScoped cs(&_critSect); + return 0; +} + +// VideoRenderCallback +int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId, + const VideoFrame& videoFrame) { + return 0; +} +} // namespace webrtc diff --git a/webrtc/modules/video_render/external/video_render_external_impl.h b/webrtc/modules/video_render/external/video_render_external_impl.h new file mode 100644 index 0000000000..a8b663fff7 --- /dev/null +++ b/webrtc/modules/video_render/external/video_render_external_impl.h @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_ + +#include "webrtc/modules/include/module_common_types.h" +#include "webrtc/modules/video_render/i_video_render.h" +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" + +namespace webrtc { + +// Class definitions +class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback +{ +public: + /* + * Constructor/destructor + */ + + VideoRenderExternalImpl(const int32_t id, + const VideoRenderType videoRenderType, + void* window, const bool fullscreen); + + virtual ~VideoRenderExternalImpl(); + + virtual int32_t Init(); + + virtual int32_t ChangeWindow(void* window); + + /************************************************************************** + * + * Incoming Streams + * + ***************************************************************************/ + + virtual VideoRenderCallback + * AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t + DeleteIncomingRenderStream(const uint32_t streamId); + + virtual int32_t + GetIncomingRenderStreamProperties(const uint32_t streamId, + uint32_t& zOrder, + float& left, float& top, + float& right, float& bottom) const; + + /************************************************************************** + * + * Start/Stop + * + ***************************************************************************/ + + virtual int32_t StartRender(); + + virtual int32_t StopRender(); + + /************************************************************************** + * + * Properties + * + ***************************************************************************/ + + virtual VideoRenderType RenderType(); + + virtual RawVideoType PerferedVideoType(); + + virtual bool FullScreen(); + + virtual int32_t + GetGraphicsMemory(uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const; + + virtual int32_t + GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const; + + virtual uint32_t RenderFrameRate(const uint32_t streamId); + + virtual int32_t SetStreamCropping(const uint32_t 
streamId, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t SetTransparentBackground(const bool enable); + + virtual int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, const float left, + const float top, const float right, + const float bottom); + + // VideoRenderCallback + virtual int32_t RenderFrame(const uint32_t streamId, + const VideoFrame& videoFrame); + +private: + CriticalSectionWrapper& _critSect; + bool _fullscreen; +}; + +} // namespace webrtc + + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_ diff --git a/webrtc/modules/video_render/i_video_render.h b/webrtc/modules/video_render/i_video_render.h new file mode 100644 index 0000000000..e6ec7a4680 --- /dev/null +++ b/webrtc/modules/video_render/i_video_render.h @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_ + +#include "webrtc/modules/video_render/video_render.h" + +namespace webrtc { + +// Class definitions +class IVideoRender +{ +public: + /* + * Constructor/destructor + */ + + virtual ~IVideoRender() {} + + virtual int32_t Init() = 0; + + virtual int32_t ChangeWindow(void* window) = 0; + + /************************************************************************** + * + * Incoming Streams + * + ***************************************************************************/ + + virtual VideoRenderCallback + * AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, + const float top, + const float right, + const float bottom) = 0; + + virtual int32_t + DeleteIncomingRenderStream(const uint32_t streamId) = 0; + + virtual int32_t + GetIncomingRenderStreamProperties(const uint32_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom) const = 0; + // Implemented in common code? 
+  //virtual uint32_t GetNumIncomingRenderStreams() const = 0;
+  //virtual bool HasIncomingRenderStream(const uint16_t streamId) const = 0;
+
+
+  /**************************************************************************
+   *
+   *   Start/Stop
+   *
+   ***************************************************************************/
+
+  virtual int32_t StartRender() = 0;
+
+  virtual int32_t StopRender() = 0;
+
+  /**************************************************************************
+   *
+   *   Properties
+   *
+   ***************************************************************************/
+  virtual VideoRenderType RenderType() = 0;
+
+  virtual RawVideoType PerferedVideoType() = 0;
+
+  virtual bool FullScreen() = 0;
+
+  // TODO: This should be treated in platform specific code only
+  virtual int32_t
+      GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+                        uint64_t& availableGraphicsMemory) const = 0;
+
+  virtual int32_t
+      GetScreenResolution(uint32_t& screenWidth,
+                          uint32_t& screenHeight) const = 0;
+
+  virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
+
+  virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) = 0;
+
+  virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                    const unsigned int zOrder,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) = 0;
+
+  virtual int32_t SetTransparentBackground(const bool enable) = 0;
+
+  virtual int32_t SetText(const uint8_t textId,
+                          const uint8_t* text,
+                          const int32_t textLength,
+                          const uint32_t textColorRef,
+                          const uint32_t backgroundColorRef,
+                          const float left,
+                          const float top,
+                          const float right,
+                          const float bottom) = 0;
+
+  virtual int32_t SetBitmap(const void* bitMap,
+                            const uint8_t pictureId,
+                            const void* colorKey,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) = 0;
+
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
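Both GLES renderers in this patch take stream rectangles as window fractions in [0,1] with the origin at the top-left, and their SetCoordinates() implementations map these into OpenGL normalized device coordinates in [-1,1], flipping the y axis. Written out as a sketch (helper names invented; the arithmetic matches the vertex updates in video_render_opengles20.cc and open_gles20.mm):

// Sketch: the [0,1] window-fraction to NDC mapping used by SetCoordinates().
inline float ToNdcX(float fraction) { return 2.0f * fraction - 1.0f; }
inline float ToNdcY(float fraction) { return 1.0f - 2.0f * fraction; }
// Example: left = 0.25 -> x = -0.5; top = 0.0 -> y = 1.0 (top of viewport).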
diff --git a/webrtc/modules/video_render/ios/open_gles20.h b/webrtc/modules/video_render/ios/open_gles20.h
new file mode 100644
index 0000000000..880ddb5231
--- /dev/null
+++ b/webrtc/modules/video_render/ios/open_gles20.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
+
+#include <OpenGLES/ES2/glext.h>
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+/*
+ * OpenGles20 renders VideoFrames into a GLES 2.0 window; it is used by
+ * the VideoRenderIosView class.
+ */
+namespace webrtc {
+class OpenGles20 {
+ public:
+  OpenGles20();
+  ~OpenGles20();
+
+  bool Setup(int32_t width, int32_t height);
+  bool Render(const VideoFrame& frame);
+
+  // SetCoordinates
+  // Sets the coordinates where the stream shall be rendered.
+  // Values must be between 0 and 1.
+  bool SetCoordinates(const float z_order,
+                      const float left,
+                      const float top,
+                      const float right,
+                      const float bottom);
+
+ private:
+  // Compile and load the vertex and fragment shaders defined at the top of
+  // open_gles20.mm
+  GLuint LoadShader(GLenum shader_type, const char* shader_source);
+
+  GLuint CreateProgram(const char* vertex_source, const char* fragment_source);
+
+  // Initialize the textures by the frame width and height
+  void SetupTextures(const VideoFrame& frame);
+
+  // Update the textures by the YUV data from the frame
+  void UpdateTextures(const VideoFrame& frame);
+
+  GLuint texture_ids_[3];  // Texture id of Y,U and V texture.
+  GLuint program_;
+  GLsizei texture_width_;
+  GLsizei texture_height_;
+
+  GLfloat vertices_[20];
+  static const char indices_[];
+  static const char vertext_shader_[];
+  static const char fragment_shader_[];
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
diff --git a/webrtc/modules/video_render/ios/open_gles20.mm b/webrtc/modules/video_render/ios/open_gles20.mm
new file mode 100644
index 0000000000..d1735280f2
--- /dev/null
+++ b/webrtc/modules/video_render/ios/open_gles20.mm
@@ -0,0 +1,330 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+// This file is mostly copied from
+// webrtc/modules/video_render/android/video_render_opengles20.h
+
+// TODO(sjlee): unify this copy with the android one.
+#include "webrtc/modules/video_render/ios/open_gles20.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+const char OpenGles20::indices_[] = {0, 3, 2, 0, 2, 1};
+
+const char OpenGles20::vertext_shader_[] = {
+  "attribute vec4 aPosition;\n"
+  "attribute vec2 aTextureCoord;\n"
+  "varying vec2 vTextureCoord;\n"
+  "void main() {\n"
+  "  gl_Position = aPosition;\n"
+  "  vTextureCoord = aTextureCoord;\n"
+  "}\n"};
+
+// The fragment shader.
+// Do YUV to RGB565 conversion.
+const char OpenGles20::fragment_shader_[] = { + "precision mediump float;\n" + "uniform sampler2D Ytex;\n" + "uniform sampler2D Utex,Vtex;\n" + "varying vec2 vTextureCoord;\n" + "void main(void) {\n" + " float nx,ny,r,g,b,y,u,v;\n" + " mediump vec4 txl,ux,vx;" + " nx=vTextureCoord[0];\n" + " ny=vTextureCoord[1];\n" + " y=texture2D(Ytex,vec2(nx,ny)).r;\n" + " u=texture2D(Utex,vec2(nx,ny)).r;\n" + " v=texture2D(Vtex,vec2(nx,ny)).r;\n" + " y=1.1643*(y-0.0625);\n" + " u=u-0.5;\n" + " v=v-0.5;\n" + " r=y+1.5958*v;\n" + " g=y-0.39173*u-0.81290*v;\n" + " b=y+2.017*u;\n" + " gl_FragColor=vec4(r,g,b,1.0);\n" + "}\n"}; + +OpenGles20::OpenGles20() : texture_width_(-1), texture_height_(-1) { + texture_ids_[0] = 0; + texture_ids_[1] = 0; + texture_ids_[2] = 0; + + program_ = 0; + + const GLfloat vertices[20] = { + // X, Y, Z, U, V + -1, -1, 0, 0, 1, // Bottom Left + 1, -1, 0, 1, 1, // Bottom Right + 1, 1, 0, 1, 0, // Top Right + -1, 1, 0, 0, 0}; // Top Left + + memcpy(vertices_, vertices, sizeof(vertices_)); +} + +OpenGles20::~OpenGles20() { + if (program_) { + glDeleteTextures(3, texture_ids_); + glDeleteProgram(program_); + } +} + +bool OpenGles20::Setup(int32_t width, int32_t height) { + program_ = CreateProgram(vertext_shader_, fragment_shader_); + if (!program_) { + return false; + } + + int position_handle = glGetAttribLocation(program_, "aPosition"); + int texture_handle = glGetAttribLocation(program_, "aTextureCoord"); + + // set the vertices array in the shader + // vertices_ contains 4 vertices with 5 coordinates. + // 3 for (xyz) for the vertices and 2 for the texture + glVertexAttribPointer( + position_handle, 3, GL_FLOAT, false, 5 * sizeof(GLfloat), vertices_); + + glEnableVertexAttribArray(position_handle); + + // set the texture coordinate array in the shader + // vertices_ contains 4 vertices with 5 coordinates. 
+ // 3 for (xyz) for the vertices and 2 for the texture + glVertexAttribPointer( + texture_handle, 2, GL_FLOAT, false, 5 * sizeof(GLfloat), &vertices_[3]); + glEnableVertexAttribArray(texture_handle); + + glUseProgram(program_); + int i = glGetUniformLocation(program_, "Ytex"); + glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */ + + i = glGetUniformLocation(program_, "Utex"); + glUniform1i(i, 1); /* Bind Utex to texture unit 1 */ + + i = glGetUniformLocation(program_, "Vtex"); + glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */ + + glViewport(0, 0, width, height); + return true; +} + +bool OpenGles20::SetCoordinates(const float z_order, + const float left, + const float top, + const float right, + const float bottom) { + if (top > 1 || top < 0 || right > 1 || right < 0 || bottom > 1 || + bottom < 0 || left > 1 || left < 0) { + return false; + } + + // Bottom Left + vertices_[0] = (left * 2) - 1; + vertices_[1] = -1 * (2 * bottom) + 1; + vertices_[2] = z_order; + + // Bottom Right + vertices_[5] = (right * 2) - 1; + vertices_[6] = -1 * (2 * bottom) + 1; + vertices_[7] = z_order; + + // Top Right + vertices_[10] = (right * 2) - 1; + vertices_[11] = -1 * (2 * top) + 1; + vertices_[12] = z_order; + + // Top Left + vertices_[15] = (left * 2) - 1; + vertices_[16] = -1 * (2 * top) + 1; + vertices_[17] = z_order; + + return true; +} + +bool OpenGles20::Render(const VideoFrame& frame) { + if (texture_width_ != (GLsizei)frame.width() || + texture_height_ != (GLsizei)frame.height()) { + SetupTextures(frame); + } + UpdateTextures(frame); + + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices_); + + return true; +} + +GLuint OpenGles20::LoadShader(GLenum shader_type, const char* shader_source) { + GLuint shader = glCreateShader(shader_type); + if (shader) { + glShaderSource(shader, 1, &shader_source, NULL); + glCompileShader(shader); + + GLint compiled = 0; + glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); + if (!compiled) { + GLint info_len = 0; + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len); + if (info_len) { + char* buf = (char*)malloc(info_len); + glGetShaderInfoLog(shader, info_len, NULL, buf); + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + 0, + "%s: Could not compile shader %d: %s", + __FUNCTION__, + shader_type, + buf); + free(buf); + } + glDeleteShader(shader); + shader = 0; + } + } + return shader; +} + +GLuint OpenGles20::CreateProgram(const char* vertex_source, + const char* fragment_source) { + GLuint vertex_shader = LoadShader(GL_VERTEX_SHADER, vertex_source); + if (!vertex_shader) { + return -1; + } + + GLuint fragment_shader = LoadShader(GL_FRAGMENT_SHADER, fragment_source); + if (!fragment_shader) { + return -1; + } + + GLuint program = glCreateProgram(); + if (program) { + glAttachShader(program, vertex_shader); + glAttachShader(program, fragment_shader); + glLinkProgram(program); + GLint link_status = GL_FALSE; + glGetProgramiv(program, GL_LINK_STATUS, &link_status); + if (link_status != GL_TRUE) { + GLint info_len = 0; + glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_len); + if (info_len) { + char* buf = (char*)malloc(info_len); + glGetProgramInfoLog(program, info_len, NULL, buf); + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + 0, + "%s: Could not link program: %s", + __FUNCTION__, + buf); + free(buf); + } + glDeleteProgram(program); + program = 0; + } + } + + if (vertex_shader) { + glDeleteShader(vertex_shader); + } + + if (fragment_shader) { + glDeleteShader(fragment_shader); + } + + return program; +} + +static void InitializeTexture(int 
name, int id, int width, int height) {
+  glActiveTexture(name);
+  glBindTexture(GL_TEXTURE_2D, id);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+  glTexImage2D(GL_TEXTURE_2D,
+               0,
+               GL_LUMINANCE,
+               width,
+               height,
+               0,
+               GL_LUMINANCE,
+               GL_UNSIGNED_BYTE,
+               NULL);
+}
+
+void OpenGles20::SetupTextures(const VideoFrame& frame) {
+  const GLsizei width = frame.width();
+  const GLsizei height = frame.height();
+
+  if (!texture_ids_[0]) {
+    glGenTextures(3, texture_ids_);  // Generate the Y, U and V texture
+  }
+
+  InitializeTexture(GL_TEXTURE0, texture_ids_[0], width, height);
+  InitializeTexture(GL_TEXTURE1, texture_ids_[1], width / 2, height / 2);
+  InitializeTexture(GL_TEXTURE2, texture_ids_[2], width / 2, height / 2);
+
+  texture_width_ = width;
+  texture_height_ = height;
+}
+
+// Uploads a plane of pixel data, accounting for stride != width*bpp.
+static void GlTexSubImage2D(GLsizei width,
+                            GLsizei height,
+                            int stride,
+                            const uint8_t* plane) {
+  if (stride == width) {
+    // Yay! We can upload the entire plane in a single GL call.
+    glTexSubImage2D(GL_TEXTURE_2D,
+                    0,
+                    0,
+                    0,
+                    width,
+                    height,
+                    GL_LUMINANCE,
+                    GL_UNSIGNED_BYTE,
+                    static_cast<const GLvoid*>(plane));
+  } else {
+    // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and iOS doesn't
+    // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
+    for (int row = 0; row < height; ++row) {
+      glTexSubImage2D(GL_TEXTURE_2D,
+                      0,
+                      0,
+                      row,
+                      width,
+                      1,
+                      GL_LUMINANCE,
+                      GL_UNSIGNED_BYTE,
+                      static_cast<const GLvoid*>(plane + (row * stride)));
+    }
+  }
+}
+
+void OpenGles20::UpdateTextures(const VideoFrame& frame) {
+  const GLsizei width = frame.width();
+  const GLsizei height = frame.height();
+
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, texture_ids_[0]);
+  GlTexSubImage2D(width, height, frame.stride(kYPlane), frame.buffer(kYPlane));
+
+  glActiveTexture(GL_TEXTURE1);
+  glBindTexture(GL_TEXTURE_2D, texture_ids_[1]);
+  GlTexSubImage2D(
+      width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
+
+  glActiveTexture(GL_TEXTURE2);
+  glBindTexture(GL_TEXTURE_2D, texture_ids_[2]);
+  GlTexSubImage2D(
+      width / 2, height / 2, frame.stride(kVPlane), frame.buffer(kVPlane));
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.h b/webrtc/modules/video_render/ios/video_render_ios_channel.h
new file mode 100644
index 0000000000..a15ba393dc
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_channel.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_ + +#include "webrtc/modules/video_render/video_render_defines.h" +#include "webrtc/modules/video_render/ios/video_render_ios_view.h" + +namespace webrtc { + +class VideoRenderIosGles20; + +class VideoRenderIosChannel : public VideoRenderCallback { + public: + explicit VideoRenderIosChannel(VideoRenderIosView* view); + virtual ~VideoRenderIosChannel(); + + // Implementation of VideoRenderCallback. + int32_t RenderFrame(const uint32_t stream_id, + const VideoFrame& video_frame) override; + + int SetStreamSettings(const float z_order, + const float left, + const float top, + const float right, + const float bottom); + bool IsUpdated(); + bool RenderOffScreenBuffer(); + + private: + VideoRenderIosView* view_; + VideoFrame* current_frame_; + bool buffer_is_updated_; +}; + +} // namespace webrtc +#endif // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_ diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/webrtc/modules/video_render/ios/video_render_ios_channel.mm new file mode 100644 index 0000000000..b2b15857f9 --- /dev/null +++ b/webrtc/modules/video_render/ios/video_render_ios_channel.mm @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#if !defined(__has_feature) || !__has_feature(objc_arc) +#error "This file requires ARC support." +#endif + +#include "webrtc/modules/video_render/ios/video_render_ios_channel.h" + +using namespace webrtc; + +VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view) + : view_(view), current_frame_(new VideoFrame()), buffer_is_updated_(false) { +} + +VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; } + +int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id, + const VideoFrame& video_frame) { + current_frame_->CopyFrame(video_frame); + current_frame_->set_render_time_ms(0); + buffer_is_updated_ = true; + + return 0; +} + +bool VideoRenderIosChannel::RenderOffScreenBuffer() { + if (![view_ renderFrame:current_frame_]) { + return false; + } + + buffer_is_updated_ = false; + + return true; +} + +bool VideoRenderIosChannel::IsUpdated() { return buffer_is_updated_; } + +int VideoRenderIosChannel::SetStreamSettings(const float z_order, + const float left, + const float top, + const float right, + const float bottom) { + if (![view_ setCoordinatesForZOrder:z_order + Left:left + Top:bottom + Right:right + Bottom:top]) { + + return -1; + } + + return 0; +} diff --git a/webrtc/modules/video_render/ios/video_render_ios_gles20.h b/webrtc/modules/video_render/ios/video_render_ios_gles20.h new file mode 100644 index 0000000000..d4e04e79d7 --- /dev/null +++ b/webrtc/modules/video_render/ios/video_render_ios_gles20.h @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
+
+#include <list>
+#include <map>
+#include <memory>
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
+#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EventTimerWrapper;
+
+class VideoRenderIosGles20 {
+ public:
+  VideoRenderIosGles20(VideoRenderIosView* view,
+                       bool full_screen,
+                       int render_id);
+  virtual ~VideoRenderIosGles20();
+
+  int Init();
+  VideoRenderIosChannel* CreateEaglChannel(int channel,
+                                           int z_order,
+                                           float left,
+                                           float top,
+                                           float right,
+                                           float bottom);
+  int DeleteEaglChannel(int channel);
+  bool HasChannel(int channel);
+  bool ScreenUpdateProcess();
+  int GetWindowRect(Rect& rect);  // NOLINT
+
+  int GetScreenResolution(uint& screen_width, uint& screen_height);  // NOLINT
+  int SetStreamCropping(const uint stream_id,
+                        const float left,
+                        const float top,
+                        const float right,
+                        const float bottom);
+
+  int ChangeWindow(void* new_window);
+  int StartRender();
+  int StopRender();
+
+ protected:
+  static bool ScreenUpdateThreadProc(void* obj);
+
+ private:
+  bool RenderOffScreenBuffers();
+  int SwapAndDisplayBuffers();
+
+ private:
+  std::unique_ptr<CriticalSectionWrapper> gles_crit_sec_;
+  EventTimerWrapper* screen_update_event_;
+  // TODO(pbos): Remove unique_ptr and use member directly.
+  std::unique_ptr<rtc::PlatformThread> screen_update_thread_;
+
+  VideoRenderIosView* view_;
+  Rect window_rect_;
+  int window_width_;
+  int window_height_;
+  bool is_full_screen_;
+  GLint backing_width_;
+  GLint backing_height_;
+  GLuint view_renderbuffer_;
+  GLuint view_framebuffer_;
+  GLuint depth_renderbuffer_;
+  std::map<int, VideoRenderIosChannel*> agl_channels_;
+  std::multimap<int, int> z_order_to_channel_;
+  EAGLContext* gles_context_;
+  bool is_rendering_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
new file mode 100644
index 0000000000..6ad5db8b8c
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
@@ -0,0 +1,285 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+
+using namespace webrtc;
+
+VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
+                                           bool full_screen,
+                                           int render_id)
+    : gles_crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
+      screen_update_event_(0),
+      view_(view),
+      window_rect_(),
+      window_width_(0),
+      window_height_(0),
+      is_full_screen_(full_screen),
+      agl_channels_(),
+      z_order_to_channel_(),
+      gles_context_([view context]),
+      is_rendering_(true) {
+  screen_update_thread_.reset(new rtc::PlatformThread(
+      ScreenUpdateThreadProc, this, "ScreenUpdateGles20"));
+  screen_update_event_ = EventTimerWrapper::Create();
+  GetWindowRect(window_rect_);
+}
+
+VideoRenderIosGles20::~VideoRenderIosGles20() {
+  // Signal event to exit thread, then delete it
+  rtc::PlatformThread* thread_wrapper = screen_update_thread_.release();
+
+  if (thread_wrapper) {
+    screen_update_event_->Set();
+    screen_update_event_->StopTimer();
+
+    thread_wrapper->Stop();
+    delete thread_wrapper;
+    delete screen_update_event_;
+    screen_update_event_ = NULL;
+    is_rendering_ = FALSE;
+  }
+
+  // Delete all channels
+  std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
+  while (it != agl_channels_.end()) {
+    delete it->second;
+    agl_channels_.erase(it);
+    it = agl_channels_.begin();
+  }
+  agl_channels_.clear();
+
+  // Clean the zOrder map
+  std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
+  while (z_it != z_order_to_channel_.end()) {
+    z_order_to_channel_.erase(z_it);
+    z_it = z_order_to_channel_.begin();
+  }
+  z_order_to_channel_.clear();
+}
+
+int VideoRenderIosGles20::Init() {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (!view_) {
+    view_ = [[VideoRenderIosView alloc] init];
+  }
+
+  if (![view_ createContext]) {
+    return -1;
+  }
+
+  screen_update_thread_->Start();
+  screen_update_thread_->SetPriority(rtc::kRealtimePriority);
+
+  // Start the event triggering the render process
+  unsigned int monitor_freq = 60;
+  screen_update_event_->StartTimer(true, 1000 / monitor_freq);
+
+  window_width_ = window_rect_.right - window_rect_.left;
+  window_height_ = window_rect_.bottom - window_rect_.top;
+
+  return 0;
+}
+
+VideoRenderIosChannel* VideoRenderIosGles20::CreateEaglChannel(int channel,
+                                                               int z_order,
+                                                               float left,
+                                                               float top,
+                                                               float right,
+                                                               float bottom) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (HasChannel(channel)) {
+    return NULL;
+  }
+
+  VideoRenderIosChannel* new_eagl_channel = new VideoRenderIosChannel(view_);
+
+  if (new_eagl_channel->SetStreamSettings(z_order, left, top, right, bottom) ==
+      -1) {
+    return NULL;
+  }
+
+  agl_channels_[channel] = new_eagl_channel;
+  z_order_to_channel_.insert(std::pair<int, int>(z_order, channel));
+
+  return new_eagl_channel;
+}
+
+int VideoRenderIosGles20::DeleteEaglChannel(int channel) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  std::map<int, VideoRenderIosChannel*>::iterator it;
+  it = agl_channels_.find(channel);
+  if (it != agl_channels_.end()) {
+    delete it->second;
+    agl_channels_.erase(it);
+  } else {
+    return -1;
+  }
+
+  std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
+  while (z_it != z_order_to_channel_.end()) {
+    if (z_it->second == channel) {
+      z_order_to_channel_.erase(z_it);
+      break;
+    }
+    z_it++;
+  }
+
+  return 0;
+}
+
+bool VideoRenderIosGles20::HasChannel(int channel) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  std::map<int, VideoRenderIosChannel*>::iterator it =
+      agl_channels_.find(channel);
+
+  if (it != agl_channels_.end()) {
+    return true;
+  }
+
+  return false;
+}
+
+// Rendering process
+bool VideoRenderIosGles20::ScreenUpdateThreadProc(void* obj) {
+  return static_cast<VideoRenderIosGles20*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderIosGles20::ScreenUpdateProcess() {
+  screen_update_event_->Wait(100);
+
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (!is_rendering_) {
+    return false;
+  }
+
+  if (!screen_update_thread_) {
+    return false;
+  }
+
+  if (GetWindowRect(window_rect_) == -1) {
+    return true;
+  }
+
+  if (window_width_ != (window_rect_.right - window_rect_.left) ||
+      window_height_ != (window_rect_.bottom - window_rect_.top)) {
+    window_width_ = window_rect_.right - window_rect_.left;
+    window_height_ = window_rect_.bottom - window_rect_.top;
+  }
+
+  // Check if there are any updated buffers
+  bool updated = false;
+
+  std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
+  while (it != agl_channels_.end()) {
+    VideoRenderIosChannel* agl_channel = it->second;
+
+    updated = agl_channel->IsUpdated();
+    if (updated) {
+      break;
+    }
+    it++;
+  }
+
+  if (updated) {
+    // At least one buffer has been updated, we need to repaint the texture
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator r_it =
+             z_order_to_channel_.rbegin();
+         r_it != z_order_to_channel_.rend();
+         r_it++) {
+      int channel_id = r_it->second;
+      std::map<int, VideoRenderIosChannel*>::iterator it =
+          agl_channels_.find(channel_id);
+
+      VideoRenderIosChannel* agl_channel = it->second;
+
+      agl_channel->RenderOffScreenBuffer();
+    }
+
+    [view_ presentFramebuffer];
+  }
+
+  return true;
+}
+
+int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (!view_) {
+    return -1;
+  }
+
+  CGRect bounds = [view_ bounds];
+  rect.top = bounds.origin.y;
+  rect.left = bounds.origin.x;
+  rect.bottom = bounds.size.height + bounds.origin.y;
+  rect.right = bounds.size.width + bounds.origin.x;
+
+  return 0;
+}
+
+int VideoRenderIosGles20::ChangeWindow(void* new_window) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  view_ = (__bridge VideoRenderIosView*)new_window;
+
+  return 0;
+}
+
+int VideoRenderIosGles20::StartRender() {
+  is_rendering_ = true;
+  return 0;
+}
+
+int VideoRenderIosGles20::StopRender() {
+  is_rendering_ = false;
+  return 0;
+}
+
+int VideoRenderIosGles20::GetScreenResolution(uint& screen_width,
+                                              uint& screen_height) {
+  screen_width = [view_ bounds].size.width;
+  screen_height = [view_ bounds].size.height;
+  return 0;
+}
+
+int VideoRenderIosGles20::SetStreamCropping(const uint stream_id,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) {
+  // Check if there are any updated buffers
+  // bool updated = false;
+  uint counter = 0;
+
+  std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
+  while (it != agl_channels_.end()) {
+    if (counter == stream_id) {
+      VideoRenderIosChannel* agl_channel = it->second;
+      agl_channel->SetStreamSettings(0, left, top, right, bottom);
+    }
+    counter++;
+    it++;
+  }
+
+  return 0;
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_impl.h b/webrtc/modules/video_render/ios/video_render_ios_impl.h
new file mode 100644
index 0000000000..04a7493300
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_impl.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
+
+#include <list>
+#include <map>
+#include <memory>
+
+#include "webrtc/modules/video_render/i_video_render.h"
+
+namespace webrtc {
+
+class VideoRenderIosGles20;
+class CriticalSectionWrapper;
+
+class VideoRenderIosImpl : IVideoRender {
+ public:
+  explicit VideoRenderIosImpl(const int32_t id,
+                              void* window,
+                              const bool full_screen);
+
+  ~VideoRenderIosImpl();
+
+  // Implementation of IVideoRender.
+  int32_t Init() override;
+  int32_t ChangeWindow(void* window) override;
+
+  VideoRenderCallback* AddIncomingRenderStream(const uint32_t stream_id,
+                                               const uint32_t z_order,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom) override;
+
+  int32_t DeleteIncomingRenderStream(const uint32_t stream_id) override;
+
+  int32_t GetIncomingRenderStreamProperties(const uint32_t stream_id,
+                                            uint32_t& z_order,
+                                            float& left,
+                                            float& top,
+                                            float& right,
+                                            float& bottom) const override;
+
+  int32_t StartRender() override;
+  int32_t StopRender() override;
+
+  VideoRenderType RenderType() override;
+  RawVideoType PerferedVideoType() override;
+  bool FullScreen() override;
+  int32_t GetGraphicsMemory(
+      uint64_t& total_graphics_memory,
+      uint64_t& available_graphics_memory) const override;  // NOLINT
+  int32_t GetScreenResolution(
+      uint32_t& screen_width,
+      uint32_t& screen_height) const override;  // NOLINT
+  uint32_t RenderFrameRate(const uint32_t stream_id);
+  int32_t SetStreamCropping(const uint32_t stream_id,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) override;
+  int32_t ConfigureRenderer(const uint32_t stream_id,
+                            const unsigned int z_order,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) override;
+  int32_t SetTransparentBackground(const bool enable) override;
+  int32_t SetText(const uint8_t text_id,
+                  const uint8_t* text,
+                  const int32_t text_length,
+                  const uint32_t text_color_ref,
+                  const uint32_t background_color_ref,
+                  const float left,
+                  const float top,
+                  const float right,
+                  const float bottom) override;
+  int32_t SetBitmap(const void* bit_map,
+                    const uint8_t picture_id,
+                    const void* color_key,
+                    const float left,
+                    const float top,
+                    const float right,
+                    const float bottom);
+  int32_t FullScreenRender(void* window, const bool enable);
+
+ private:
+  int32_t id_;
+  void* ptr_window_;
+  bool full_screen_;
+
+  CriticalSectionWrapper* crit_sec_;
+  std::unique_ptr<VideoRenderIosGles20> ptr_ios_render_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_impl.mm b/webrtc/modules/video_render/ios/video_render_ios_impl.mm
new file mode 100644
index 0000000000..0ef411d56f
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_impl.mm
@@ -0,0 +1,170 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#if !defined(__has_feature) || !__has_feature(objc_arc) +#error "This file requires ARC support." +#endif + +#include "webrtc/modules/video_render/ios/video_render_ios_impl.h" +#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h" +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" +#include "webrtc/system_wrappers/include/trace.h" + +using namespace webrtc; + +#define IOS_UNSUPPORTED() \ + WEBRTC_TRACE(kTraceError, \ + kTraceVideoRenderer, \ + id_, \ + "%s is not supported on the iOS platform.", \ + __FUNCTION__); \ + return -1; + +VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id, + void* window, + const bool full_screen) + : id_(id), + ptr_window_(window), + full_screen_(full_screen), + crit_sec_(CriticalSectionWrapper::CreateCriticalSection()) {} + +VideoRenderIosImpl::~VideoRenderIosImpl() { + delete crit_sec_; +} + +int32_t VideoRenderIosImpl::Init() { + CriticalSectionScoped cs(crit_sec_); + + ptr_ios_render_.reset(new VideoRenderIosGles20( + (__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_)); + + return ptr_ios_render_->Init(); + ; +} + +int32_t VideoRenderIosImpl::ChangeWindow(void* window) { + CriticalSectionScoped cs(crit_sec_); + if (window == NULL) { + return -1; + } + + ptr_window_ = window; + + return ptr_ios_render_->ChangeWindow(ptr_window_); +} + +VideoRenderCallback* VideoRenderIosImpl::AddIncomingRenderStream( + const uint32_t stream_id, + const uint32_t z_order, + const float left, + const float top, + const float right, + const float bottom) { + CriticalSectionScoped cs(crit_sec_); + if (!ptr_window_) { + return NULL; + } + + return ptr_ios_render_->CreateEaglChannel( + stream_id, z_order, left, top, right, bottom); +} + +int32_t VideoRenderIosImpl::DeleteIncomingRenderStream( + const uint32_t stream_id) { + CriticalSectionScoped cs(crit_sec_); + + return ptr_ios_render_->DeleteEaglChannel(stream_id); +} + +int32_t VideoRenderIosImpl::GetIncomingRenderStreamProperties( + const uint32_t stream_id, + uint32_t& z_order, + float& left, + float& top, + float& right, + float& bottom) const { + IOS_UNSUPPORTED(); +} + +int32_t VideoRenderIosImpl::StartRender() { + return ptr_ios_render_->StartRender(); +} + +int32_t VideoRenderIosImpl::StopRender() { + return ptr_ios_render_->StopRender(); +} + +VideoRenderType VideoRenderIosImpl::RenderType() { return kRenderiOS; } + +RawVideoType VideoRenderIosImpl::PerferedVideoType() { return kVideoI420; } + +bool VideoRenderIosImpl::FullScreen() { IOS_UNSUPPORTED(); } + +int32_t VideoRenderIosImpl::GetGraphicsMemory( + uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const { + IOS_UNSUPPORTED(); +} + +int32_t VideoRenderIosImpl::GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const { + return ptr_ios_render_->GetScreenResolution(screenWidth, screenHeight); +} + +uint32_t VideoRenderIosImpl::RenderFrameRate(const uint32_t streamId) { + IOS_UNSUPPORTED(); +} + +int32_t VideoRenderIosImpl::SetStreamCropping(const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom) { + return ptr_ios_render_->SetStreamCropping(streamId, left, top, right, bottom); +} + +int32_t VideoRenderIosImpl::ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) { + IOS_UNSUPPORTED(); +} + +int32_t 
VideoRenderIosImpl::SetTransparentBackground(const bool enable) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::SetText(const uint8_t textId,
+                                    const uint8_t* text,
+                                    const int32_t textLength,
+                                    const uint32_t textColorRef,
+                                    const uint32_t backgroundColorRef,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::SetBitmap(const void* bitMap,
+                                      const uint8_t pictureId,
+                                      const void* colorKey,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::FullScreenRender(void* window, const bool enable) {
+  IOS_UNSUPPORTED();
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_view.h b/webrtc/modules/video_render/ios/video_render_ios_view.h
new file mode 100644
index 0000000000..d110bc78bd
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_view.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
+
+#import <UIKit/UIKit.h>
+#import <QuartzCore/QuartzCore.h>
+
+#include "webrtc/modules/video_render/ios/open_gles20.h"
+
+@interface VideoRenderIosView : UIView
+
+- (BOOL)createContext;
+- (BOOL)presentFramebuffer;
+- (BOOL)renderFrame:(webrtc::VideoFrame*)frameToRender;
+- (BOOL)setCoordinatesForZOrder:(const float)zOrder
+                           Left:(const float)left
+                            Top:(const float)top
+                          Right:(const float)right
+                         Bottom:(const float)bottom;
+
+@property(nonatomic, retain) EAGLContext* context;
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_view.mm b/webrtc/modules/video_render/ios/video_render_ios_view.mm
new file mode 100644
index 0000000000..b106ffa5c4
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_view.mm
@@ -0,0 +1,163 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include <memory>
+
+#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+@implementation VideoRenderIosView {
+  EAGLContext* _context;
+  std::unique_ptr<OpenGles20> _gles_renderer20;
+  int _frameBufferWidth;
+  int _frameBufferHeight;
+  unsigned int _defaultFrameBuffer;
+  unsigned int _colorRenderBuffer;
+}
+
+@synthesize context = context_;
+
++ (Class)layerClass {
+  return [CAEAGLLayer class];
+}
+
+- (id)initWithCoder:(NSCoder*)coder {
+  // init super class
+  self = [super initWithCoder:coder];
+  if (self) {
+    _gles_renderer20.reset(new OpenGles20());
+  }
+  return self;
+}
+
+- (id)init {
+  // init super class
+  self = [super init];
+  if (self) {
+    _gles_renderer20.reset(new OpenGles20());
+  }
+  return self;
+}
+
+- (id)initWithFrame:(CGRect)frame {
+  // init super class
+  self = [super initWithFrame:frame];
+  if (self) {
+    _gles_renderer20.reset(new OpenGles20());
+  }
+  return self;
+}
+
+- (void)dealloc {
+  if (_defaultFrameBuffer) {
+    glDeleteFramebuffers(1, &_defaultFrameBuffer);
+    _defaultFrameBuffer = 0;
+  }
+
+  if (_colorRenderBuffer) {
+    glDeleteRenderbuffers(1, &_colorRenderBuffer);
+    _colorRenderBuffer = 0;
+  }
+
+  [EAGLContext setCurrentContext:nil];
+}
+
+- (NSString*)description {
+  return [NSString stringWithFormat:
+      @"A WebRTC implemented subclass of UIView."
+       "+Class method is overwritten, along with custom methods"];
+}
+
+- (BOOL)createContext {
+  // create OpenGLES context from self layer class
+  CAEAGLLayer* eagl_layer = (CAEAGLLayer*)self.layer;
+  eagl_layer.opaque = YES;
+  eagl_layer.drawableProperties =
+      [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO],
+                                                 kEAGLDrawablePropertyRetainedBacking,
+                                                 kEAGLColorFormatRGBA8,
+                                                 kEAGLDrawablePropertyColorFormat,
+                                                 nil];
+  _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+
+  if (!_context) {
+    return NO;
+  }
+
+  if (![EAGLContext setCurrentContext:_context]) {
+    return NO;
+  }
+
+  // generates and binds the OpenGLES buffers
+  glGenFramebuffers(1, &_defaultFrameBuffer);
+  glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
+
+  // Create color render buffer and allocate backing store.
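+  // renderbufferStorage:fromDrawable: allocates the renderbuffer's storage
+  // from the CAEAGLLayer itself, so its size and pixel format track the
+  // layer; the resulting width and height are queried back just below.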
+ glGenRenderbuffers(1, &_colorRenderBuffer); + glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer); + [_context renderbufferStorage:GL_RENDERBUFFER + fromDrawable:(CAEAGLLayer*)self.layer]; + glGetRenderbufferParameteriv( + GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth); + glGetRenderbufferParameteriv( + GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_frameBufferHeight); + glFramebufferRenderbuffer(GL_FRAMEBUFFER, + GL_COLOR_ATTACHMENT0, + GL_RENDERBUFFER, + _colorRenderBuffer); + + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + return NO; + } + + // set the frame buffer + glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer); + glViewport(0, 0, self.frame.size.width, self.frame.size.height); + + return _gles_renderer20->Setup([self bounds].size.width, + [self bounds].size.height); +} + +- (BOOL)presentFramebuffer { + if (![_context presentRenderbuffer:GL_RENDERBUFFER]) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + 0, + "%s:%d [context present_renderbuffer] " + "returned false", + __FUNCTION__, + __LINE__); + } + return YES; +} + +- (BOOL)renderFrame:(VideoFrame*)frameToRender { + if (![EAGLContext setCurrentContext:_context]) { + return NO; + } + + return _gles_renderer20->Render(*frameToRender); +} + +- (BOOL)setCoordinatesForZOrder:(const float)zOrder + Left:(const float)left + Top:(const float)top + Right:(const float)right + Bottom:(const float)bottom { + return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom); +} + +@end diff --git a/webrtc/modules/video_render/linux/video_render_linux_impl.cc b/webrtc/modules/video_render/linux/video_render_linux_impl.cc new file mode 100644 index 0000000000..7e53dfdf80 --- /dev/null +++ b/webrtc/modules/video_render/linux/video_render_linux_impl.cc @@ -0,0 +1,261 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
+
+#include "webrtc/modules/video_render/linux/video_x11_render.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#include <X11/Xlib.h>
+
+namespace webrtc {
+
+VideoRenderLinuxImpl::VideoRenderLinuxImpl(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window, const bool fullscreen) :
+    _id(id),
+    _renderLinuxCritsect(
+        *CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrWindow(window), _ptrX11Render(NULL)
+{
+}
+
+VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
+{
+    if (_ptrX11Render)
+        delete _ptrX11Render;
+
+    delete &_renderLinuxCritsect;
+}
+
+int32_t VideoRenderLinuxImpl::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+    _ptrX11Render = new VideoX11Render((Window) _ptrWindow);
+    if (!_ptrX11Render)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s",
+                     "Failed to create instance of VideoX11Render object");
+        return -1;
+    }
+    int retVal = _ptrX11Render->Init();
+    if (retVal == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+
+}
+
+int32_t VideoRenderLinuxImpl::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+    _ptrWindow = window;
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->ChangeWindow((Window) window);
+    }
+
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
+    const uint32_t streamId,
+    const uint32_t zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    VideoRenderCallback* renderCallback = NULL;
+    if (_ptrX11Render)
+    {
+        VideoX11Channel* renderChannel =
+                _ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
+                                                      top, right, bottom);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "Render channel creation failed for stream id: %d",
+                         streamId);
+            return NULL;
+        }
+        renderCallback = (VideoRenderCallback *) renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "_ptrX11Render is NULL");
+        return NULL;
+    }
+    return renderCallback;
+}
+
+int32_t VideoRenderLinuxImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->DeleteX11RenderChannel(streamId);
+    }
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
+                                                          left, top, right,
+                                                          bottom);
+    }
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::StartRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+int32_t VideoRenderLinuxImpl::StopRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+VideoRenderType VideoRenderLinuxImpl::RenderType()
+{
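+    // X11 is the only render type this Linux implementation supports.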
return kRenderX11; +} + +RawVideoType VideoRenderLinuxImpl::PerferedVideoType() +{ + return kVideoI420; +} + +bool VideoRenderLinuxImpl::FullScreen() +{ + return false; +} + +int32_t VideoRenderLinuxImpl::GetGraphicsMemory( + uint64_t& /*totalGraphicsMemory*/, + uint64_t& /*availableGraphicsMemory*/) const +{ + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Linux", __FUNCTION__); + return -1; +} + +int32_t VideoRenderLinuxImpl::GetScreenResolution( + uint32_t& /*screenWidth*/, + uint32_t& /*screenHeight*/) const +{ + return -1; +} + +uint32_t VideoRenderLinuxImpl::RenderFrameRate(const uint32_t /*streamId*/) +{ + return -1; +} + +int32_t VideoRenderLinuxImpl::SetStreamCropping( + const uint32_t /*streamId*/, + const float /*left*/, + const float /*top*/, + const float /*right*/, + const float /*bottom*/) +{ + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Linux", __FUNCTION__); + return -1; +} + +int32_t VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/) +{ + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Linux", __FUNCTION__); + return -1; +} + +int32_t VideoRenderLinuxImpl::ConfigureRenderer( + const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Linux", __FUNCTION__); + return -1; +} + +int32_t VideoRenderLinuxImpl::SetText( + const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, const float top, + const float rigth, + const float bottom) +{ + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Linux", __FUNCTION__); + return -1; +} + +int32_t VideoRenderLinuxImpl::SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, + const float top, + const float right, + const float bottom) +{ + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Linux", __FUNCTION__); + return -1; +} + +} // namespace webrtc diff --git a/webrtc/modules/video_render/linux/video_render_linux_impl.h b/webrtc/modules/video_render/linux/video_render_linux_impl.h new file mode 100644 index 0000000000..0e9ae54c18 --- /dev/null +++ b/webrtc/modules/video_render/linux/video_render_linux_impl.h @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_ + +#include "webrtc/modules/video_render/i_video_render.h" + +namespace webrtc { +class CriticalSectionWrapper; + +class VideoX11Render; + +// Class definitions +class VideoRenderLinuxImpl: IVideoRender +{ +public: + /* + * Constructor/destructor + */ + + VideoRenderLinuxImpl(const int32_t id, + const VideoRenderType videoRenderType, + void* window, const bool fullscreen); + + virtual ~VideoRenderLinuxImpl(); + + virtual int32_t Init(); + + virtual int32_t ChangeWindow(void* window); + + /************************************************************************** + * + * Incoming Streams + * + ***************************************************************************/ + + virtual VideoRenderCallback + * AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t + DeleteIncomingRenderStream(const uint32_t streamId); + + virtual int32_t + GetIncomingRenderStreamProperties(const uint32_t streamId, + uint32_t& zOrder, + float& left, float& top, + float& right, float& bottom) const; + + /************************************************************************** + * + * Start/Stop + * + ***************************************************************************/ + + virtual int32_t StartRender(); + + virtual int32_t StopRender(); + + /************************************************************************** + * + * Properties + * + ***************************************************************************/ + + virtual VideoRenderType RenderType(); + + virtual RawVideoType PerferedVideoType(); + + virtual bool FullScreen(); + + virtual int32_t + GetGraphicsMemory(uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const; + + virtual int32_t + GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const; + + virtual uint32_t RenderFrameRate(const uint32_t streamId); + + virtual int32_t SetStreamCropping(const uint32_t streamId, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t SetTransparentBackground(const bool enable); + + virtual int32_t ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, const float top, + const float rigth, const float bottom); + + virtual int32_t SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, const float top, + const float right, const float bottom); + +private: + int32_t _id; + CriticalSectionWrapper& _renderLinuxCritsect; + + void* _ptrWindow; + + // X11 Render + VideoX11Render* _ptrX11Render; +}; + +} // namespace webrtc +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_ diff --git a/webrtc/modules/video_render/linux/video_x11_channel.cc b/webrtc/modules/video_render/linux/video_x11_channel.cc new file mode 100644 index 0000000000..8d86b7c72a --- /dev/null +++ b/webrtc/modules/video_render/linux/video_x11_channel.cc @@ -0,0 +1,315 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/modules/video_render/linux/video_x11_channel.h" + +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" +#include "webrtc/system_wrappers/include/trace.h" + +namespace webrtc { + +#define DISP_MAX 128 + +static Display *dispArray[DISP_MAX]; +static int dispCount = 0; + + +VideoX11Channel::VideoX11Channel(int32_t id) : + _crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL), + _shminfo(), _image(NULL), _window(0L), _gc(NULL), + _width(DEFAULT_RENDER_FRAME_WIDTH), + _height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0), + _xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL), + _top(0.0), _left(0.0), _right(0.0), _bottom(0.0), + _Id(id) +{ +} + +VideoX11Channel::~VideoX11Channel() +{ + if (_prepared) + { + _crit.Enter(); + ReleaseWindow(); + _crit.Leave(); + } + delete &_crit; +} + +int32_t VideoX11Channel::RenderFrame(const uint32_t streamId, + const VideoFrame& videoFrame) { + CriticalSectionScoped cs(&_crit); + if (_width != videoFrame.width() || _height + != videoFrame.height()) { + if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) { + return -1; + } + } + return DeliverFrame(videoFrame); +} + +int32_t VideoX11Channel::FrameSizeChange(int32_t width, + int32_t height, + int32_t /*numberOfStreams */) +{ + CriticalSectionScoped cs(&_crit); + if (_prepared) + { + RemoveRenderer(); + } + if (CreateLocalRenderer(width, height) == -1) + { + return -1; + } + + return 0; +} + +int32_t VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) { + CriticalSectionScoped cs(&_crit); + if (!_prepared) { + return 0; + } + + if (!dispArray[_dispCount]) { + return -1; + } + + ConvertFromI420(videoFrame, kARGB, 0, _buffer); + + // Put image in window. + XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width, + _height, True); + + // Very important for the image to update properly! 
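+    // XSync flushes the output buffer and then waits until the X server has
+    // processed the XShmPutImage request above, so the frame is on screen
+    // before the shared memory segment is written to again.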
+ XSync(_display, False); + return 0; +} + +int32_t VideoX11Channel::GetFrameSize(int32_t& width, int32_t& height) +{ + width = _width; + height = _height; + + return 0; +} + +int32_t VideoX11Channel::Init(Window window, float left, float top, + float right, float bottom) +{ + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s", + __FUNCTION__); + CriticalSectionScoped cs(&_crit); + + _window = window; + _left = left; + _right = right; + _top = top; + _bottom = bottom; + + _display = XOpenDisplay(NULL); // Use default display + if (!_window || !_display) + { + return -1; + } + + if (dispCount < DISP_MAX) + { + dispArray[dispCount] = _display; + _dispCount = dispCount; + dispCount++; + } + else + { + return -1; + } + + if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right + < 0) || (1 < bottom || bottom < 0)) + { + return -1; + } + + // calculate position and size of rendered video + int x, y; + unsigned int winWidth, winHeight, borderwidth, depth; + Window rootret; + if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth, + &winHeight, &borderwidth, &depth) == 0) + { + return -1; + } + + _xPos = (int32_t) (winWidth * left); + _yPos = (int32_t) (winHeight * top); + _outWidth = (int32_t) (winWidth * (right - left)); + _outHeight = (int32_t) (winHeight * (bottom - top)); + if (_outWidth % 2) + _outWidth++; // the renderer want's sizes that are multiples of two + if (_outHeight % 2) + _outHeight++; + + _gc = XCreateGC(_display, _window, 0, 0); + if (!_gc) { + // Failed to create the graphics context. + assert(false); + return -1; + } + + if (CreateLocalRenderer(winWidth, winHeight) == -1) + { + return -1; + } + return 0; + +} + +int32_t VideoX11Channel::ChangeWindow(Window window) +{ + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s", + __FUNCTION__); + CriticalSectionScoped cs(&_crit); + + // Stop the rendering, if we are rendering... 
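+    // Tear down the current shared-memory image first; a new one is created
+    // below using the geometry of the new window.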
+    RemoveRenderer();
+    _window = window;
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == -1)
+    {
+        return -1;
+    }
+    _xPos = (int) (winWidth * _left);
+    _yPos = (int) (winHeight * _top);
+    _outWidth = (int) (winWidth * (_right - _left));
+    _outHeight = (int) (winHeight * (_bottom - _top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer want's sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    // Prepare rendering using the
+    if (CreateLocalRenderer(_width, _height) == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int32_t VideoX11Channel::ReleaseWindow()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    RemoveRenderer();
+    if (_gc) {
+      XFreeGC(_display, _gc);
+      _gc = NULL;
+    }
+    if (_display)
+    {
+        XCloseDisplay(_display);
+        _display = NULL;
+    }
+    return 0;
+}
+
+int32_t VideoX11Channel::CreateLocalRenderer(int32_t width, int32_t height)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    if (_prepared)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
+                     "Renderer already prepared, exits.");
+        return -1;
+    }
+
+    _width = width;
+    _height = height;
+
+    // create shared memory image
+    _image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
+                             &_shminfo, _width, _height); // this parameter needs to be the same for some reason.
+    _shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
+            * _image->height), IPC_CREAT | 0777);
+    _shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
+    if (_image->data == reinterpret_cast<char*>(-1))
+    {
+        return -1;
+    }
+    _buffer = (unsigned char*) _image->data;
+    _shminfo.readOnly = False;
+
+    // attach image to display
+    if (!XShmAttach(_display, &_shminfo))
+    {
+        //printf("XShmAttach failed !\n");
+        return -1;
+    }
+    XSync(_display, False);
+
+    _prepared = true;
+    return 0;
+}
+
+int32_t VideoX11Channel::RemoveRenderer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    if (!_prepared)
+    {
+        return 0;
+    }
+    _prepared = false;
+
+    // Free the memory.
+    XShmDetach(_display, &_shminfo);
+    XDestroyImage( _image );
+    _image = NULL;
+    shmdt(_shminfo.shmaddr);
+    _shminfo.shmaddr = NULL;
+    _buffer = NULL;
+    shmctl(_shminfo.shmid, IPC_RMID, 0);
+    _shminfo.shmid = 0;
+    return 0;
+}
+
+int32_t VideoX11Channel::GetStreamProperties(uint32_t& zOrder,
+                                             float& left, float& top,
+                                             float& right, float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    zOrder = 0; // no z-order support yet
+    left = _left;
+    top = _top;
+    right = _right;
+    bottom = _bottom;
+
+    return 0;
+}
+
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/linux/video_x11_channel.h b/webrtc/modules/video_render/linux/video_x11_channel.h
new file mode 100644
index 0000000000..6eb402e12e
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_channel.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.
All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+
+#include <sys/shm.h>
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/extensions/XShm.h>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+#define DEFAULT_RENDER_FRAME_WIDTH 352
+#define DEFAULT_RENDER_FRAME_HEIGHT 288
+
+
+class VideoX11Channel: public VideoRenderCallback
+{
+public:
+    VideoX11Channel(int32_t id);
+
+    virtual ~VideoX11Channel();
+
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame);
+
+    int32_t FrameSizeChange(int32_t width, int32_t height,
+                            int32_t numberOfStreams);
+    int32_t DeliverFrame(const VideoFrame& videoFrame);
+    int32_t GetFrameSize(int32_t& width, int32_t& height);
+    int32_t Init(Window window, float left, float top, float right,
+                 float bottom);
+    int32_t ChangeWindow(Window window);
+    int32_t
+        GetStreamProperties(uint32_t& zOrder, float& left,
+                            float& top, float& right, float& bottom) const;
+    int32_t ReleaseWindow();
+
+    bool IsPrepared()
+    {
+        return _prepared;
+    }
+
+private:
+
+    int32_t
+        CreateLocalRenderer(int32_t width, int32_t height);
+    int32_t RemoveRenderer();
+
+    //FIXME a better place for this method? the GetWidthHeight no longer
+    // supported by common_video.
+    int GetWidthHeight(VideoType type, int bufferSize, int& width,
+                       int& height);
+
+    CriticalSectionWrapper& _crit;
+
+    Display* _display;
+    XShmSegmentInfo _shminfo;
+    XImage* _image;
+    Window _window;
+    GC _gc;
+    int32_t _width; // incoming frame width
+    int32_t _height; // incoming frame height
+    int32_t _outWidth; // render frame width
+    int32_t _outHeight; // render frame height
+    int32_t _xPos; // position within window
+    int32_t _yPos;
+    bool _prepared; // true if ready to use
+    int32_t _dispCount;
+
+    unsigned char* _buffer;
+    float _top;
+    float _left;
+    float _right;
+    float _bottom;
+
+    int32_t _Id;
+
+};
+
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
diff --git a/webrtc/modules/video_render/linux/video_x11_render.cc b/webrtc/modules/video_render/linux/video_x11_render.cc
new file mode 100644
index 0000000000..5eb4f36f95
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_render.cc
@@ -0,0 +1,153 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/linux/video_x11_channel.h"
+#include "webrtc/modules/video_render/linux/video_x11_render.h"
+
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoX11Render::VideoX11Render(Window window) :
+    _window(window),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+VideoX11Render::~VideoX11Render()
+{
+    delete &_critSect;
+}
+
+int32_t VideoX11Render::Init()
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    _streamIdToX11ChannelMap.clear();
+
+    return 0;
+}
+
+int32_t VideoX11Render::ChangeWindow(Window window)
+{
+    CriticalSectionScoped cs(&_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int32_t, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.begin();
+
+    while (iter != _streamIdToX11ChannelMap.end())
+    {
+        renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ChangeWindow(window);
+        }
+        iter++;
+    }
+
+    _window = window;
+
+    return 0;
+}
+
+VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
+    int32_t streamId,
+    int32_t zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int32_t, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+
+    if (iter == _streamIdToX11ChannelMap.end())
+    {
+        renderChannel = new VideoX11Channel(streamId);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVideoRenderer,
+                -1,
+                "Failed to create VideoX11Channel for streamId : %d",
+                streamId);
+            return NULL;
+        }
+        renderChannel->Init(_window, left, top, right, bottom);
+        _streamIdToX11ChannelMap[streamId] = renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                     "Render Channel already exists for streamId: %d",
+                     streamId);
+        renderChannel = iter->second;
+    }
+
+    return renderChannel;
+}
+
+int32_t VideoX11Render::DeleteX11RenderChannel(int32_t streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    std::map<int32_t, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ReleaseWindow();
+            delete renderChannel;
+            renderChannel = NULL;
+        }
+        _streamIdToX11ChannelMap.erase(iter);
+        return 0;
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+int32_t VideoX11Render::GetIncomingStreamProperties(
+    int32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    std::map<int32_t, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->GetStreamProperties(zOrder, left, top, right,
+                                               bottom);
+            return 0;
+        }
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/linux/video_x11_render.h b/webrtc/modules/video_render/linux/video_x11_render.h
new file mode 100644
index 0000000000..23b83bd67b
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_render.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
diff --git a/webrtc/modules/video_render/linux/video_x11_render.h b/webrtc/modules/video_render/linux/video_x11_render.h
new file mode 100644
index 0000000000..23b83bd67b
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_render.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#include <X11/Xlib.h>
+#include <map>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Channel;
+
+class VideoX11Render
+{
+
+public:
+    VideoX11Render(Window window);
+    ~VideoX11Render();
+
+    int32_t Init();
+    int32_t ChangeWindow(Window window);
+
+    VideoX11Channel* CreateX11RenderChannel(int32_t streamId,
+                                            int32_t zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom);
+
+    int32_t DeleteX11RenderChannel(int32_t streamId);
+
+    int32_t GetIncomingStreamProperties(int32_t streamId,
+                                        uint32_t& zOrder,
+                                        float& left, float& top,
+                                        float& right, float& bottom);
+
+private:
+    Window _window;
+    CriticalSectionWrapper& _critSect;
+    std::map<int32_t, VideoX11Channel*> _streamIdToX11ChannelMap;
+
+};
+
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
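Throughout the module, stream placement is expressed as normalized left/top/right/bottom fractions of the target window rather than pixels. The conversion each platform channel applies once the window geometry is known amounts to the following (sketch; the local names are illustrative, not from this patch):

    // Map a normalized [0,1] placement onto a window of windowWidth x windowHeight.
    int xPos = static_cast<int>(left * windowWidth);
    int yPos = static_cast<int>(top * windowHeight);
    int outWidth = static_cast<int>((right - left) * windowWidth);
    int outHeight = static_cast<int>((bottom - top) * windowHeight);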
diff --git a/webrtc/modules/video_render/mac/cocoa_full_screen_window.h b/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
new file mode 100644
index 0000000000..c8e98bba67
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_full_screen_window.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+
+#import <Cocoa/Cocoa.h>
+//#define GRAB_ALL_SCREENS 1
+
+@interface CocoaFullScreenWindow : NSObject {
+  NSWindow* _window;
+}
+
+-(id)init;
+-(void)grabFullScreen;
+-(void)releaseFullScreen;
+-(NSWindow*)window;
+
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
diff --git a/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm b/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
new file mode 100644
index 0000000000..b57223b4df
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+@implementation CocoaFullScreenWindow
+
+-(id)init{
+
+  self = [super init];
+  if(!self){
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__);
+    return nil;
+  }
+
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
+  return self;
+}
+
+-(void)grabFullScreen{
+
+#ifdef GRAB_ALL_SCREENS
+  if(CGCaptureAllDisplays() != kCGErrorSuccess)
+#else
+  if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+  {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__);
+  }
+
+  // get the shielding window level
+  int windowLevel = CGShieldingWindowLevel();
+
+  // get the screen rect of main display
+  NSRect screenRect = [[NSScreen mainScreen]frame];
+
+  _window = [[NSWindow alloc]initWithContentRect:screenRect
+                                       styleMask:NSBorderlessWindowMask
+                                         backing:NSBackingStoreBuffered
+                                           defer:NO
+                                          screen:[NSScreen mainScreen]];
+
+  [_window setLevel:windowLevel];
+  [_window setBackgroundColor:[NSColor blackColor]];
+  [_window makeKeyAndOrderFront:nil];
+
+}
+
+-(void)releaseFullScreen
+{
+  [_window orderOut:self];
+
+#ifdef GRAB_ALL_SCREENS
+  if(CGReleaseAllDisplays() != kCGErrorSuccess)
+#else
+  if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+  {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__);
+  }
+}
+
+- (NSWindow*)window
+{
+  return _window;
+}
+
+- (void) dealloc
+{
+  [self releaseFullScreen];
+  [super dealloc];
+}
+
+
+
+@end
diff --git a/webrtc/modules/video_render/mac/cocoa_render_view.h b/webrtc/modules/video_render/mac/cocoa_render_view.h
new file mode 100644
index 0000000000..15a8108dec
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_render_view.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_render_view.h
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/OpenGL.h>
+#import <OpenGL/gl.h>
+#import <OpenGL/glu.h>
+
+@interface CocoaRenderView : NSOpenGLView {
+  NSOpenGLContext* _nsOpenGLContext;
+}
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
+-(NSOpenGLContext*)nsOpenGLContext;
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
diff --git a/webrtc/modules/video_render/mac/cocoa_render_view.mm b/webrtc/modules/video_render/mac/cocoa_render_view.mm
new file mode 100644
index 0000000000..4631ff31a4
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_render_view.mm
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+
+#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+@implementation CocoaRenderView
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
+
+  self = [super initWithFrame:[self frame] pixelFormat:fmt];
+  if (self == nil){
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
+  }
+
+
+  _nsOpenGLContext = [self openGLContext];
+
+}
+
+-(NSOpenGLContext*)nsOpenGLContext {
+  return _nsOpenGLContext;
+}
+
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
+
+  NSRect screenRect = [[NSScreen mainScreen]frame];
+//  [_windowRef setFrame:screenRect];
+//  [_windowRef setBounds:screenRect];
+  self = [super initWithFrame:screenRect pixelFormat:fmt];
+  if (self == nil){
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
+  }
+
+  _nsOpenGLContext = [self openGLContext];
+
+}
+
+@end
+
+
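The AGL channel that follows stages every decoded I420 frame into a 32-bit BGRA buffer before uploading it as a rectangle texture, and FrameSizeChange() sizes both buffers via CalcBufferSize(). The arithmetic being relied on, as a standalone sketch (these are the standard I420/ARGB layouts, not declarations from this patch):

    // I420: full-resolution Y plane plus quarter-resolution U and V planes.
    size_t I420BufferSize(int w, int h) {
      return static_cast<size_t>(w) * h + 2 * (((w + 1) / 2) * ((h + 1) / 2));
    }
    // ARGB/BGRA: four bytes per pixel.
    size_t ArgbBufferSize(int w, int h) {
      return static_cast<size_t>(w) * h * 4;
    }
    // DeliverFrame() below rejects any frame whose I420 size no longer matches
    // the size recorded by the last FrameSizeChange() call.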
diff --git a/webrtc/modules/video_render/mac/video_render_agl.cc b/webrtc/modules/video_render/mac/video_render_agl.cc
new file mode 100644
index 0000000000..3243563b2b
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_agl.cc
@@ -0,0 +1,1985 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#include "webrtc/modules/video_render/mac/video_render_agl.h"
+
+//  includes
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+/*
+ *
+ *    VideoChannelAGL
+ *
+ */
+
+#pragma mark VideoChannelAGL constructor
+
+VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
+    _aglContext( aglContext),
+    _id( iId),
+    _owner( owner),
+    _width( 0),
+    _height( 0),
+    _stretchedWidth( 0),
+    _stretchedHeight( 0),
+    _startWidth( 0.0f),
+    _startHeight( 0.0f),
+    _stopWidth( 0.0f),
+    _stopHeight( 0.0f),
+    _xOldWidth( 0),
+    _yOldHeight( 0),
+    _oldStretchedHeight(0),
+    _oldStretchedWidth( 0),
+    _buffer( 0),
+    _bufferSize( 0),
+    _incomingBufferSize(0),
+    _bufferIsUpdated( false),
+    _sizeInitialized( false),
+    _numberOfStreams( 0),
+    _bVideoSizeStartedChanging(false),
+    _pixelFormat( GL_RGBA),
+    _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+    _texture( 0)
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
+}
+
+VideoChannelAGL::~VideoChannelAGL()
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    aglSetCurrentContext(_aglContext);
+
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
+                                     VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+  if (_width != videoFrame.width() ||
+      _height != videoFrame.height()) {
+    if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
+
+  _owner->UnlockAGLCntx();
+  return DeliverFrame(videoFrame);
+}
+
+int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
+{
+    _owner->LockAGLCntx();
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    // We'll get a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+ if (_buffer) + { + delete [] _buffer; + _bufferSize = 0; + } + + _incomingBufferSize = CalcBufferSize(kI420, _width, _height); + _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel; + _buffer = new unsigned char [_bufferSize]; + memset(_buffer, 0, _bufferSize * sizeof(unsigned char)); + + if (aglSetCurrentContext(_aglContext) == false) + { + _owner->UnlockAGLCntx(); + return -1; + } + + // Delete a possible old texture + if (_texture != 0) + { + glDeleteTextures(1, (const GLuint*) &_texture); + _texture = 0; + } + + // Create a new texture + glGenTextures(1, (GLuint *) &_texture); + + GLenum glErr = glGetError(); + + if (glErr != GL_NO_ERROR) + { + } + + // Do the setup for both textures + // Note: we setup two textures even if we're not running full screen + glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); + + // Set texture parameters + glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0); + + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + + glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); + + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE); + + // Maximum width/height for a texture + GLint texSize; + glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize); + + if (texSize < _width || texSize < _height) + { + // Image too big for memory + _owner->UnlockAGLCntx(); + return -1; + } + + // Set up th texture type and size + glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target + 0, // level + GL_RGBA, // internal format + _width, // width + _height, // height + 0, // border 0/1 = off/on + _pixelFormat, // format, GL_BGRA + _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8 + _buffer); // pixel data + + glErr = glGetError(); + if (glErr != GL_NO_ERROR) + { + _owner->UnlockAGLCntx(); + return -1; + } + + _owner->UnlockAGLCntx(); + return 0; +} + +// Called from video engine when a new frame should be rendered. +int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) { + _owner->LockAGLCntx(); + + if (_texture == 0) { + _owner->UnlockAGLCntx(); + return 0; + } + + if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) != + _incomingBufferSize) { + _owner->UnlockAGLCntx(); + return -1; + } + + // Setting stride = width. + int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer); + if (rgbret < 0) { + _owner->UnlockAGLCntx(); + return -1; + } + + aglSetCurrentContext(_aglContext); + + // Put the new frame into the graphic card texture. 
+ // Make sure this texture is the active one + glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); + GLenum glErr = glGetError(); + if (glErr != GL_NO_ERROR) { + _owner->UnlockAGLCntx(); + return -1; + } + + // Copy buffer to texture + glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT, + 0, // Level, not use + 0, // start point x, (low left of pic) + 0, // start point y, + _width, // width + _height, // height + _pixelFormat, // pictue format for _buffer + _pixelDataType, // data type of _buffer + (const GLvoid*) _buffer); // the pixel data + + if (glGetError() != GL_NO_ERROR) { + _owner->UnlockAGLCntx(); + return -1; + } + + _bufferIsUpdated = true; + _owner->UnlockAGLCntx(); + + return 0; +} + +int VideoChannelAGL::RenderOffScreenBuffer() +{ + + _owner->LockAGLCntx(); + + if (_texture == 0) + { + _owner->UnlockAGLCntx(); + return 0; + } + + GLfloat xStart = 2.0f * _startWidth - 1.0f; + GLfloat xStop = 2.0f * _stopWidth - 1.0f; + GLfloat yStart = 1.0f - 2.0f * _stopHeight; + GLfloat yStop = 1.0f - 2.0f * _startHeight; + + aglSetCurrentContext(_aglContext); + glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); + + if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight) + { + glViewport(0, 0, _stretchedWidth, _stretchedHeight); + } + _oldStretchedHeight = _stretchedHeight; + _oldStretchedWidth = _stretchedWidth; + + // Now really put the texture into the framebuffer + glLoadIdentity(); + + glEnable(GL_TEXTURE_RECTANGLE_EXT); + + glBegin(GL_POLYGON); + { + glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop); + glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop); + glTexCoord2f(_width, _height); glVertex2f(xStop, yStart); + glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart); + } + glEnd(); + + glDisable(GL_TEXTURE_RECTANGLE_EXT); + + _bufferIsUpdated = false; + + _owner->UnlockAGLCntx(); + return 0; +} + +int VideoChannelAGL::IsUpdated(bool& isUpdated) +{ + _owner->LockAGLCntx(); + isUpdated = _bufferIsUpdated; + _owner->UnlockAGLCntx(); + + return 0; +} + +int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight) +{ + + _owner->LockAGLCntx(); + + _startWidth = startWidth; + _stopWidth = stopWidth; + _startHeight = startHeight; + _stopHeight = stopHeight; + + int oldWidth = _width; + int oldHeight = _height; + int oldNumberOfStreams = _numberOfStreams; + + _width = 0; + _height = 0; + + int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams); + + _owner->UnlockAGLCntx(); + + return retVal; +} + +int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/) +{ + return -1; +} + +#pragma mark VideoRenderAGL WindowRef constructor + +VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) : +_hiviewRef( 0), +_windowRef( windowRef), +_fullScreen( fullscreen), +_id( iId), +_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()), +_screenUpdateEvent( 0), +_isHIViewRef( false), +_aglContext( 0), +_windowWidth( 0), +_windowHeight( 0), +_lastWindowWidth( -1), +_lastWindowHeight( -1), +_lastHiViewWidth( -1), +_lastHiViewHeight( -1), +_currentParentWindowHeight( 0), +_currentParentWindowWidth( 0), +_currentParentWindowBounds( ), +_windowHasResized( false), +_lastParentWindowBounds( ), +_currentHIViewBounds( ), +_lastHIViewBounds( ), +_windowRect( ), +_aglChannels( ), +_zOrderToChannel( ), +_hiviewEventHandlerRef( NULL), +_windowEventHandlerRef( NULL), +_currentViewBounds( ), 
+_lastViewBounds( ),
+_renderingIsPaused( false)
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
+
+    _screenUpdateThread.reset(
+        new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if(!IsValidWindowPtr(_windowRef))
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
+    }
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+}
+
+// this is a static function. It has been registered (in class constructor) to be called on various window redrawing or resizing.
+// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
+#pragma mark WindowRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
+                                                      EventRef theEvent,
+                                                      void* userData)
+{
+    WindowRef windowRef = NULL;
+
+    int eventType = GetEventKind(theEvent);
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    GetEventParameter (theEvent,
+                       kEventParamDirectObject,
+                       typeWindowRef,
+                       NULL,
+                       sizeof (WindowRef),
+                       NULL,
+                       &windowRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+
+    bool updateUI = true;
+    if(kEventWindowBoundsChanged == eventType)
+    {
+    }
+    else if(kEventWindowBoundsChanging == eventType)
+    {
+    }
+    else if(kEventWindowZoomed == eventType)
+    {
+    }
+    else if(kEventWindowExpanding == eventType)
+    {
+    }
+    else if(kEventWindowExpanded == eventType)
+    {
+    }
+    else if(kEventWindowClickResizeRgn == eventType)
+    {
+    }
+    else if(kEventWindowClickDragRgn == eventType)
+    {
+    }
+    else
+    {
+        updateUI = false;
+    }
+
+    if(true == updateUI)
+    {
+        obj->ParentWindowResized(windowRef);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return noErr;
+}
+
+#pragma mark VideoRenderAGL HIViewRef constructor
+
+VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( windowRef),
+_windowRef( 0),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateEvent( 0),
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false)
+{
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
+    //  _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
+
+    _screenUpdateThread.reset(new rtc::PlatformThread(
+        ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
+    _screenUpdateEvent = EventWrapper::Create();
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER + // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it + // The event handler looks for window resize events and adjusts the offset of the controls. + + //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__); + + + static const EventTypeSpec windowEventTypes[] = + { + kEventClassWindow, kEventWindowBoundsChanged, + kEventClassWindow, kEventWindowBoundsChanging, + kEventClassWindow, kEventWindowZoomed, + kEventClassWindow, kEventWindowExpanded, + kEventClassWindow, kEventWindowClickResizeRgn, + kEventClassWindow, kEventWindowClickDragRgn + }; + + WindowRef parentWindow = HIViewGetWindow(windowRef); + + InstallWindowEventHandler (parentWindow, + NewEventHandlerUPP (sHandleWindowResized), + GetEventTypeCount(windowEventTypes), + windowEventTypes, + (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later + &_windowEventHandlerRef); + +#endif + +#ifdef NEW_HIVIEW_EVENT_HANDLER + //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__); + + static const EventTypeSpec hiviewEventTypes[] = + { + kEventClassControl, kEventControlBoundsChanged, + kEventClassControl, kEventControlDraw + // kEventControlDragLeave + // kEventControlDragReceive + // kEventControlGetFocusPart + // kEventControlApplyBackground + // kEventControlDraw + // kEventControlHit + + }; + + HIViewInstallEventHandler(_hiviewRef, + NewEventHandlerUPP(sHandleHiViewResized), + GetEventTypeCount(hiviewEventTypes), + hiviewEventTypes, + (void *) this, + &_hiviewEventHandlerRef); + +#endif +} + +// this is a static function. It has been registered (in constructor) to be called on various window redrawing or resizing. +// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it. 
+#pragma mark HIViewRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
+{
+    //static int callbackCounter = 1;
+    HIViewRef hiviewRef = NULL;
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    int eventType = GetEventKind(theEvent);
+    OSStatus status = noErr;
+    status = GetEventParameter (theEvent,
+                                kEventParamDirectObject,
+                                typeControlRef,
+                                NULL,
+                                sizeof (ControlRef),
+                                NULL,
+                                &hiviewRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+    WindowRef parentWindow = HIViewGetWindow(hiviewRef);
+    bool updateUI = true;
+
+    if(kEventControlBoundsChanged == eventType)
+    {
+    }
+    else if(kEventControlDraw == eventType)
+    {
+    }
+    else
+    {
+        updateUI = false;
+    }
+
+    if(true == updateUI)
+    {
+        obj->ParentWindowResized(parentWindow);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return status;
+}
+
+VideoRenderAGL::~VideoRenderAGL()
+{
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
+
+
+#ifdef USE_EVENT_HANDLERS
+    // remove event handlers
+    OSStatus status;
+    if(_isHIViewRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+    }
+    else
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+    }
+    if(noErr != status)
+    {
+        if(_isHIViewRef)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+        else
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+
+#endif
+
+    OSStatus status;
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if(_windowEventHandlerRef)
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER
+    if(_hiviewEventHandlerRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+    }
+#endif
+
+    // Signal event to exit thread, then delete it
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        tmpPtr->Stop();
+        delete tmpPtr;
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_aglContext != 0)
+    {
+        aglSetCurrentContext(_aglContext);
+        aglDestroyContext(_aglContext);
+        _aglContext = 0;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+    while (it!= _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+
+    //delete _renderCritSec;
+
+
+}
+
+int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
+{
+    aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
+    return 0;
+}
+
+int VideoRenderAGL::Init()
+{
+    LockAGLCntx();
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    // Create mixing textures
+    if (CreateMixingContext() == -1)
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
+
+    if (HasChannel(channel))
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+        // There is already one channel using this zOrder
+        // TODO: Allow multiple channels with same zOrder
+    }
+
+    VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
+
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        if (newAGLChannel)
+        {
+            delete newAGLChannel;
+            newAGLChannel = NULL;
+        }
+        //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+
+    _aglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    UnlockAGLCntx();
+    return newAGLChannel;
+}
+
+int VideoRenderAGL::DeleteAllAGLChannels()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
+    //int i = 0 ;
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        if (channel)
+            delete channel;
+
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+    return 0;
+}
+
+int VideoRenderAGL::DeleteAGLChannel(int channel)
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
+
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.find(channel);
+    if (it != _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;// = _zOrderToChannel.begin();
+    }
+
+    return 0;
+}
+
+int VideoRenderAGL::StopThread()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _renderCritSec.Leave();
+        tmpPtr->Stop();
+        delete tmpPtr;
+        _renderCritSec.Enter();
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderAGL::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    return _fullScreen;
+}
+
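Channel lookup in the functions above is keyed on channel id, while paint order lives in a separate multimap keyed on zOrder (duplicates allowed, hence multimap). RenderOffScreenBuffers() later walks it in reverse so the highest z-order is drawn first. The same bookkeeping reduced to its core (standalone sketch; Channel is a stand-in type, not a declaration from this patch):

    #include <map>

    std::map<int, Channel*> channels;         // channel id -> channel
    std::multimap<int, int> zOrderToChannel;  // zOrder -> channel id

    zOrderToChannel.insert(std::pair<int, int>(zOrder, channelId));
    for (std::multimap<int, int>::reverse_iterator rIt = zOrderToChannel.rbegin();
         rIt != zOrderToChannel.rend(); ++rIt) {
      channels[rIt->second]->RenderOffScreenBuffer();  // highest zOrder first
    }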
+bool VideoRenderAGL::HasChannels()
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    if (_aglChannels.begin() != _aglChannels.end())
+    {
+        return true;
+    }
+
+    return false;
+}
+
+bool VideoRenderAGL::HasChannel(int channel)
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+    if (it != _aglChannels.end())
+    {
+        return true;
+    }
+
+    return false;
+}
+
+int VideoRenderAGL::GetChannels(std::list<int>& channelList)
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+
+    if (it != _aglChannels.end())
+    {
+        VideoChannelAGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            return NULL;
+        }
+
+        std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+        while(zIt != _zOrderToChannel.end())
+        {
+            if (zIt->second == channel)
+            {
+                if (zIt->first != zOrder)
+                {
+                    _zOrderToChannel.erase(zIt);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            zIt++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderAGL::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return false;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == GL_FALSE)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+        || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        // We have a new window size, update the context.
+        if (aglUpdateContext(_aglContext) == GL_FALSE)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+
+    // this section will poll to see if the window size has changed
+    // this is causing problem w/invalid windowRef
+    // this code has been modified and exists now in the window event handler
+#ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if (_isHIViewRef)
+    {
+
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        WindowRef window = HIViewGetWindow(_hiviewRef);
+
+        if(FALSE == IsValidWindowPtr(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid hiviewRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        if (window == NULL)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        if(FALSE == MacIsWindowVisible(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        HIRect viewBounds; // Placement and size for HIView
+        int windowWidth = 0; // Parent window width
+        int windowHeight = 0; // Parent window height
+
+        // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittently if the OS decides it needs to push it into the back for a moment.
+        // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seem not to crash
+        Rect contentBounds =
+        {   0, 0, 0, 0}; // The bounds for the parent window
+
+#if defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+        Rect globalBounds =
+        {   0, 0, 0, 0}; // The bounds for the parent window
+        globalBounds.top = contentBounds.top;
+        globalBounds.right = contentBounds.right;
+        globalBounds.bottom = contentBounds.bottom;
+        globalBounds.left = contentBounds.left;
+
+        windowHeight = globalBounds.bottom - globalBounds.top;
+        windowWidth = globalBounds.right - globalBounds.left;
+
+        // Get the size of the HIViewRef
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        // Check if this is the first call..
+        if (_lastWindowHeight == -1 &&
+            _lastWindowWidth == -1)
+        {
+            _lastWindowWidth = windowWidth;
+            _lastWindowHeight = windowHeight;
+
+            _lastViewBounds.origin.x = viewBounds.origin.x;
+            _lastViewBounds.origin.y = viewBounds.origin.y;
+            _lastViewBounds.size.width = viewBounds.size.width;
+            _lastViewBounds.size.height = viewBounds.size.height;
+        }
+
+        bool resized = false;
+
+        // Check if parent window size has changed
+        if (windowHeight != _lastWindowHeight ||
+            windowWidth != _lastWindowWidth)
+        {
+            resized = true;
+        }
+
+        // Check if the HIView has new size or is moved in the parent window
+        if (_lastViewBounds.origin.x != viewBounds.origin.x ||
+            _lastViewBounds.origin.y != viewBounds.origin.y ||
+            _lastViewBounds.size.width != viewBounds.size.width ||
+            _lastViewBounds.size.height != viewBounds.size.height)
+        {
+            // The HiView is resized or has moved.
+            resized = true;
+        }
+
+        if (resized)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
+
+            // Calculate offset between the windows
+            // {x, y, width, height}, x,y = lower left corner
+            const GLint offs[4] =
+            {   (int)(0.5f + viewBounds.origin.x),
+                (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+                viewBounds.size.width, viewBounds.size.height};
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
+            //             contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
+
+            aglSetDrawable (_aglContext, GetWindowPort(window));
+            aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+            aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+            // We need to change the viewport too if the HIView size has changed
+            glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+        }
+        _lastWindowWidth = windowWidth;
+        _lastWindowHeight = windowHeight;
+
+        _lastViewBounds.origin.x = viewBounds.origin.x;
+        _lastViewBounds.origin.y = viewBounds.origin.y;
+        _lastViewBounds.size.width = viewBounds.size.width;
+        _lastViewBounds.size.height = viewBounds.size.height;
+
+    }
+#endif
+    if (_fullScreen)
+    {
+        // TODO
+        // We use double buffers, must always update
+        //RenderOffScreenBuffersToBackBuffer();
+    }
+    else
+    {
+        // Check if there are any updated buffers
+        bool updated = false;
+
+        // TODO: check if window size is updated!
+        // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+        while (it != _aglChannels.end())
+        {
+
+            VideoChannelAGL* aglChannel = it->second;
+            aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+            aglChannel->IsUpdated(updated);
+            if (updated)
+            {
+                break;
+            }
+            it++;
+        }
+
+        if (updated)
+        {
+            // At least one buffer is updated, we need to repaint the texture
+            if (RenderOffScreenBuffers() != -1)
+            {
+                // MF
+                //SwapAndDisplayBuffers();
+            }
+            else
+            {
+                // Error updating the mixing texture, don't swap.
+            }
+        }
+    }
+
+    UnlockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
+    return true;
+}
+
+void VideoRenderAGL::ParentWindowResized(WindowRef window)
+{
+    //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
+
+    LockAGLCntx();
+
+    // set flag
+    _windowHasResized = false;
+
+    if(FALSE == HIViewIsValid(_hiviewRef))
+    {
+        //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == IsValidWindowPtr(window))
+    {
+        //WEBRTC_LOG(kTraceError, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if (window == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "windowRef = NULL");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == MacIsWindowVisible(window))
+    {
+        //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
+        UnlockAGLCntx();
+        return;
+    }
+
+    Rect contentBounds =
+    {   0, 0, 0, 0};
+
+#if defined(USE_CONTENT_RGN)
+    GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif defined(USE_STRUCT_RGN)
+    GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+    //WEBRTC_LOG(kTraceDebug, "%s contentBounds t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+
+    // update global vars
+    _currentParentWindowBounds.top = contentBounds.top;
+    _currentParentWindowBounds.left = contentBounds.left;
+    _currentParentWindowBounds.bottom = contentBounds.bottom;
+    _currentParentWindowBounds.right = contentBounds.right;
+
+    _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
+    _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
+
+    _windowHasResized = true;
+
+    // ********* update AGL offsets
+    HIRect viewBounds;
+    HIViewGetBounds(_hiviewRef, &viewBounds);
+    HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+    const GLint offs[4] =
+    {   (int)(0.5f + viewBounds.origin.x),
+        (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+        viewBounds.size.width, viewBounds.size.height};
+    //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
+    //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+    aglSetCurrentContext(_aglContext);
+    aglSetDrawable (_aglContext, GetWindowPort(window));
+    aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+    aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+    // We need to change the viewport too if the HIView size has changed
+    glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+    UnlockAGLCntx();
+
+    return;
+}
+
+int VideoRenderAGL::CreateMixingContext()
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
+
+    // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
+    // a hardware
renderer is used and not a software renderer. + + GLint attributes[] = + { + AGL_DOUBLEBUFFER, + AGL_WINDOW, + AGL_RGBA, + AGL_NO_RECOVERY, + AGL_ACCELERATED, + AGL_RED_SIZE, 8, + AGL_GREEN_SIZE, 8, + AGL_BLUE_SIZE, 8, + AGL_ALPHA_SIZE, 8, + AGL_DEPTH_SIZE, 24, + AGL_NONE, + }; + + AGLPixelFormat aglPixelFormat; + + // ***** Set up the OpenGL Context ***** + + // Get a pixel format for the attributes above + aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes); + if (NULL == aglPixelFormat) + { + //WEBRTC_LOG(kTraceError, "Could not create pixel format"); + UnlockAGLCntx(); + return -1; + } + + // Create an AGL context + _aglContext = aglCreateContext(aglPixelFormat, NULL); + if (_aglContext == NULL) + { + //WEBRTC_LOG(kTraceError, "Could no create AGL context"); + UnlockAGLCntx(); + return -1; + } + + // Release the pixel format memory + aglDestroyPixelFormat(aglPixelFormat); + + // Set the current AGL context for the rest of the settings + if (aglSetCurrentContext(_aglContext) == false) + { + //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError()); + UnlockAGLCntx(); + return -1; + } + + if (_isHIViewRef) + { + //--------------------------- + // BEGIN: new test code +#if 0 + // Don't use this one! + // There seems to be an OS X bug that can't handle + // movements and resizing of the parent window + // and or the HIView + if (aglSetHIViewRef(_aglContext,_hiviewRef) == false) + { + //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError()); + UnlockAGLCntx(); + return -1; + } +#else + + // Get the parent window for this control + WindowRef window = GetControlOwner(_hiviewRef); + + Rect globalBounds = + { 0,0,0,0}; // The bounds for the parent window + HIRect viewBounds; // Placemnt in the parent window and size. + int windowHeight = 0; + + // Rect titleBounds = {0,0,0,0}; + // GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds); + // _titleBarHeight = titleBounds.top - titleBounds.bottom; + // if(0 == _titleBarHeight) + // { + // //WEBRTC_LOG(kTraceError, "Titlebar height = 0"); + // //return -1; + // } + + + // Get the bounds for the parent window +#if defined(USE_CONTENT_RGN) + GetWindowBounds(window, kWindowContentRgn, &globalBounds); +#elif defined(USE_STRUCT_RGN) + GetWindowBounds(window, kWindowStructureRgn, &globalBounds); +#endif + windowHeight = globalBounds.bottom - globalBounds.top; + + // Get the bounds for the HIView + HIViewGetBounds(_hiviewRef, &viewBounds); + + HIViewConvertRect(&viewBounds, _hiviewRef, NULL); + + const GLint offs[4] = + { (int)(0.5f + viewBounds.origin.x), + (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)), + viewBounds.size.width, viewBounds.size.height}; + + //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]); + + + aglSetDrawable (_aglContext, GetWindowPort(window)); + aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs); + aglEnable(_aglContext, AGL_BUFFER_RECT); + + GLint surfaceOrder = 1; // 1: above window, -1 below. 
+ //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder); + aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder); + + glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height); +#endif + + } + else + { + if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef))) + { + //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError()); + UnlockAGLCntx(); + return -1; + } + } + + _windowWidth = _windowRect.right - _windowRect.left; + _windowHeight = _windowRect.bottom - _windowRect.top; + + // opaque surface + int surfaceOpacity = 1; + if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false) + { + //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError()); + UnlockAGLCntx(); + return -1; + } + + // 1 -> sync to screen rat, slow... + //int swapInterval = 0; // 0 don't sync with vertical trace + int swapInterval = 0; // 1 sync with vertical trace + if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false) + { + //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError()); + UnlockAGLCntx(); + return -1; + } + + // Update the rect with the current size + if (GetWindowRect(_windowRect) == -1) + { + //WEBRTC_LOG(kTraceError, "Could not get window size"); + UnlockAGLCntx(); + return -1; + } + + // Disable not needed functionality to increase performance + glDisable(GL_DITHER); + glDisable(GL_ALPHA_TEST); + glDisable(GL_STENCIL_TEST); + glDisable(GL_FOG); + glDisable(GL_TEXTURE_2D); + glPixelZoom(1.0, 1.0); + + glDisable(GL_BLEND); + glDisable(GL_DEPTH_TEST); + glDepthMask(GL_FALSE); + glDisable(GL_CULL_FACE); + + glClearColor(0.0f, 0.0f, 0.0f, 1.0f); + glClear(GL_COLOR_BUFFER_BIT); + + GLenum glErr = glGetError(); + + if (glErr) + { + } + + UpdateClipping(); + + //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()"); + + UnlockAGLCntx(); + return 0; +} + +int VideoRenderAGL::RenderOffScreenBuffers() +{ + LockAGLCntx(); + + // Get the current window size, it might have changed since last render. + if (GetWindowRect(_windowRect) == -1) + { + //WEBRTC_LOG(kTraceError, "Could not get window rect"); + UnlockAGLCntx(); + return -1; + } + + if (aglSetCurrentContext(_aglContext) == false) + { + //WEBRTC_LOG(kTraceError, "Could not set current context for rendering"); + UnlockAGLCntx(); + return -1; + } + + // HERE - onl if updated! + glClear(GL_COLOR_BUFFER_BIT); + + // Loop through all channels starting highest zOrder ending with lowest. + for (std::multimap::reverse_iterator rIt = _zOrderToChannel.rbegin(); + rIt != _zOrderToChannel.rend(); + rIt++) + { + int channelId = rIt->second; + std::map::iterator it = _aglChannels.find(channelId); + + VideoChannelAGL* aglChannel = it->second; + + aglChannel->RenderOffScreenBuffer(); + } + + SwapAndDisplayBuffers(); + + UnlockAGLCntx(); + return 0; +} + +int VideoRenderAGL::SwapAndDisplayBuffers() +{ + + LockAGLCntx(); + if (_fullScreen) + { + // TODO: + // Swap front and back buffers, rendering taking care of in the same call + //aglSwapBuffers(_aglContext); + // Update buffer index to the idx for the next rendering! + //_textureIdx = (_textureIdx + 1) & 1; + } + else + { + // Single buffer rendering, only update context. 
+ glFlush(); + aglSwapBuffers(_aglContext); + HIViewSetNeedsDisplay(_hiviewRef, true); + } + + UnlockAGLCntx(); + return 0; +} + +int VideoRenderAGL::GetWindowRect(Rect& rect) +{ + + LockAGLCntx(); + + if (_isHIViewRef) + { + if (_hiviewRef) + { + HIRect HIViewRect1; + if(FALSE == HIViewIsValid(_hiviewRef)) + { + rect.top = 0; + rect.left = 0; + rect.right = 0; + rect.bottom = 0; + //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false"); + UnlockAGLCntx(); + } + HIViewGetBounds(_hiviewRef,&HIViewRect1); + HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL); + if(HIViewRect1.origin.x < 0) + { + rect.top = 0; + //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0"); + } + else + { + rect.top = HIViewRect1.origin.x; + } + + if(HIViewRect1.origin.y < 0) + { + rect.left = 0; + //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0"); + } + else + { + rect.left = HIViewRect1.origin.y; + } + + if(HIViewRect1.size.width < 0) + { + rect.right = 0; + //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0"); + } + else + { + rect.right = HIViewRect1.size.width; + } + + if(HIViewRect1.size.height < 0) + { + rect.bottom = 0; + //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0"); + } + else + { + rect.bottom = HIViewRect1.size.height; + } + + ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom); + UnlockAGLCntx(); + } + else + { + //WEBRTC_LOG(kTraceError, "invalid HIViewRef"); + UnlockAGLCntx(); + } + } + else + { + if (_windowRef) + { + GetWindowBounds(_windowRef, kWindowContentRgn, &rect); + UnlockAGLCntx(); + } + else + { + //WEBRTC_LOG(kTraceError, "No WindowRef"); + UnlockAGLCntx(); + } + } +} + +int VideoRenderAGL::UpdateClipping() +{ + //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()"); + LockAGLCntx(); + + if(_isHIViewRef) + { + if(FALSE == HIViewIsValid(_hiviewRef)) + { + //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. 
Returning -1"); + UnlockAGLCntx(); + return -1; + } + + RgnHandle visibleRgn = NewRgn(); + SetEmptyRgn (visibleRgn); + + if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true)) + { + } + + if(GL_FALSE == aglSetCurrentContext(_aglContext)) + { + GLenum glErr = aglGetError(); + //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__); + } + + if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION)) + { + GLenum glErr = aglGetError(); + //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__); + } + + if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn)) + { + GLenum glErr = aglGetError(); + //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__); + } + + DisposeRgn(visibleRgn); + } + else + { + //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n"); + } + + //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()"); + UnlockAGLCntx(); + return true; +} + +int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren) +{ + + // LockAGLCntx(); + + //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()"); + OSStatus osStatus = 0; + OSErr osErr = 0; + + RgnHandle tempRgn = NewRgn(); + if (IsControlVisible(control)) + { + RgnHandle childRgn = NewRgn(); + WindowRef window = GetControlOwner(control); + ControlRef rootControl; + GetRootControl(window, &rootControl); // 'wvnc' + ControlRef masterControl; + osStatus = GetSuperControl(rootControl, &masterControl); + // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus); + + if (masterControl != NULL) + { + CheckValidRegion(visibleRgn); + // init visibleRgn with region of 'wvnc' + osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn); + // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__); + //GetSuperControl(rootControl, &rootControl); + ControlRef tempControl = control, lastControl = 0; + while (tempControl != masterControl) // current control != master + + { + CheckValidRegion(tempRgn); + + // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl); + ControlRef subControl; + + osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn + // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__); + CheckValidRegion(tempRgn); + + osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl); + // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__); + CheckValidRegion(tempRgn); + + SectRgn(tempRgn, visibleRgn, visibleRgn); + CheckValidRegion(tempRgn); + CheckValidRegion(visibleRgn); + if (EmptyRgn(visibleRgn)) // if the region is empty, bail + break; + + if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function + + { + UInt16 numChildren; + osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols + // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__); + + // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren); + for (int i = 0; i < numChildren; i++) + { + osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder + // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, 
__LINE__); + if ( subControl == lastControl ) // break because of zorder + + { + // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__); + break; + } + + if (!IsControlVisible(subControl)) // dont' clip invisible controls + + { + // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__); + continue; + } + + if(!subControl) continue; + + osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg + // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__); + CheckValidRegion(tempRgn); + if(osStatus != 0) + { + // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__); + continue; + } + if(!tempRgn) + { + // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__); + continue; + } + + osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl); + CheckValidRegion(tempRgn); + // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__); + if(osStatus != 0) + { + // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__); + continue; + } + if(!rootControl) + { + // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__); + continue; + } + + UnionRgn(tempRgn, childRgn, childRgn); + CheckValidRegion(tempRgn); + CheckValidRegion(childRgn); + CheckValidRegion(visibleRgn); + if(!childRgn) + { + // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__); + continue; + } + + } // next child control + } + lastControl = tempControl; + GetSuperControl(tempControl, &subControl); + tempControl = subControl; + } + + DiffRgn(visibleRgn, childRgn, visibleRgn); + CheckValidRegion(visibleRgn); + CheckValidRegion(childRgn); + DisposeRgn(childRgn); + } + else + { + CopyRgn(tempRgn, visibleRgn); + CheckValidRegion(tempRgn); + CheckValidRegion(visibleRgn); + } + DisposeRgn(tempRgn); + } + + //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()"); + //_aglCritPtr->Leave(); + return 0; +} + +bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle) +{ + + Handle hndSize = (Handle)rHandle; + long size = GetHandleSize(hndSize); + if(0 == size) + { + + OSErr memErr = MemError(); + if(noErr != memErr) + { + // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr); + } + else + { + // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr"); + } + + } + else + { + // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size); + } + + if(false == IsValidRgnHandle(rHandle)) + { + // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle); + assert(false); + } + + int err = QDError(); + switch(err) + { + case 0: + break; + case -147: + //WEBRTC_LOG(kTraceError, "ERROR region too big"); + assert(false); + break; + + case -149: + //WEBRTC_LOG(kTraceError, "ERROR not enough stack"); + assert(false); + break; + + default: + //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err); + assert(false); + break; + } + + return true; +} + +int VideoRenderAGL::ChangeWindow(void* newWindowRef) +{ + + LockAGLCntx(); + + UnlockAGLCntx(); + return -1; +} + +int32_t VideoRenderAGL::StartRender() +{ + + LockAGLCntx(); + const unsigned int MONITOR_FREQ = 60; + if(TRUE == _renderingIsPaused) + { + //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__); + + // we already have the thread. 
Most likely StopRender() was called and they were paused
+        if(FALSE == _screenUpdateThread->Start())
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+        _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+        if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    _screenUpdateThread.reset(
+        new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if (!_screenUpdateThread)
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+    _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoRenderAGL::StopRender()
+{
+    LockAGLCntx();
+
+    if(!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
+    {
+        _renderingIsPaused = FALSE;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
+    UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoRenderAGL::DeleteAGLChannel(const uint32_t streamID)
+{
+
+    LockAGLCntx();
+
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
+        delete channel;
+        it++;
+    }
+    _aglChannels.clear();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoRenderAGL::GetChannelProperties(const uint16_t streamId,
+                                             uint32_t& zOrder,
+                                             float& left,
+                                             float& top,
+                                             float& right,
+                                             float& bottom)
+{
+
+    LockAGLCntx();
+    UnlockAGLCntx();
+    return -1;
+
+}
+
+void VideoRenderAGL::LockAGLCntx()
+{
+    _renderCritSec.Enter();
+}
+void VideoRenderAGL::UnlockAGLCntx()
+{
+    _renderCritSec.Leave();
+}
+
+} // namespace webrtc
+
+#endif // CARBON_RENDERING
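StartRender()/StopRender() above implement pause-and-resume around a polling loop: an rtc::PlatformThread whose proc blocks on an EventWrapper ticked by a periodic timer at roughly the monitor frequency. The pattern in isolation (sketch only; Paint() is a placeholder, API usage mirrors the code above):

    #include <memory>

    bool UpdateProc(void* obj);  // PlatformThread re-invokes this while it returns true

    struct RenderLoop {
      std::unique_ptr<rtc::PlatformThread> thread;
      webrtc::EventWrapper* event = webrtc::EventWrapper::Create();
      bool Tick() { event->Wait(100); /* Paint(); */ return true; }
      void Start() {
        thread.reset(new rtc::PlatformThread(UpdateProc, this, "ScreenUpdate"));
        thread->Start();
        event->StartTimer(true, 1000 / 60);  // periodic, ~60 Hz
      }
      void Stop() { event->Set(); thread->Stop(); event->StopTimer(); }
    };

    bool UpdateProc(void* obj) { return static_cast<RenderLoop*>(obj)->Tick(); }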
+ */ + +#include "webrtc/engine_configurations.h" + +#if defined(CARBON_RENDERING) + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_ + +#include "webrtc/base/platform_thread.h" +#include "webrtc/modules/video_render/video_render_defines.h" + +#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1 +#define NEW_HIVIEW_EVENT_HANDLER 1 +#define USE_STRUCT_RGN + +#include +#include +#include +#include +#include +#include +#include +#include + +class VideoRenderAGL; + +namespace webrtc { +class CriticalSectionWrapper; +class EventWrapper; + +class VideoChannelAGL : public VideoRenderCallback { + public: + + VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner); + virtual ~VideoChannelAGL(); + virtual int FrameSizeChange(int width, int height, int numberOfStreams); + virtual int DeliverFrame(const VideoFrame& videoFrame); + virtual int UpdateSize(int width, int height); + int SetStreamSettings(int streamId, float startWidth, float startHeight, + float stopWidth, float stopHeight); + int SetStreamCropSettings(int streamId, float startWidth, float startHeight, + float stopWidth, float stopHeight); + int RenderOffScreenBuffer(); + int IsUpdated(bool& isUpdated); + virtual int UpdateStretchSize(int stretchHeight, int stretchWidth); + virtual int32_t RenderFrame(const uint32_t streamId, VideoFrame& videoFrame); + + private: + + AGLContext _aglContext; + int _id; + VideoRenderAGL* _owner; + int _width; + int _height; + int _stretchedWidth; + int _stretchedHeight; + float _startHeight; + float _startWidth; + float _stopWidth; + float _stopHeight; + int _xOldWidth; + int _yOldHeight; + int _oldStretchedHeight; + int _oldStretchedWidth; + unsigned char* _buffer; + size_t _bufferSize; + size_t _incomingBufferSize; + bool _bufferIsUpdated; + bool _sizeInitialized; + int _numberOfStreams; + bool _bVideoSizeStartedChanging; + GLenum _pixelFormat; + GLenum _pixelDataType; + unsigned int _texture; +}; + +class VideoRenderAGL { + public: + VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId); + VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId); + ~VideoRenderAGL(); + + int Init(); + VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth, + float startHeight, float stopWidth, + float stopHeight); + VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder, + float startWidth, float startHeight, + float stopWidth, float stopHeight); + int DeleteAGLChannel(int channel); + int DeleteAllAGLChannels(); + int StopThread(); + bool IsFullScreen(); + bool HasChannels(); + bool HasChannel(int channel); + int GetChannels(std::list& channelList); + void LockAGLCntx(); + void UnlockAGLCntx(); + + static int GetOpenGLVersion(int& aglMajor, int& aglMinor); + + // ********** new module functions ************ // + int ChangeWindow(void* newWindowRef); + int32_t StartRender(); + int32_t StopRender(); + int32_t DeleteAGLChannel(const uint32_t streamID); + int32_t GetChannelProperties(const uint16_t streamId, uint32_t& zOrder, + float& left, float& top, float& right, + float& bottom); + + protected: + static bool ScreenUpdateThreadProc(void* obj); + bool ScreenUpdateProcess(); + int GetWindowRect(Rect& rect); + + private: + int CreateMixingContext(); + int RenderOffScreenBuffers(); + int SwapAndDisplayBuffers(); + int UpdateClipping(); + int CalculateVisibleRegion(ControlRef control, RgnHandle& visibleRgn, + bool clipChildren); + bool CheckValidRegion(RgnHandle rHandle); + void 
ParentWindowResized(WindowRef window); + + // Carbon GUI event handlers + static pascal OSStatus sHandleWindowResized( + EventHandlerCallRef nextHandler, EventRef theEvent, void* userData); + static pascal OSStatus sHandleHiViewResized( + EventHandlerCallRef nextHandler, EventRef theEvent, void* userData); + + HIViewRef _hiviewRef; + WindowRef _windowRef; + bool _fullScreen; + int _id; + webrtc::CriticalSectionWrapper& _renderCritSec; + // TODO(pbos): Remove unique_ptr and use PlatformThread directly. + std::unique_ptr _screenUpdateThread; + webrtc::EventWrapper* _screenUpdateEvent; + bool _isHIViewRef; + AGLContext _aglContext; + int _windowWidth; + int _windowHeight; + int _lastWindowWidth; + int _lastWindowHeight; + int _lastHiViewWidth; + int _lastHiViewHeight; + int _currentParentWindowHeight; + int _currentParentWindowWidth; + Rect _currentParentWindowBounds; + bool _windowHasResized; + Rect _lastParentWindowBounds; + Rect _currentHIViewBounds; + Rect _lastHIViewBounds; + Rect _windowRect; + std::map _aglChannels; + std::multimap _zOrderToChannel; + EventHandlerRef _hiviewEventHandlerRef; + EventHandlerRef _windowEventHandlerRef; + HIRect _currentViewBounds; + HIRect _lastViewBounds; + bool _renderingIsPaused; +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_ + +#endif // CARBON_RENDERING diff --git a/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc new file mode 100644 index 0000000000..f85be5fb5e --- /dev/null +++ b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/engine_configurations.h" +#if defined(CARBON_RENDERING) + +#include +#include "webrtc/modules/video_render/mac/video_render_agl.h" +#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h" +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" +#include "webrtc/system_wrappers/include/trace.h" + +namespace webrtc { + +VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) : +_id(id), +_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()), +_fullScreen(fullscreen), +_ptrWindow(window) +{ + + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__); + +} + +VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl() +{ + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__); + delete &_renderMacCarbonCritsect; +} + +int32_t +VideoRenderMacCarbonImpl::Init() +{ + CriticalSectionScoped cs(&_renderMacCarbonCritsect); + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__); + + if (!_ptrWindow) + { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__); + return -1; + } + + // We don't know if the user passed us a WindowRef or a HIViewRef, so test. 
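+    // The opaque pointer is probed as a WindowRef first, via Carbon's
+    // IsValidWindowPtr(); if that fails, it is re-probed as an HIViewRef via
+    // HIViewIsValid(). Both calls appear in the code below.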
+    bool referenceIsValid = false;
+
+    // Check if it's a valid WindowRef
+    //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
+    WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
+    //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
+    if (IsValidWindowPtr(*windowRef))
+    {
+        _ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
+        referenceIsValid = true;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
+    }
+    else
+    {
+        HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
+        if (HIViewIsValid(*hiviewRef))
+        {
+            _ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
+            referenceIsValid = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, hiviewRef);
+        }
+    }
+
+    if(!referenceIsValid)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    if(!_ptrCarbonRender)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    int retVal = _ptrCarbonRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::ChangeWindow(void* window)
+{
+    return -1;
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+    _ptrWindow = window;
+
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderMacCarbonImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                                  const uint32_t zOrder,
+                                                  const float left,
+                                                  const float top,
+                                                  const float right,
+                                                  const float bottom)
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    VideoChannelAGL* AGLChannel = NULL;
+
+    if(!_ptrWindow)
+    {
+    }
+
+    if(!AGLChannel)
+    {
+        AGLChannel = _ptrCarbonRender->CreateAGLChannel(streamId, zOrder, left, top, right, bottom);
+    }
+
+    return AGLChannel;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const uint32_t streamId)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    _ptrCarbonRender->DeleteAGLChannel(streamId);
+
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                                            uint32_t& zOrder,
+                                                            float& left,
+                                                            float& top,
+                                                            float& right,
+                                                            float& bottom) const
+{
+    return -1;
+    return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
+}
+
+int32_t
+VideoRenderMacCarbonImpl::StartRender()
+{
+    return _ptrCarbonRender->StartRender();
+}
+
+int32_t
+VideoRenderMacCarbonImpl::StopRender()
+{
+    return _ptrCarbonRender->StopRender();
+}
+
+VideoRenderType
+VideoRenderMacCarbonImpl::RenderType()
+{
+    return kRenderCarbon;
+}
+
+RawVideoType
+VideoRenderMacCarbonImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool
+VideoRenderMacCarbonImpl::FullScreen() +{ + return false; +} + +int32_t +VideoRenderMacCarbonImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const +{ + totalGraphicsMemory = 0; + availableGraphicsMemory = 0; + return 0; +} + +int32_t +VideoRenderMacCarbonImpl::GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const +{ + CriticalSectionScoped cs(&_renderMacCarbonCritsect); + //NSScreen* mainScreen = [NSScreen mainScreen]; + + //NSRect frame = [mainScreen frame]; + + //screenWidth = frame.size.width; + //screenHeight = frame.size.height; + return 0; +} + +uint32_t +VideoRenderMacCarbonImpl::RenderFrameRate(const uint32_t streamId) +{ + CriticalSectionScoped cs(&_renderMacCarbonCritsect); + return 0; +} + +int32_t +VideoRenderMacCarbonImpl::SetStreamCropping(const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + +int32_t VideoRenderMacCarbonImpl::ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + +int32_t +VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable) +{ + return 0; +} + +int32_t VideoRenderMacCarbonImpl::SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + +int32_t VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + + +} // namespace webrtc + +#endif // CARBON_RENDERING diff --git a/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h new file mode 100644 index 0000000000..9ad3a6cdd1 --- /dev/null +++ b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "webrtc/engine_configurations.h" +#if defined(CARBON_RENDERING) + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_ + +#include "webrtc/modules/video_render/i_video_render.h" + +namespace webrtc { + +class CriticalSectionWrapper; +class VideoRenderAGL; + +// Class definitions +class VideoRenderMacCarbonImpl : IVideoRender +{ +public: + /* + * Constructor/destructor + */ + + VideoRenderMacCarbonImpl(const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen); + + virtual ~VideoRenderMacCarbonImpl(); + + virtual int32_t Init(); + + virtual int32_t ChangeWindow(void* window); + + /************************************************************************** + * + * Incoming Streams + * + ***************************************************************************/ + virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId); + + virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom) const; + + /************************************************************************** + * + * Start/Stop + * + ***************************************************************************/ + + virtual int32_t StartRender(); + + virtual int32_t StopRender(); + + /************************************************************************** + * + * Properties + * + ***************************************************************************/ + + virtual VideoRenderType RenderType(); + + virtual RawVideoType PerferedVideoType(); + + virtual bool FullScreen(); + + virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const; + + virtual int32_t GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const; + + virtual uint32_t RenderFrameRate(const uint32_t streamId); + + virtual int32_t SetStreamCropping(const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t SetTransparentBackground(const bool enable); + + virtual int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t FullScreenRender(void* window, const bool enable) + { + // not supported in Carbon at this time + return -1; + } + +private: + int32_t _id; + CriticalSectionWrapper& _renderMacCarbonCritsect; + bool _fullScreen; + void* _ptrWindow; + VideoRenderAGL* _ptrCarbonRender; + +}; + + +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_ +#endif // CARBON_RENDERING diff --git a/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h 
b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h new file mode 100644 index 0000000000..21add272bb --- /dev/null +++ b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h @@ -0,0 +1,141 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/engine_configurations.h" + +#if defined(COCOA_RENDERING) + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_ + +#include "webrtc/modules/video_render/i_video_render.h" + +namespace webrtc { +class CriticalSectionWrapper; +class VideoRenderNSOpenGL; + +// Class definitions +class VideoRenderMacCocoaImpl : IVideoRender +{ +public: + /* + * Constructor/destructor + */ + + VideoRenderMacCocoaImpl(const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen); + + virtual ~VideoRenderMacCocoaImpl(); + + virtual int32_t Init(); + + virtual int32_t ChangeWindow(void* window); + + /************************************************************************** + * + * Incoming Streams + * + ***************************************************************************/ + virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId); + + virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom) const; + + /************************************************************************** + * + * Start/Stop + * + ***************************************************************************/ + + virtual int32_t StartRender(); + + virtual int32_t StopRender(); + + /************************************************************************** + * + * Properties + * + ***************************************************************************/ + + virtual VideoRenderType RenderType(); + + virtual RawVideoType PerferedVideoType(); + + virtual bool FullScreen(); + + virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const; + + virtual int32_t GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const; + + virtual uint32_t RenderFrameRate(const uint32_t streamId); + + virtual int32_t SetStreamCropping(const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t SetTransparentBackground(const bool enable); + + virtual int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* 
colorKey, + const float left, + const float top, + const float right, + const float bottom); + + virtual int32_t FullScreenRender(void* window, const bool enable); + +private: + int32_t _id; + CriticalSectionWrapper& _renderMacCocoaCritsect; + bool _fullScreen; + void* _ptrWindow; + VideoRenderNSOpenGL* _ptrCocoaRender; + +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_ +#endif // COCOA_RENDERING diff --git a/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm new file mode 100644 index 0000000000..5b017fecc0 --- /dev/null +++ b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/engine_configurations.h" +#if defined(COCOA_RENDERING) + +#include "webrtc/modules/video_render/mac/cocoa_render_view.h" +#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h" +#include "webrtc/modules/video_render/mac/video_render_nsopengl.h" +#include "webrtc/system_wrappers/include/critical_section_wrapper.h" +#include "webrtc/system_wrappers/include/trace.h" + +namespace webrtc { + +VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const int32_t id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) : +_id(id), +_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()), +_fullScreen(fullscreen), +_ptrWindow(window) +{ + + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__); +} + +VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl() +{ + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__); + delete &_renderMacCocoaCritsect; + if (_ptrCocoaRender) + { + delete _ptrCocoaRender; + _ptrCocoaRender = NULL; + } +} + +int32_t +VideoRenderMacCocoaImpl::Init() +{ + + CriticalSectionScoped cs(&_renderMacCocoaCritsect); + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__); + + // cast ptrWindow from void* to CocoaRenderer. Void* was once NSOpenGLView, and CocoaRenderer is NSOpenGLView. 
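+    // A defensive variant could verify the view's class before casting; this
+    // is only a sketch, not part of the original change:
+    //
+    //   if (![(id)_ptrWindow isKindOfClass:[CocoaRenderView class]]) {
+    //       return -1;  // not a CocoaRenderView; nothing we can render into
+    //   }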
+ _ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id); + if (!_ptrWindow) + { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__); + return -1; + } + int retVal = _ptrCocoaRender->Init(); + if (retVal == -1) + { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__); + return -1; + } + + return 0; +} + +int32_t +VideoRenderMacCocoaImpl::ChangeWindow(void* window) +{ + + CriticalSectionScoped cs(&_renderMacCocoaCritsect); + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window); + + if (window == NULL) + { + return -1; + } + _ptrWindow = window; + + + _ptrWindow = window; + _ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow); + + return 0; +} + +VideoRenderCallback* +VideoRenderMacCocoaImpl::AddIncomingRenderStream(const uint32_t streamId, + const uint32_t zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + CriticalSectionScoped cs(&_renderMacCocoaCritsect); + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__); + VideoChannelNSOpenGL* nsOpenGLChannel = NULL; + + if(!_ptrWindow) + { + } + + if(!nsOpenGLChannel) + { + nsOpenGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom); + } + + return nsOpenGLChannel; + +} + +int32_t +VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const uint32_t streamId) +{ + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__); + CriticalSectionScoped cs(&_renderMacCocoaCritsect); + _ptrCocoaRender->DeleteNSGLChannel(streamId); + + return 0; +} + +int32_t +VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const uint32_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom) const +{ + return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom); +} + +int32_t +VideoRenderMacCocoaImpl::StartRender() +{ + return _ptrCocoaRender->StartRender(); +} + +int32_t +VideoRenderMacCocoaImpl::StopRender() +{ + return _ptrCocoaRender->StopRender(); +} + +VideoRenderType +VideoRenderMacCocoaImpl::RenderType() +{ + return kRenderCocoa; +} + +RawVideoType +VideoRenderMacCocoaImpl::PerferedVideoType() +{ + return kVideoI420; +} + +bool +VideoRenderMacCocoaImpl::FullScreen() +{ + return false; +} + +int32_t +VideoRenderMacCocoaImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory, + uint64_t& availableGraphicsMemory) const +{ + totalGraphicsMemory = 0; + availableGraphicsMemory = 0; + return 0; +} + +int32_t +VideoRenderMacCocoaImpl::GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const +{ + CriticalSectionScoped cs(&_renderMacCocoaCritsect); + NSScreen* mainScreen = [NSScreen mainScreen]; + + NSRect frame = [mainScreen frame]; + + screenWidth = frame.size.width; + screenHeight = frame.size.height; + return 0; +} + +uint32_t +VideoRenderMacCocoaImpl::RenderFrameRate(const uint32_t streamId) +{ + CriticalSectionScoped cs(&_renderMacCocoaCritsect); + return 0; +} + +int32_t +VideoRenderMacCocoaImpl::SetStreamCropping(const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + +int32_t VideoRenderMacCocoaImpl::ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + +int32_t 
+VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable) +{ + return 0; +} + +int32_t VideoRenderMacCocoaImpl::SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, + const float top, + const float right, + const float bottom) +{ + return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom); +} + +int32_t VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, + const float top, + const float right, + const float bottom) +{ + return 0; +} + +int32_t VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable) +{ + return -1; +} + +} // namespace webrtc + +#endif // COCOA_RENDERING diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.h b/webrtc/modules/video_render/mac/video_render_nsopengl.h new file mode 100644 index 0000000000..457557dad6 --- /dev/null +++ b/webrtc/modules/video_render/mac/video_render_nsopengl.h @@ -0,0 +1,192 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "webrtc/engine_configurations.h" +#if defined(COCOA_RENDERING) + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_ + +#import +#import +#import +#import +#include +#include +#include +#include + +#include "webrtc/base/thread_annotations.h" +#include "webrtc/modules/video_render/video_render_defines.h" + +#import "webrtc/modules/video_render/mac/cocoa_full_screen_window.h" +#import "webrtc/modules/video_render/mac/cocoa_render_view.h" + +class Trace; + +namespace rtc { +class PlatformThread; +} // namespace rtc + +namespace webrtc { +class EventTimerWrapper; +class VideoRenderNSOpenGL; +class CriticalSectionWrapper; + +class VideoChannelNSOpenGL : public VideoRenderCallback { +public: + VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner); + virtual ~VideoChannelNSOpenGL(); + + // A new frame is delivered + virtual int DeliverFrame(const VideoFrame& videoFrame); + + // Called when the incoming frame size and/or number of streams in mix + // changes. 
+ virtual int FrameSizeChange(int width, int height, int numberOfStreams); + + virtual int UpdateSize(int width, int height); + + // Setup + int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight); + int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight); + + // Called when it's time to render the last frame for the channel + int RenderOffScreenBuffer(); + + // Returns true if a new buffer has been delivered to the texture + int IsUpdated(bool& isUpdated); + virtual int UpdateStretchSize(int stretchHeight, int stretchWidth); + + // ********** new module functions ************ // + virtual int32_t RenderFrame(const uint32_t streamId, + const VideoFrame& videoFrame); + + // ********** new module helper functions ***** // + int ChangeContext(NSOpenGLContext *nsglContext); + int32_t GetChannelProperties(float& left, + float& top, + float& right, + float& bottom); + +private: + + NSOpenGLContext* _nsglContext; + const int _id; + VideoRenderNSOpenGL* _owner; + int32_t _width; + int32_t _height; + float _startWidth; + float _startHeight; + float _stopWidth; + float _stopHeight; + int _stretchedWidth; + int _stretchedHeight; + int _oldStretchedHeight; + int _oldStretchedWidth; + unsigned char* _buffer; + size_t _bufferSize; + size_t _incomingBufferSize; + bool _bufferIsUpdated; + int _numberOfStreams; + GLenum _pixelFormat; + GLenum _pixelDataType; + unsigned int _texture; +}; + +class VideoRenderNSOpenGL +{ + +public: // methods + VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId); + ~VideoRenderNSOpenGL(); + + static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor); + + // Allocates textures + int Init(); + VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight); + VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight); + int DeleteNSGLChannel(int channel); + int DeleteAllNSGLChannels(); + int StopThread(); + bool IsFullScreen(); + bool HasChannels(); + bool HasChannel(int channel); + int GetChannels(std::list& channelList); + void LockAGLCntx() EXCLUSIVE_LOCK_FUNCTION(_nsglContextCritSec); + void UnlockAGLCntx() UNLOCK_FUNCTION(_nsglContextCritSec); + + // ********** new module functions ************ // + int ChangeWindow(CocoaRenderView* newWindowRef); + int32_t StartRender(); + int32_t StopRender(); + int32_t DeleteNSGLChannel(const uint32_t streamID); + int32_t GetChannelProperties(const uint16_t streamId, + uint32_t& zOrder, + float& left, + float& top, + float& right, + float& bottom); + + int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, + const float top, + const float right, + const float bottom); + + // ********** new module helper functions ***** // + int configureNSOpenGLEngine(); + int configureNSOpenGLView(); + int setRenderTargetWindow(); + int setRenderTargetFullScreen(); + +protected: // methods + static bool ScreenUpdateThreadProc(void* obj); + bool ScreenUpdateProcess(); + int GetWindowRect(Rect& rect); + +private: // methods + + int CreateMixingContext(); + int RenderOffScreenBuffers(); + int DisplayBuffers(); + +private: // variables + + + CocoaRenderView* _windowRef; + bool _fullScreen; + int _id; + CriticalSectionWrapper& _nsglContextCritSec; + // 
TODO(pbos): Remove unique_ptr and use PlatformThread directly.
+    std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
+    EventTimerWrapper* _screenUpdateEvent;
+    NSOpenGLContext* _nsglContext;
+    NSOpenGLContext* _nsglFullScreenContext;
+    CocoaFullScreenWindow* _fullScreenWindow;
+    Rect _windowRect; // The size of the window
+    int _windowWidth;
+    int _windowHeight;
+    std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
+    std::multimap<int, int> _zOrderToChannel;
+    bool _renderingIsPaused;
+    NSView* _windowRefSuperView;
+    NSRect _windowRefSuperViewFrame;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#endif // COCOA_RENDERING
diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/webrtc/modules/video_render/mac/video_render_nsopengl.mm
new file mode 100644
index 0000000000..b7683a96af
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_nsopengl.mm
@@ -0,0 +1,1247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
+_nsglContext( nsglContext),
+_id( iId),
+_owner( owner),
+_width( 0),
+_height( 0),
+_startWidth( 0.0f),
+_startHeight( 0.0f),
+_stopWidth( 0.0f),
+_stopHeight( 0.0f),
+_stretchedWidth( 0),
+_stretchedHeight( 0),
+_oldStretchedHeight( 0),
+_oldStretchedWidth( 0),
+_buffer( 0),
+_bufferSize( 0),
+_incomingBufferSize( 0),
+_bufferIsUpdated( false),
+_numberOfStreams( 0),
+_pixelFormat( GL_RGBA),
+_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+_texture( 0)
+{
+
+}
+
+VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
+{
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    if (_texture != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
+{
+    _owner->LockAGLCntx();
+
+    _nsglContext = nsglContext;
+    [_nsglContext makeCurrentContext];
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
+                                                   float& right, float& bottom)
+{
+    _owner->LockAGLCntx();
+
+    left = _startWidth;
+    top = _startHeight;
+    right = _stopWidth;
+    bottom = _stopHeight;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoChannelNSOpenGL::RenderFrame(const uint32_t /*streamId*/,
+                                          const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if(_width != videoFrame.width() ||
+     _height != videoFrame.height()) {
+    if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
+  int ret = DeliverFrame(videoFrame);
+
+  _owner->UnlockAGLCntx();
+  return ret;
+}
+
+int VideoChannelNSOpenGL::UpdateSize(int width, int height)
+{
+    _owner->LockAGLCntx();
+    _width = width;
+    _height = height;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    // We got a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    [_nsglContext makeCurrentContext];
+
+    if(glIsTexture(_texture))
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+
+    if (glErr != GL_NO_ERROR)
+    {
+
+    }
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up the texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+                 0, // level
+                 GL_RGBA, // internal format
+                 _width, // width
+                 _height, // height
+                 0, // border 0/1 = off/on
+                 _pixelFormat, // format, GL_RGBA
+                 _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+                 _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
+      _incomingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Using the VideoFrame for YV12: YV12 is YVU; I420 assumes
+  // YUV.
+  // TODO(mikhal) : Use appropriate functionality.
+  // TODO(wu): See if we are using glTexSubImage2D correctly.
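+  // Buffer-size arithmetic behind the check above and the conversion below
+  // (per CalcBufferSize): I420 occupies width*height*3/2 bytes, while the
+  // BGRA upload buffer allocated in FrameSizeChange() is width*height*4
+  // bytes. E.g. a 640x480 frame: 460800 bytes in, 1228800 bytes out.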
+  int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
+  if (rgbRet < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  [_nsglContext makeCurrentContext];
+
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "ERROR %d while calling glBindTexture", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // level, not used
+                  0, // start point x (lower left of picture)
+                  0, // start point y
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // picture format of _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "ERROR %d while calling glTexSubImage2d", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+
+  _owner->UnlockAGLCntx();
+  return 0;
+}
+
+int VideoChannelNSOpenGL::RenderOffScreenBuffer()
+{
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    //    if(_fullscreen)
+    //    {
+    //        NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
+    //        _width = mainDisplayRect.size.width;
+    //        _height = mainDisplayRect.size.height;
+    //        glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
+    //        float newX = mainDisplayRect.size.width/_width;
+    //        float newY = mainDisplayRect.size.height/_height;
+
+    // convert from 0.0 <= size <= 1.0 to
+    // OpenGL's -1.0 < size < 1.0
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    [_nsglContext makeCurrentContext];
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    glLoadIdentity();
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+    glBegin(GL_POLYGON);
+    {
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
+{
+    _owner->LockAGLCntx();
+
+    isUpdated = _bufferIsUpdated;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+    return retVal;
+}
+
+int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    return -1;
+}
+
+/*
+ *
+ *    VideoRenderNSOpenGL
+ *
+ */
+
+VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
+_windowRef( (CocoaRenderView*)windowRef),
+_fullScreen( fullScreen),
+_id( iId),
+_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()), +_screenUpdateEvent(EventTimerWrapper::Create()), +_nsglContext( 0), +_nsglFullScreenContext( 0), +_fullScreenWindow( nil), +_windowRect( ), +_windowWidth( 0), +_windowHeight( 0), +_nsglChannels( ), +_zOrderToChannel( ), +_renderingIsPaused (FALSE), +_windowRefSuperView(NULL), +_windowRefSuperViewFrame(NSMakeRect(0,0,0,0)) +{ + _screenUpdateThread.reset(new rtc::PlatformThread( + ScreenUpdateThreadProc, this, "ScreenUpdateNSOpenGL")); +} + +int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef) +{ + + LockAGLCntx(); + + _windowRef = newWindowRef; + + if(CreateMixingContext() == -1) + { + UnlockAGLCntx(); + return -1; + } + + int error = 0; + std::map::iterator it = _nsglChannels.begin(); + while (it!= _nsglChannels.end()) + { + error |= (it->second)->ChangeContext(_nsglContext); + it++; + } + if(error != 0) + { + UnlockAGLCntx(); + return -1; + } + + UnlockAGLCntx(); + return 0; +} + +/* Check if the thread and event already exist. + * If so then they will simply be restarted + * If not then create them and continue + */ +int32_t VideoRenderNSOpenGL::StartRender() +{ + + LockAGLCntx(); + + const unsigned int MONITOR_FREQ = 60; + if(TRUE == _renderingIsPaused) + { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread"); + + // we already have the thread. Most likely StopRender() was called and they were paused + _screenUpdateThread->Start(); + if (FALSE == + _screenUpdateEvent->StartTimer(true, 1000 / MONITOR_FREQ)) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent"); + UnlockAGLCntx(); + return -1; + } + + _screenUpdateThread->SetPriority(rtc::kRealtimePriority); + + UnlockAGLCntx(); + return 0; + } + + + if (!_screenUpdateThread) + { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread"); + UnlockAGLCntx(); + return -1; + } + + + UnlockAGLCntx(); + return 0; +} +int32_t VideoRenderNSOpenGL::StopRender() +{ + + LockAGLCntx(); + + /* The code below is functional + * but it pauses for several seconds + */ + + // pause the update thread and the event timer + if(!_screenUpdateThread || !_screenUpdateEvent) + { + _renderingIsPaused = TRUE; + + UnlockAGLCntx(); + return 0; + } + + _screenUpdateThread->Stop(); + if (FALSE == _screenUpdateEvent->StopTimer()) { + _renderingIsPaused = FALSE; + + UnlockAGLCntx(); + return -1; + } + + _renderingIsPaused = TRUE; + + UnlockAGLCntx(); + return 0; +} + +int VideoRenderNSOpenGL::configureNSOpenGLView() +{ + return 0; + +} + +int VideoRenderNSOpenGL::configureNSOpenGLEngine() +{ + + LockAGLCntx(); + + // Disable not needed functionality to increase performance + glDisable(GL_DITHER); + glDisable(GL_ALPHA_TEST); + glDisable(GL_STENCIL_TEST); + glDisable(GL_FOG); + glDisable(GL_TEXTURE_2D); + glPixelZoom(1.0, 1.0); + glDisable(GL_BLEND); + glDisable(GL_DEPTH_TEST); + glDepthMask(GL_FALSE); + glDisable(GL_CULL_FACE); + + // Set texture parameters + glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0); + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR); + glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE); + glPixelStorei(GL_UNPACK_ALIGNMENT, 1); + 
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+        || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+    glViewport(0, 0, _windowWidth, _windowHeight);
+
+    // Synchronize buffer swaps with vertical refresh rate
+    GLint swapInt = 1;
+    [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::setRenderTargetWindow()
+{
+    LockAGLCntx();
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
+        (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderView:fmt];
+    }
+    else
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::setRenderTargetFullScreen()
+{
+    LockAGLCntx();
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
+        (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
+
+    // Store original superview and frame for use when exiting full screen
+    _windowRefSuperViewFrame = [_windowRef frame];
+    _windowRefSuperView = [_windowRef superview];
+
+    // create new fullscreen window
+    NSRect screenRect = [[NSScreen mainScreen]frame];
+    [_windowRef setFrame:screenRect];
+    [_windowRef setBounds:screenRect];
+
+    _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
+    [_fullScreenWindow grabFullScreen];
+    [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderViewFullScreen:fmt];
+    }
+    else
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
+{
+    if(_fullScreen)
+    {
+        if(_fullScreenWindow)
+        {
+            // Detach CocoaRenderView from full screen view back to
+            // its original parent.
+            [_windowRef removeFromSuperview];
+            if(_windowRefSuperView)
+            {
+                [_windowRefSuperView addSubview:_windowRef];
+                [_windowRef setFrame:_windowRefSuperViewFrame];
+            }
+
+            WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
+            [_fullScreenWindow releaseFullScreen];
+        }
+    }
+
+    // Signal event to exit thread, then delete it
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        tmpPtr->Stop();
+        delete tmpPtr;
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_nsglContext != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        _nsglContext = nil;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it!= _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+        it = _nsglChannels.begin();
+    }
+    _nsglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+}
+
+/* static */
+int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
+{
+    return -1;
+}
+
+int VideoRenderNSOpenGL::Init()
+{
+    LockAGLCntx();
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    if (CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (HasChannel(channel))
+    {
+        return NULL;
+    }
+
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+
+    }
+
+    VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        if (newAGLChannel)
+        {
+            delete newAGLChannel;
+            newAGLChannel = NULL;
+        }
+
+        return NULL;
+    }
+
+    _nsglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);
+
+    return newAGLChannel;
+}
+
+int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* channel = it->second;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, it->first);
+        delete channel;
+        it++;
+    }
+    _nsglChannels.clear();
+    return 0;
+}
+
+int32_t VideoRenderNSOpenGL::DeleteNSGLChannel(const uint32_t channel)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.find(channel);
+    if (it != _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+    }
+    else
+    {
+        return -1;
+    }
+
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == (int)channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;
+    }
+
+    return 0;
+}
+
+int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
+                                                  uint32_t& zOrder,
+                                                  float& left,
+                                                  float& top,
+                                                  float& right,
+                                                  float& bottom)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    bool channelFound = false;
+
+    // Loop through all channels until we find a match.
+    // From that, get the zOrder.
+    // From that, get T, L, R, B
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+        rIt != _zOrderToChannel.rend();
+        rIt++)
+    {
+        if(streamId == rIt->second)
+        {
+            channelFound = true;
+
+            zOrder = rIt->first;
+
+            std::map<int, VideoChannelNSOpenGL*>::iterator mIt = _nsglChannels.find(streamId);
+            VideoChannelNSOpenGL* tempChannel = mIt->second;
+
+            if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
+            {
+                return -1;
+            }
+            break;
+        }
+    }
+
+    if(false == channelFound)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int VideoRenderNSOpenGL::StopThread()
+{
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                 "%s Stopping thread %p", __FUNCTION__, tmpPtr);
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        tmpPtr->Stop();
+        delete tmpPtr;
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderNSOpenGL::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+    return _fullScreen;
+}
+
+bool VideoRenderNSOpenGL::HasChannels()
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (_nsglChannels.begin() != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+bool VideoRenderNSOpenGL::HasChannel(int channel)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
+                         __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+            return NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel: channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
+                     __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+
+        std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+        while(zIt != _zOrderToChannel.end())
+        {
+            if (zIt->second == channel)
+            {
+                if (zIt->first != zOrder)
+                {
+                    _zOrderToChannel.erase(zIt);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            zIt++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+
+bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderNSOpenGL::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(10);
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
+        UnlockAGLCntx();
+        return false;
+    }
+
+    [_nsglContext makeCurrentContext];
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+        || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+        glViewport(0, 0, _windowWidth, _windowHeight);
+    }
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+        aglChannel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+
+    if (updated)
+    {
+        // At least one buffer is updated; we need to repaint the texture
+        if (RenderOffScreenBuffers() != -1)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+    }
+    UnlockAGLCntx();
+    return true;
+}
+
+/*
+ *
+ *    Functions for creating mixing buffers and screen settings
+ *
+ */
+
+int VideoRenderNSOpenGL::CreateMixingContext()
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if(_fullScreen)
+    {
+        if(-1 == setRenderTargetFullScreen())
+        {
+            return -1;
+        }
+    }
+    else
+    {
+        if(-1 == setRenderTargetWindow())
+        {
+            return -1;
+        }
+    }
+
+    configureNSOpenGLEngine();
+
+    DisplayBuffers();
+
+    GLenum glErr = glGetError();
+    if (glErr)
+    {
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    Rendering functions
+ *
+ */
+
+int VideoRenderNSOpenGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size; it might have changed since the last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [_nsglContext makeCurrentContext];
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting with the highest zOrder and ending with the lowest.
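+    // _zOrderToChannel maps zOrder -> channel id, so with entries such as
+    // {1 -> 11, 2 -> 12} (illustrative values only) the reverse iteration
+    // below visits channel 12 (zOrder 2) before channel 11 (zOrder 1).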
+ for (std::multimap::reverse_iterator rIt = _zOrderToChannel.rbegin(); + rIt != _zOrderToChannel.rend(); + rIt++) + { + int channelId = rIt->second; + std::map::iterator it = _nsglChannels.find(channelId); + + VideoChannelNSOpenGL* aglChannel = it->second; + + aglChannel->RenderOffScreenBuffer(); + } + + DisplayBuffers(); + + UnlockAGLCntx(); + return 0; +} + +/* + * + * Help functions + * + * All help functions assumes external protections + * + */ + +int VideoRenderNSOpenGL::DisplayBuffers() +{ + + LockAGLCntx(); + + glFinish(); + [_nsglContext flushBuffer]; + + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__); + + UnlockAGLCntx(); + return 0; +} + +int VideoRenderNSOpenGL::GetWindowRect(Rect& rect) +{ + + CriticalSectionScoped cs(&_nsglContextCritSec); + + if (_windowRef) + { + if(_fullScreen) + { + NSRect mainDisplayRect = [[NSScreen mainScreen] frame]; + rect.bottom = 0; + rect.left = 0; + rect.right = mainDisplayRect.size.width; + rect.top = mainDisplayRect.size.height; + } + else + { + rect.top = [_windowRef frame].origin.y; + rect.left = [_windowRef frame].origin.x; + rect.bottom = [_windowRef frame].origin.y + [_windowRef frame].size.height; + rect.right = [_windowRef frame].origin.x + [_windowRef frame].size.width; + } + + return 0; + } + else + { + return -1; + } +} + +int32_t VideoRenderNSOpenGL::SetText(const uint8_t /*textId*/, + const uint8_t* /*text*/, + const int32_t /*textLength*/, + const uint32_t /*textColorRef*/, + const uint32_t /*backgroundColorRef*/, + const float /*left*/, + const float /*top*/, + const float /*right*/, + const float /*bottom*/) +{ + + return 0; + +} + +void VideoRenderNSOpenGL::LockAGLCntx() +{ + _nsglContextCritSec.Enter(); +} +void VideoRenderNSOpenGL::UnlockAGLCntx() +{ + _nsglContextCritSec.Leave(); +} + +/* + + bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen) + { + NSRect mainDisplayRect, viewRect; + + // Create a screen-sized window on the display you want to take over + // Note, mainDisplayRect has a non-zero origin if the key window is on a secondary display + mainDisplayRect = [[NSScreen mainScreen] frame]; + fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect styleMask:NSBorderlessWindowMask + backing:NSBackingStoreBuffered defer:YES]; + + // Set the window level to be above the menu bar + [fullScreenWindow setLevel:NSMainMenuWindowLevel+1]; + + // Perform any other window configuration you desire + [fullScreenWindow setOpaque:YES]; + [fullScreenWindow setHidesOnDeactivate:YES]; + + // Create a view with a double-buffered OpenGL context and attach it to the window + // By specifying the non-fullscreen context as the shareContext, we automatically inherit the OpenGL objects (textures, etc) it has defined + viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width, mainDisplayRect.size.height); + fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect shareContext:[openGLView openGLContext]]; + [fullScreenWindow setContentView:fullScreenView]; + + // Show the window + [fullScreenWindow makeKeyAndOrderFront:self]; + + // Set the scene with the full-screen viewport and viewing transformation + [scene setViewportRect:viewRect]; + + // Assign the view's MainController to self + [fullScreenView setMainController:self]; + + if (!isAnimating) { + // Mark the view as needing drawing to initalize its contents + [fullScreenView setNeedsDisplay:YES]; + } + else { + // Start playing the animation + [fullScreenView startAnimation]; + } + + } + + + + */ + 
+
+} // namespace webrtc
+
+#endif  // COCOA_RENDERING
diff --git a/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp b/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
new file mode 100644
index 0000000000000000000000000000000000000000..c443a58f6cb2a7b13066f6db91819cbc54ac4cc8
GIT binary patch
literal 304182
[base85-encoded binary literal for the 304182-byte renderStartImage.bmp omitted]
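
The renderer restored above keeps two containers in sync: _nsglChannels, a std::map from channel id to VideoChannelNSOpenGL*, and _zOrderToChannel, a std::multimap from zOrder to channel id that RenderOffScreenBuffers() walks in reverse so the highest zOrder is visited first. A minimal standalone sketch of that pattern (illustrative names only; Channel stands in for VideoChannelNSOpenGL, and this is not code from the patch):

// zorder_sketch.cc -- build with: c++ zorder_sketch.cc
#include <cstdio>
#include <map>

struct Channel { int id; };

int main() {
    std::map<int, Channel*> nsglChannels;     // channel id -> channel object
    std::multimap<int, int> zOrderToChannel;  // zOrder -> channel id

    Channel a = {1};
    Channel b = {2};
    nsglChannels[a.id] = &a;
    nsglChannels[b.id] = &b;
    zOrderToChannel.insert(std::pair<int, int>(0, a.id));
    zOrderToChannel.insert(std::pair<int, int>(5, b.id));

    // Walk from the highest zOrder down, as RenderOffScreenBuffers() does,
    // resolving each channel id back to its channel object.
    for (std::multimap<int, int>::reverse_iterator rIt = zOrderToChannel.rbegin();
         rIt != zOrderToChannel.rend();
         rIt++) {
        std::map<int, Channel*>::iterator it = nsglChannels.find(rIt->second);
        if (it != nsglChannels.end())
            std::printf("render channel %d (zOrder %d)\n", it->second->id, rIt->first);
    }
    return 0;
}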
zWXB@W0m9KJ>~~Dfnk3wA6$iG_6mhALq3F*Fh0n0 z$jOI<$=jvKumm5Vfd&f;VJo9HJ2gYjZC~Uf$Q7hqg%C3KC`dvuVwd)0Ljj82wa&~x zJWs5l#7M?}@TWfk2g%ZWaAQ(@5GA5E`>>Y~6ghQ+gPaHBgMbk7L6ODMd8hTnf2Pvlc1GB# zbmh()2~1#(fRzG=1-3#*Wbkl!Jj($u5_eKu%mh;m4yK3Q;=#%hYz`WLP})9iV1dlx?o({ymYS5;1C+S<3OB>MA2NI zXf;5cZi=6F8A7vDvtsK>03_ig*rg2IK9zos^%x2gABp<&L|LWFV#p;b43qX0e!pT!)Fs@P~W=3nIgx<3pC`;PaL_;Xvod@IkrCC{r9=GOhfzeE1kRSVA(Rgqa6)Un)QnUh8zmgis0j z;QC`eB(NZFpHAb0Jb{o8<_3h8YXp#i+hsyYkRj(~f(+reF#fON0}!zILg|#S<*ot> zYnS#M^2HY+AHw{DvYkSN!NDa(+y>7aktc$N&_VfhkXT4qdg3yH2>W27-#GB6`LMJXVb7N$$k68FLk0!Gf^4e8O`7@G_tO@JW2& zTlAcqd`NJ>=6V#3w*hu*X+9|Hm_>69fTA0NWtBJ>|} z!U0O)Gk}ORA2i$MbQ>_S$aB!1=*P~JSN#KsNJu*Kpjbm5LV^->GU?|;1f^46qCzPU z5%IxWw`^Rkej9iBNs{>2W_kX&5at-96?>o&AX4xpyw8vTLgF&v3~>2}W}X-jS-sz1 zX}9P!9Ytf*@^>y%4LeOI$EX!d)-_84XfBrRhyZ5(f2icJIB$dc zpg9AkWDGx^*|}wu5F88<^uq)~42k#T7-dgh_q{QBW&t7^`84ApK$f?*ci!T60$b^LMDxZ^(I;X?uocDXbkEa70F_;@}KlH^1FfC5CofwVu6_8CGO&!@|@b=S-s0wl0tx5Nkgl=Y+Vsb~$tzdZ*& z(8+kep&+7xQNlXK2bnqg|6lkJ`U&WFr@@F52ZxX2gU)(2_dYag8N1m&W2DRW(97WK#@p6 zr2Pj~Oog^Ak3v3!^<&^JXBZrbVET1VlH}O=*yK?XZb0(#@w`$KE?+U8CD)$JOXxQ} zR8+t5@omDuv2j_boCIran0mN0;3RIjC&YUzxMh66_6-gw;)AvDL3(oel0}L*qnK04 z=Rc@lZi7wVTt;(uT$p4rLS)^+(q!D@atqd4^6|m;V&vn4 z+=hq`K5b|~K%pqnE*&2{B9a`4lZ~4CGj}!Kx5kGM0_gubAJ{tKc28t4DGCnhHZd&C zhj4)LflebpLNSLS(f@DdgO3-Y3g^(|%>f8I^YKAr3g6)L-0WwgFP!j{(8h4>hK@FTY1Vm(5H zo|bOMp3jGJimF z%uu)I=j21i%yYR7dHLY?Z({rRX5X6g5Gt6nY*U?_cn=~h`GBJ0=g`ta8iaZx#E3rP zw^*73U>wNlIp}Vh4=Q}pN5ZJY>+W5d*Jc*3&n{*pRS6kx+FF_qB7+?Q&r>X+A`D7xRJV zYkcTM7#vV=JeKBzv+jUD$>$rFpv2SJ1PVl$L}!`|VNV(nd^`#kJS8wI5kQYMdYJv% zfREt=#Lx%r<7UEPkDf`(-M^3zdLYj<|G_yzJ_DKN0~iT!Nw?037@iNn!O6^fDIp)w zWtBu3>X!=v7Esb~a7LP|{s9^oA3|uzoM{t?$j1kM#wYOs$H1i%XWS`Ijt{YKfMRJz zrv#fiZA{jH_}zJIxPS?2MTrY9<`4@ma`J)v18^X?IQy8g$>R-VF{v^J8>sa zXsJX38Zton|E!#<_$qEG!J1Bpd<-CyBN*Ytm=|*{rtpH*hq}A;=StD01NrEdFC-u| zkne3t{*2GcV?_ZWJ3YCH+g(V-bXAcKZPq6kxpAOTu8yrZA20 zR;V*36x}kvBB6vML){*jR6=_ReqKDxpRMFiV~%%TWc~^9m`o4x*h%TJ~d z$Y5e9$f>0HkRXGVA1uuWQ6j62&yfhj6Li_2o3eduB1JlP%~uZ{b?J6%X+_peD~BO* znds!MJK6As3NwUl9hWa+X0AV=b_<%5GpE;!H$4MI4e zFUq@ScWYL5h3^4vNp|JkS}Y7i8cTCZL$fXFBun z!A2&Ogts6jhz8O_KG5@rd{D?>t^%IJANmjCgU+TlG9)q&R`ZbHgY8s8iE5?%X2x*) zQdoJ2|6qK;rbLD-1D=z>POO?2pC=e2~>A%s)hd4G71ity?`G z!-weMzSJQMXT%3B8H9Y$xq_jcVz5Gl%vn>sS#ds_4ed9f>w!E?D=GFUJ(>Q*c)JY0 z%XIFVzN8XnAu`#9B|Z`&B0&k=5&;s!xJ>&G;Vz6UZu}egkVXV1yo4m+z@-L@5@}YX z9~JUJdB{v~k?VXpMHczu3&UNjPVco5W&HpK`LuCTgGf9)*l+COjx(foD&L`Xv6fVv|FMlEMc?% z!mUr{gT4!UHzkdNoIQ#fvN+&(Y~}MG^7#qTyJq6Fq+N#V6teD&tDu+E1YU&Md)lrQ zFZ>BU$o%br#s^!Z&;Y$>)7%Cr*gh+mhKTSkZiYM!<3qxK2;qQka0nMSLq3Gg1A0FH zfn^JY6`F9-KZXxknFpV-On4C%Tzq_d5pf{il&Y<%*5bWLzy4xg5?yA8Cx#alSE#j4 z6tiU%*LZEk5~ z2%K#Ir(6o1BzBQqj%?#e|WZlTzono%K7nGb516X6(_DPG`gH!< zvjrqUiF|zEIWqjv*wMpI=>{(0i#E*)jZA#jp8RhGLgL~2QaXhKa|}W z>K7kkN+k0Satv~#VCqed{!ipXXz8Jj6&nh~2esB|gVm$))`>ztG{Aod`M{;dr~L=x zgSik{>z+Ym;me{ZdzeK8hXfxkJekYKhs%#?>r9>Xc5)rRhoK`Fz&mLR`maK0_E#I6Zb7%=%=2 zcF6$eJg`#0%cT5zfsXjdwBCQfg$TtcVfGT*k#T?rniYp5rH588U1YG|?0#Ezu|#;O zkI-XZ9qQKke@Bt+Lkau3h!|QLh)tWU;~kQd)9-m-Es~Xl^x*(KKp><9Ixd`g(ruWJ z4gG|4tf3yWeJ*9W`C8dNcS8uElaRbU_cqZPLBJ#8%5>H$MwEq^dF{}lGaP(XgZN<1 z14`&K*hmzsqCEe62A=^q@Nx|lm~ig#jL3kJkh71bk)`3l-4G7w2|g$-N&7K;h;7^v z!}wsGAHxT6K&=oV8Wz?mWR3UkM54zo$!}y3@riuUH^PCO#vbv(pI3i#`fc=Li(Hp>%e^~q z=JmDNND_!}jzsJ^1aRPed6CG|P^)tucpDD8EszV73&^<_bC;(U-6f!SPPpM}t+nBT zQflOUEax6gNIr%Sb_PODKFD)0haV+g+4QGe#;^NK^C6e{=MFAnI7GpP0U@DeoP>nW z5MF+q|6o8^f)Ak?W~?>&M92pt zKw1GECcJg`#_XHxbCIA65e^#KbMX;;&0T!kfK?GPh_iX0u(`i~E zLWK~ji{L~E%AU2$oAc$Ug;;_Qavls0krJ0eEfG`X<3seuPoInUQ*g+^2V=rVvVvju 
zkx((dC7xsbCk>*fA2MD9PsognUm5Ava8oOWf37F5IR{oBJsG#byAT#%M4t^t=6zZU6o1Hv$brd) z4nqWrXpm@KVWc;IVIi#>8E(zILvA41dmmW>h=Hr7!7CHG4`3!!;bx4DBe4*VRYIE`-@C}x7 z9>yk!ERuW(4ICZQj;8xO5YU*!r*xUcfp|&O9_Zk}GlNphe0Lo^_i2(e(g zj1B`g>r30R^%xVZ$ILclLa2u$Lp}Yy^mrdYObA;SW!TSi%17{llwcw&D4WekkGvM0 zwt8hIm4yiH-H+t?=|@Egig0kON9(6P%8+jm0wE-Tc@E()2Ixc@l|klcXWk_s00-z1 z5UjmD`yRPD``-94HvjhKoO35kxV$<42Dv^*0P-r9*YH8&5)|>)PI(cEFYq6n=P)r( zd_n6jeS9Hb0zOdxAkQH>nfCD%aBzHhII%Pzl5iNG;|bv=O$_H__#iTbt@#fjAAkTG zN%G0ni8(v2UkWzJ$8Su`OKfcv#X1uTmTSf<6ILq%=x*5ZJHuayE=pwa!N)A>A+2Gz zq7;Xpg-Xw|yIjGAeZ;KZdP2qSCX{eYLKhJ12Y5Juf{+Xu|G|bi@5K8X)kp3FIkH*wpXn?Qv1ql8S5?#z55 z%0q^x2OO5*gFcnu1BNX71p>};eR`9Y=OnOTmuCAR9IVrn@ShO=NaKTS-%x;}=))-> z$OjTg#9^n=en>;o87wSPdJCcP3UmR>wfLvWBEYBKjRFL zE#x^td*g*)RSbHP<{Z!9X+DJZo4)IwV-h?>pNfFM8YoP_$MA5t%k1oNVJ$PbbnWkm zc6s=-Z6kY+9Rpno1%wZLN}rY0J0|e(xwp>1=ikv<52lS6A5gdp0TBEIi7wUNp8tT5 zd=MONEWDHGIRqEZk(hgpKnBZC5(R;H^v6B! zeLxty|Ezl-5ay{Cg?=&sp_pTzar=YpLDIgSyMV=@A*9%4LJ?AU?6p+2BxOzdwH)h^nn|wVs{SLdP(`654-qUjT?tDhTf(sn{myXT7zc%;&`uuMa zbP1O?7d{}@=HDV$=iW%r!3EGTAef@HOY#YqSihKk&9GYwPVOwcxjX;*@tY5KW?!Cr z@c77u%bUMH-mztSP47T)OZ!*l69Y40!Z z9IojD6kF>Cw$%1-P5>csDL7nxvLNBoups5~Q}^Z_Bp)9z)qG;mU;v2FFj+lqf(&lr z4m!mL2b0vx!ooLz$f83&B=P{7Rg+ifK+$@daCVB#q8;+-xX876;|wPMBobeHU-UEKK( zx#j-U`+JkoEe9D-25el2P>&!G@nH%wM8Y@To&Kl60R`)JqDbGT1OdYR+2150gIebg zcriGLD%Ypp0)(40|GcyC`>loFZ_IrdgAA}@?!(P_*LipGKkhI5$KAPqx^J0&%in`% z$Xm#O`HwAp00q|;e!DXF4lzDloqczG_8lvmSh@s0h!tECM38fk+kg*nW8v++*B>6d z{T;dc?mx#C-;-E4ar@!kvzIp>{c&L5f$r_Q*3>qvs;*mAS-rBN3RzKJxuUfEo077% zwKZ#Ms#n!iudc0GU0b)Rrgl|r!`g<{ZyVazHFXxY^pvy>mUIl2b&XVXZ>;FqkN|>R z;#5e73C0H$I9qf2XLr?hUqp7wr%GKbldyUpXC4#MNAOwI3w`cwti=%<~aF011-KPP~aY#jk)`+i$D!Hbd)KC9qz3q%Ar7vJ4_`Tq9eyQjbZ z&!hMMdFSPO(gi2)KRI~*>gbUZ9lQ2@)7ZYOzV#n9O~E=0y{i1{Eh#~#H87EeV9S*z?cXZxvq5DJVBLWDY{>l@XhZwx2> zXzEX`Bgl%=@xFS@kZod3^C9dJAJm5Z`nYCcbbGAq&Me)GJ?xQgBh6H;Cq3AnvxKXV z^H%t9gFsG2bGPg?v{u=#?$R0=WTy%9Ph5uF(5(m4zaeJ+(L-WSAhP|KHeAMJ01=V5 z5RIAr;1J;>h>#C*5+Fm!0fLV3Qo3_{$RWg~k1Q4_XXug)3lT1O8V?5!fPzd2MG@{K z`AJuPKw^+UfYz-rPA~W(D7O!bBkM_c-kX2>aN)N{uRc6{_aFC>H~$0+py1Slr{7%~ z+i~(-&G6=;u7MSGEq^Pi`mCVrv%<2!l~jIFR{d3V<3FkzzIIZ-ysCa>O{4P%>KY51 zTGur+ud4$K_3&Yh^CfClR#f9rtg5Pn4+wBrUQxM>5TmT}>vD&Pwe@WU&0R&UJ;WO2 zUF$3QhAsJEN*WRLh!Tn~GM>ZJ`76&BB-o?)AUL4sWJ1`P<^y*V&I6Zb?LD%U4onT!#f#^nJF<_Y}GB%Tx6Bf437qCatI2CM!h zK4lOf5HLMde1JfV5Q!dW&qs+k;Qax+%$UF4XBQh3mX(&!)`W|&)uZS!+jl{M7o34C zV#LlsW~A@2G4w+!QUMZ$65+RpI=>S`CaLHQJYIdU1xlqQ!|WxeVRPx;Z8!Vs_WT>< z*4%4!4BTy|*4wOnvP=6oSUKe(_yAYt92PFkEL@&_d2QkK?N`6ud-qSAfu|q-YvP~( z^ZdhqJy>{m`N_mD*Y6%aceQ2n&Z_krzisLKvZ@XUe73gev(*Kkty(7L9w^g>vH;3}-Hss#uVbSIVduC1tA10uXA^l<(|c@^>vck7x9nmbC` zd&;`{E4tTL^b8sn5=u@=(24&L@&P?U1WOg!@x>n^A}1e2gOCH#4Gy;5Rb;S{`rt?M zA#K|6>RzUzSUoc5uuXq9ybBzJ1zJ_bzyx7Iie2(4r^j|D#W;}Yw@U_~n|Y$!N7AOv z-HhQ+xfEhfu^7RH3Bm8NZ$SyaH0*b}Un4LqXkEZAQL?Pk2L)!}0tF9|67|P$aM17) z`VSfzw&sM;2_JkW&`s#_6T)9&x_%@Rw^k3;8XQoJ56;Y|UPTDN?dV-zS@-$c;?GtUe7?F6<;#+4=M_}eEw5=@0R#*M)eQhaWB>*D2`g*x z7M#N%A={vs;+wL{<)sxk42Z}Ofuf=g|6z4`HGIH%5F)%IQM(c{)YQxCC~5Di=pLx* zMb=mJ_Fs53pHnVAUP#EL@x@DVelj;A@FhGY!d7c8Ia@pG%I?ym%%1#nn}|{9(WR_U zy6eo7uMjTqA1pZ;`4Yi8g6vxMEQ_*WTsiSQ{rRjqQk5G3Ag2G+m5pS!EYLxJ}WK%tf1(#b%m}iEdEzL)?02a+ha1{s*+|=&_2Sp8Q zNjMNbkawd-o?Xr0v7$1zCV+WLlqrsk5i_VSMIimvXm z_O9agjt~wBAcU8|;qsH0m&s4&L4=4Al0mvbAg4m%mik?o=14E>UDq`SocKdY(@fna(#z^z3FC~%7oUK0H_Ch>K4$qzF`*z>V^kAln+ z1AL%|5)Z^mD}9e?|7{N4LUZ{FTrSiJgT;^dushc8}k z*|@E;cRlm_7bTUSuPgg|Va3-a)yqn&i&{I1JGx4{dXSRN?t<3#wGGV}yJU4;BZAKK z?Uoy=>u@IsB39Ho0R_t{swH3h5C9Yi4)6gc2oA23<%w8ChLvR%tI8{ZLxK-q7nHDy 
z!CjU!G=>j;Q&zFOv>b)yj5T%jfMQKutzX@!4*3x3f(SI8wtSA_@S5R}<2cJ_{)H79yX)`~Uh= z3|s!>+#g|Te$Uq;a|@U??2GFeQWhV-?z<; zUsaTSUR3bex`NLN3Rwa`+~m%*UemO;wwV?wYHTTOX)EvSs_gDTP?(^LTiez(HhtUB zxVpY!RbBmxn%ZxwYmnttjs|)Tj{*V;8Jveu>6R%K6woSlKr1Xb6d)Dl8Sy;?6o{OM zL!GibzB9P@}rN}qsWs(&Y-(C!t!2_Q9mRZpEHorTSOpqVB4kJc3+erC$({fRwug@Ib@NDy55yYgDxic3fshQ5 z340GV{Od(eSgYN&KX`*fnh)GEKJXQhEyRm-_Jq*z?duYXeXrmE9})xXGGv130xld@DnsJAcSK=I2a=Mpg5zt2&$)$>o49?qh-I`{$o0~$7(Mvo$Jr#f9DlN2rtjS zm?kao{q1V?tKSz$da^hm^q5CRyQ zY)wkKwZXb&mncR6!-DZAyo)UytbL*a}fsyio^`(6S zr9FMcT|EWuorD12Hn$eHb-F;n`}`~vmb7-TVo1EdvbJt*Q$uk_TUCE=?cn#A!DYHA8=Ym4gYitFo3ni|Vmn#QSGcL#x#bSmpB@G1n6);9& zqd!0c<25UjWywWPn;Shg(iHo+EK!sYBH#meCF&2yNEaNGbvN@j8<7>5UE2UD_8UUQ zezrP|T22Yp3(~roe?qYZ&3tJGdsIk+kI@rfq)VQbv)^@+_i#tp`ArH zm2Ns9Q?coFd`m@bFd;! zC|IBs9C#HY?{7W-=-J7;kAAp*`@3_Ocl>y2_~7?l+jcjPZuz#Z1;OoiQr||oU_n!7 zK}%FQ}_6YN)ShYlRXe zZLJOwt<7tj8==JNhDK!|Lq1TV03XcWO(8}w-J%1Om0rjvAQ~V(d|guYb*axYc>f_M zA0kkc6+?!vON)?@385}N7$Q)BL%;`|hb;fW_+Vs^Vw+E;*jYExzO(qK9A7Tt(@OCH zJD8H0mBsHU))^|HF-A71)$PEg#~AsGmH%LfK>2fgkO3McOyPm{pCbLM=+Rf}XV8Y0 zZpMyr>G^4^)OahH)Ma{@^?VEy!ZC^8V?Qa~AAL1@yZk%C+*UP?q#y*~JiUnp_iraj z$Yeo+2yb)3otf+kYscN2TX{7+Ek~eZ)Xe(`Hhycu+PRw((>I;lFf%sEZob*;UdARI z0j`d->G=7H3)haFyYk)X3wwY1b^FnuHXQhV{qBRkJNGq>ZfO|VP&+Wh3SD_uFB3Y8 z0tE7S`z|L?-?FZ$y{NUTth0~B{p$W<1~-h1)(?*$wd;rQ2k-<+JG<64Qf5%Ev;bKG z1p*`rTsTl$hk`JGVTEn&rCr^45q!3?w+~{lP^f$(%ZscLxCMpE>NRdPp%Ou-hq$Al zzOk^Osj#_)K%=m=O|(&j!cqnK2T@{Kd4)*^2r(S&mRHJ_#c2dA9cSR{vT_p^O2h}D zgLYX-nUlbOF#YS2VoAt^Fwij1!4wfA_8$l+-dIe5MMyW(FiIE@LM4QT$Y3o`;E&2I zD}+Ek-Ha~$pq?Pxr`42gd^x zWMCa1I53;LMZ=o<;^xkhmagKqZb}Xu9J+}M1{ui`e`Q}krd=~Ih~NXj2mAqS9}b8Q z;(*A2E;w)(9dxW|Y+BdS%13|$AIF{0Hxip*;FK{VtUS7^18S2hs^9}oh0{^|3Zp1N z(h|XM;WrbcxOENx1PmV=7YD?LjDeR|{G+TKi6Hl^iv%m?FriyE%Guw{!@9`W$}syA38RS3RAVxGk%^`|L#| zZgPJ%b?M2(#qk%{2>Jcg&+Ly}c`3o2b=l1=0 zx_8U2?#zc-9R}4Vv&pZ4@&25z(-PK*awLSff z14E7LN1BF4n};_bjl&yA3ox5wr%cT(6EK^T3IGw9EyaQGp`7}kzM+}{9ERbBq0xrn zk*491I`2H-Ijn1@{Gh?53fY@bQ%mx2ZBsJ@fCDRP>$DuH)yb804OB2n_=PNA!WKLM z!Vne*5jHru4NzW;5~@!C6p?sJtgWe|4#9a8)J1zywv#W?Dh5%8tA+NygxR>W`m*vb zed|C`UW$MSSY3d4tMX~+qrFb!@!1y-jTZQp_`cEqZkiJCwWd*fCNJ6AQ(@ zn251E+zC8~%AP?A72t!D{=u65_4R{J>IMcy75oG|1mF-YC{$Iis&4=h;sXXtiXakS zr2r*h7vTU?zjGd%ngs_62_i~(R`6p~wTRmQB5)z1phJaB-ko3o5W{)kpD9Pe)SKzMvagFvLqEX~f$~*x5rX3Q@c5<4mB%lU%a0ZjDVH8D zNN3E|@mD~@V1goaTz&T1{XzPDf|`%Glw!odWE6l{Ydy{tj~6c-Hzr?QoP2e8;3aOx5pG!=FV}Pj1b3Q* zJ5Yi!*&Ref{RS>Ei=mz>Dw-mnM&g~8OX?zswWTZ77UkqwtS8j+=RW`t5ytryF-#x^ zh#(TTHr7pioQsI>&1~op!u!+xi_Ay>qHuY|zu2AR|CK2>^ z_|x^f$1dJHeCG1LpU&<6;q<2cM@RP_8r-q3d()0qrtg6vX8*e0fzp=t66~d^6)9@! zz`ds}Z0sl?F_$?2u(`w0ps9UrV;d6b4erwIE^O^$>kwvM)iYGxH&oj{ zTEBin!}?8i>o?R5kaifUXzwa(YAe9Xs6F-z0eB9W8*pHLFYE3t@986^aAX)9LdaAQ zlF(N6^fLzkM7EE;M~jS9Dc3c+Rf)J7gnU(0l2i03!NIWLfbhV{m4}OxtB)KR5XLZA(T7?* z3PXgEA)LtQ#1>3=D~AMbC_;}~&j-8(M}x^XmrSN!UyAvl8Y%$?7AdSI-gon|Kj~4e zH)>Y`hsiq|f8oxmqD6NrU*yK)181aYm7KfEliLYb?$n*B1-y&Mb9nAwkoVNN*6#g! 
z%(Kj%occmW4ljZPK8_0ObT~5}4&lyj^T{h_Q5_z3S^a1@V; zcD_W8y`Ss#AM@giMda+WdE~t3-N9LkmLj08WA%ACqBSeVZiH)7Y`Du>%4~RyVdI zs~Q{=)&hryb`-W27Ps|sl1X{jV0GUJQqw#E&eBU=$oG`vzE{&{9R+;Hb-K0JM&7FkD zq%lV9j@cOlm!CijD2xv%X+F4L=u4Q?lCL0w!2G}Vlpy=zzNM~S4h%jF3mjY$^$`C2 z!zUluYJn^(VYMPFx8UJWrV!)HvJyvzvJynx2wOvn6j)(B1S1zP`NuCWK3cr+Z~-`6 zd@zrMiu?!Qz<~1)=0iT9<3&(W8P#1=dL4-R6^4YZJO?a2^e%LHY2xLj7q2eF;x3U% zUhNSHpYPaSEyqLf;?nr+W%J+5q<`+| zoZfp#D+ho#88HT2Fh)NKotLGaefUD2gPaEjGxGGKDTIgUIln%hK6UTK&o{?^xO)HS zrJMUtU)p=}{H~wQY&(8((~%!X4jx;-@5tcZ!|hvkwr<+a7NLgWP1p!?F^)I8_KG|E zkfM%WNnv}Bq@;t0o|6Gs;_oVE3vg2>_Fh!qURc+L6xX+UX~s5@2n+Si%;N~L10_bF zKulnHQOdVM{D;tUKo=iC z#1}=y2#CPIZR?Zxz@tP7>YWfVL97Ug0UVt8AOgrBqJ-QDycYy7g7}3NU71ooLX89q z6Ha`h5=J5X5|4-|3*j6IpPdNF5bDMRQf z3&_RCbBJhQt;NNcp7ACxX3UJZbuO}V0Us>rK)Lw*<@s1NGwLlKIYx5XSFBF@VBZy+0pAn^X9w5k{S#e0lc8!kMRYr^lVKpL{TZfdd47p1r%|GLztaAY8oGI8zdruL@-qwxBj>((g*sp17SBtV1SHGpxiFf$FbxyvsbOb(_lu%*l z5Yb#z*Cdg?uC5WG@(@HogSE9Sz(IWAGL$uSttPAMFxA$T^-fUW1I|NXOE*8Be~8=$ z!2!1c0S^4@0EpY%R9PPxT~#fa$uP2TOBDm_5lS6uhDP86O6}k%>B@C2Z7$j99R?vn zMb6>_{sV%yFXthF2&Erol~^ct4IgkG5V)ad!Uw@YdW`E!rv z&pw<(YzxM@2eW7I&mbu0eLvOL(BRC2*^E4#{`J9>m**>36c^LJPkP;@Z{XH#j{TroG=~3iWiFrNh6=D?manj^U-%V9v&Qf@ZI(M2d~{baQV*ui(`AwUmHIB zWB>l6ZQJ%WY}mosAe;$MF)&)*KTo=Em4{-!gVN-WOV;5$XTUxsygT&zAmj66Jidt~CBPI~|uW4wFq@EIDS7?N+ zBcoW~itQtA*7oJZuA;cM5fK^GX4NM7M{%GT&&emz21adTgeu_3M;1T;+(85202a)1 zAmrf(gBNx>uV5E*ytk1&r_V|DkahqLDuPlS6nsFThg>D7B*D16nw(?}t@sexHDykI z(ImqM);)3fW67|BSilPgE(J=sx+d}%)p07r`wt)@auwt-#4T_@w55V|O#BdNBka&> z<${)H@_dq?#5*j|zzT?Ac?o!;<0b!ARE)$lC=w`WP!bV9%alHds>;T7l1=8@HVCh)q_GY_Xu z-*@x-@#_zdUcGgMqu&u=+#cI%I4HlH}X<@hON>xt8wkDc6b=!c>G zN1I2tHx6&DUr(frKfkHLn|a;Hwwg`5D>v_{*|N88+x~`a`|CIFt>3h>W^`Na@Yc$K zjV0Yfg`NEc9euPVT?574EZJ|U;t3<$Ktbz{{T;i%6CcnUHt*sVaNv~DqSoG$R!aUj zD9oKZio<|&j+Hs9_Ypn*Kt{Y*Am70Je5cSK4^y@fyGCU1LIi;WzJU~2sibchztzlyW+G%8V(==P~cG%xAyRBSJgDHtZrJ#DJcFZ z12X@G!j}a_l9LZ-k<$+%Idy;L^u6g*cc;vwKtb>^ z5J+XCKWVXC7iT({$nzNEC5gcsz6A5xMjzW#JY+@C`gt7{+*gTu;CWIemZf z)V+xlx5gzu+51by$t+Zj23Yw!76d(PcNw)}Wu(}{B%kDuLe?9AxVUxyB#>_6~h z&%R?#8}~GC+}Sv~y+j*d6`##7}-Zxs(IatuzThQE%^B}>#5U?Nt z5fat~s4hkdDyC3aQPDLB9Qd8UVMSHrH7{1a-( zT@aOkh`(nC6!I(7hLnI0LWDYXN&Cx#83aB+hLiWEf4MvL%k7DuZ@oBn^|9pmweh3Z z#=pP%`0(Y2hb}$XfA+?{Gh@h})7N&Ny1Mh1%R7F)wEd@xn~$G0*?R2kmZN939Qzd+ z{_fPsq0^&>f8B88%!cnzj~+QSa`@!%p{vVM3eJ3!<-u=gsz5^$u(9^y5 z=-{EBkiPGJ>^=BH&w&#r-TQv*-1|f4o)hi6k9Qz@PIT=(-nIWk$L^!;dydj}>^-I) zTJ*M^M_P9sp>@iR!$`~aLv7m+b?*AUW9O0f9fxVVxV!yO>y`t+0jtOSv2iBuS`F06s`UJ~&@uWLx#dUDccS)We6Z z`+!5$(6*wUp>=J2Mfd<60a5+Hl8$byM3NOC8pq-Q>B1(`v#A(Oeuxkz{#CvBxm0&&;9pM|Ss%ltW z9qm{Q;jlLD1Q7-YjzQCzNajW43o)FR;@3At$nxHyssXoDK}=zMAg>}g;5I;p1Yq2pmeOlk6Kp%KEeTKo7S_y>i$B2fQn?P^A-mz@qsF3K8~C76h=qkPpNc zQNLf&5Fc-;2=I&eWNHLI2aZn135!3MFv_ziN8YtTm=rRr*x{S znx7|7kgNb5gc~3t%tPkmL&%B{5ts~iBs0NM+IM~5v-f!4{vT+2_nknI&Sf{o>o4hH zZXG!IlSF2YX(I#soeUoMAtU>L9yH-oo&!s?(7+G@A9M*H5cDosvH!RP-O1h`ke)pb z5#75_(4y1Ry@z=GSQmWQeN;NFbXximm>{KxF}uE(0Eh1FhdUs~*6%vD9%$RNt7(LH z0c_x}ku`UhIqwGuqhZTk{%)Iww=|D#Yu~yTKGctHCpswa8D>IvB71(IZDnHf59758zeQ)ay;DGah z=dizSsq@J`38l}T?nfTv|0{OD_}BA0i|=Fn^oOB zi@jh_0T#poA;M9@i!R}ViB0{+2O%P6#gFh|VBb$p0yu~#iPnH1&%rL2-~-?S2#_Hu z9u6{p$bd*UAOHse0XP^RdUhN^;6v9o$A_-12Rb+JX~&-**~+VhSp9d1pfaa-7~#@U zf`xdmGk}G7A`ztyN<-Hq=yVgU7 z!Zy+c-feJec>}CG3J&d?cXVvs)v=SD3J%5xRvbb;U^U`HU_N>IU}Qjn43X5gSbP!kA#^18F}O5D2RKed z0T^}d&_;pLn~)K=j6kx z`Ya4aNL0=tn-7A6^C=02I*-&{H?eIF+@-uf&T!V z_*Kf2pp)1nYoUk(AS5L8A3{C=2M~cnj)(xb4e}hM(5fH*0nx1x9nml!6o?67Yj}}j z&kR3hG!PKF_J5z{B%Q)7dpo!6Zr!k*Iz3Q8wj41^py<-!VqgQo2zdZJhtjU~ zI1ln4fP>{6$Wpd!*nwN3v<87mMgIo!kdT4sfV`qB0ir|@ICMGBpnPF~-UK4t 
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.cc b/webrtc/modules/video_render/test/testAPI/testAPI.cc
new file mode 100644
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI.cc
@@ -0,0 +1,645 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/test/testAPI/testAPI.h"
+
+#include <stdio.h>
+
+#if defined(_WIN32)
+#include <tchar.h>
+#include <windows.h>
+#include <assert.h>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <windows.h>
+#include <ddraw.h>
+
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <iostream>
+#include <sys/time.h>
+
+#endif
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_render/video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor);
+int TestSingleStream(VideoRender* renderModule);
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType);
+int TestBitmapText(VideoRender* renderModule);
+int TestMultipleStreams(VideoRender* renderModule);
+int TestExternalRender(VideoRender* renderModule);
+
+#define TEST_FRAME_RATE 30
+#define TEST_TIME_SECOND 5
+#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
+#define TEST_STREAM0_START_COLOR 0
+#define TEST_STREAM1_START_COLOR 64
+#define TEST_STREAM2_START_COLOR 128
+#define TEST_STREAM3_START_COLOR 192
+
+#if defined(WEBRTC_LINUX)
+
+#define GET_TIME_IN_MS timeGetTime()
+
+unsigned long timeGetTime()
+{
+    struct timeval tv;
+    struct timezone tz;
+    unsigned long val;
+
+    gettimeofday(&tv, &tz);
+    val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
+    return val;
+}
+
+#elif defined(WEBRTC_MAC)
+
+#include <unistd.h>
+
+#define GET_TIME_IN_MS timeGetTime()
+
+unsigned long timeGetTime()
+{
+    return 0;
+}
+
+#else
+
+#define GET_TIME_IN_MS ::timeGetTime()
+
+#endif
+
+using namespace std;
+
+#if defined(_WIN32)
+LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
+{
+    switch(uMsg)
+    {
+        case WM_DESTROY:
+            break;
+        case WM_COMMAND:
+            
break; + } + return DefWindowProc(hWnd,uMsg,wParam,lParam); +} + +int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height) +{ + HINSTANCE hinst = GetModuleHandle(0); + WNDCLASSEX wcx; + wcx.hInstance = hinst; + wcx.lpszClassName = TEXT("VideoRenderTest"); + wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc; + wcx.style = CS_DBLCLKS; + wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION); + wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION); + wcx.hCursor = LoadCursor (NULL, IDC_ARROW); + wcx.lpszMenuName = NULL; + wcx.cbSize = sizeof (WNDCLASSEX); + wcx.cbClsExtra = 0; + wcx.cbWndExtra = 0; + wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE); + + // Register our window class with the operating system. + // If there is an error, exit program. + if ( !RegisterClassEx (&wcx) ) + { + MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR ); + return 0; + } + + // Create the main window. + hwndMain = CreateWindowEx( + 0, // no extended styles + TEXT("VideoRenderTest"), // class name + TEXT("VideoRenderTest Window"), // window name + WS_OVERLAPPED |WS_THICKFRAME, // overlapped window + 800, // horizontal position + 0, // vertical position + width, // width + height, // height + (HWND) NULL, // no parent or owner window + (HMENU) NULL, // class menu used + hinst, // instance handle + NULL); // no window creation data + + if (!hwndMain) + return -1; + + // Show the window using the flag specified by the program + // that started the application, and send the application + // a WM_PAINT message. + + ShowWindow(hwndMain, SW_SHOWDEFAULT); + UpdateWindow(hwndMain); + return 0; +} + +#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) + +int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int width, int height) // unsigned char* title, int titleLength) + +{ + int screen, xpos = 10, ypos = 10; + XEvent evnt; + XSetWindowAttributes xswa; // window attribute struct + XVisualInfo vinfo; // screen visual info struct + unsigned long mask; // attribute mask + + // get connection handle to xserver + Display* _display = XOpenDisplay( NULL ); + + // get screen number + screen = DefaultScreen(_display); + + // put desired visual info for the screen in vinfo + if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 ) + { + //printf( "Screen visual info match!\n" ); + } + + // set window attributes + xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone); + xswa.event_mask = StructureNotifyMask | ExposureMask; + xswa.background_pixel = 0; + xswa.border_pixel = 0; + + // value mask for attributes + mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask; + + switch( winNum ) + { + case 0: + xpos = 200; + ypos = 200; + break; + case 1: + xpos = 300; + ypos = 200; + break; + default: + break; + } + + // create a subwindow for parent (defroot) + Window _window = XCreateWindow(_display, DefaultRootWindow(_display), + xpos, ypos, + width, + height, + 0, vinfo.depth, + InputOutput, + vinfo.visual, + mask, &xswa); + + // Set window name + if( winNum == 0 ) + { + XStoreName(_display, _window, "VE MM Local Window"); + XSetIconName(_display, _window, "VE MM Local Window"); + } + else if( winNum == 1 ) + { + XStoreName(_display, _window, "VE MM Remote Window"); + XSetIconName(_display, _window, "VE MM Remote Window"); + } + + // make x report events for mask + XSelectInput(_display, _window, StructureNotifyMask); + + // map the window to the display + XMapWindow(_display, _window); + + // wait for map event + 
do
+    {
+        XNextEvent(_display, &evnt);
+    }
+    while (evnt.type != MapNotify || evnt.xmap.event != _window);
+
+    *outWindow = _window;
+    *outDisplay = _display;
+
+    return 0;
+}
+#endif // WEBRTC_LINUX
+
+// Note: Mac code is in testAPI_mac.mm.
+
+class MyRenderCallback: public VideoRenderCallback
+{
+public:
+    MyRenderCallback() : _cnt(0) {}
+    ~MyRenderCallback() {}
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame) {
+        _cnt++;
+        if (_cnt % 100 == 0)
+        {
+            printf("Render callback %d \n",_cnt);
+        }
+        return 0;
+    }
+    int32_t _cnt;
+};
+
+// Fills all three I420 planes with a single value that advances by one on
+// every call, so successive frames cycle through a ramp of colors. Note that
+// the color variable is static, so startColor only seeds the very first call.
+void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor) {
+    static uint8_t color = startColor;
+
+    memset(frame->buffer(kYPlane), color, frame->allocated_size(kYPlane));
+    memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane));
+    memset(frame->buffer(kVPlane), color, frame->allocated_size(kVPlane));
+
+    ++color;
+}
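+
+// Renders one stream stretched over the entire window: adds the stream,
+// starts rendering, pushes TEST_FRAME_NUM color-ramp frames at
+// TEST_FRAME_RATE fps, then stops and deletes the stream again.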
+int TestSingleStream(VideoRender* renderModule) {
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+
+    // Render a sequence of solid-color I420 frames.
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+
+    for (int i = 0; i < TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+
+    return 0;
+}
+
+// Recreates the render module in fullscreen mode, reruns the single-stream
+// test, then recreates it windowed again for the remaining tests.
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType) {
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
+
+    TestSingleStream(renderModule);
+
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
+
+    return 0;
+}
+
+int TestBitmapText(VideoRender* renderModule) {
+#if defined(WIN32)
+
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+    printf("Adding Bitmap\n");
+    DDCOLORKEY ColorKey; // black
+    ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
+    ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
+    HBITMAP hbm = (HBITMAP)LoadImage(NULL,
+                                     (LPCTSTR)_T("renderStartImage.bmp"),
+                                     IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
+    renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
+                            0.3f);
+
+    printf("Adding Text\n");
+    renderModule->SetText(1, (uint8_t*) "WebRtc Render Demo App", 20,
+                          RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
+                          1.0f);
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    assert(error == 0);
+
+    // Render a sequence of solid-color I420 frames.
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+
+    for (int i = 0; i < TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+    // Sleep and let all frames be rendered before closing
+    SleepMs(renderDelayMs*2);
+
+    // Shut down
+    printf("Closing...\n");
+    ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
+    ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
+    renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
+    renderModule->SetText(1, NULL, 20, RGB(255,255,255),
+                          RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
+
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+#endif
+
+    return 0;
+}
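+
+// Tiles four streams into the window quadrants. AddIncomingRenderStream takes
+// normalized [0.0, 1.0] left/top/right/bottom coordinates, so the 0.45/0.55
+// values leave a small gutter between the tiles.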
+int TestMultipleStreams(VideoRender* renderModule) {
+    int error = 0;
+
+    // Add settings for a stream to render
+    printf("Add stream 0\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 0.45f, 0.45f);
+    assert(renderCallback0 != NULL);
+    printf("Add stream 1\n");
+    const int streamId1 = 1;
+    VideoRenderCallback* renderCallback1 =
+        renderModule->AddIncomingRenderStream(streamId1, 0, 0.55f, 0.0f, 1.0f, 0.45f);
+    assert(renderCallback1 != NULL);
+    printf("Add stream 2\n");
+    const int streamId2 = 2;
+    VideoRenderCallback* renderCallback2 =
+        renderModule->AddIncomingRenderStream(streamId2, 0, 0.0f, 0.55f, 0.45f, 1.0f);
+    assert(renderCallback2 != NULL);
+    printf("Add stream 3\n");
+    const int streamId3 = 3;
+    VideoRenderCallback* renderCallback3 =
+        renderModule->AddIncomingRenderStream(streamId3, 0, 0.55f, 0.55f, 1.0f, 1.0f);
+    assert(renderCallback3 != NULL);
+    error = renderModule->StartRender(streamId0);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+    error = renderModule->StartRender(streamId1);
+    assert(error == 0);
+    error = renderModule->StartRender(streamId2);
+    assert(error == 0);
+    error = renderModule->StartRender(streamId3);
+    assert(error == 0);
+
+    // Render a sequence of solid-color I420 frames.
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+    VideoFrame videoFrame1;
+    videoFrame1.CreateEmptyFrame(width, height, width, half_width, half_width);
+    VideoFrame videoFrame2;
+    videoFrame2.CreateEmptyFrame(width, height, width, half_width, half_width);
+    VideoFrame videoFrame3;
+    videoFrame3.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+
+    // Render frames with the specified delay.
+    for (int i = 0; i < TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+
+        GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
+        videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback1->RenderFrame(streamId1, videoFrame1);
+
+        GetTestVideoFrame(&videoFrame2, TEST_STREAM2_START_COLOR);
+        videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback2->RenderFrame(streamId2, videoFrame2);
+
+        GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
+        videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback3->RenderFrame(streamId3, videoFrame3);
+
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+    error = renderModule->StopRender(streamId1);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId1);
+    assert(error == 0);
+    error = renderModule->StopRender(streamId2);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId2);
+    assert(error == 0);
+    error = renderModule->StopRender(streamId3);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId3);
+    assert(error == 0);
+
+    return 0;
+}
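+
+// Exercises the external-render path: AddExternalRenderCallback redirects a
+// stream's frames to a caller-supplied VideoRenderCallback instead of the
+// platform renderer, and the final assert checks that every frame pushed in
+// came back out through the callback.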
+int TestExternalRender(VideoRender* renderModule) {
+    int error = 0;
+    MyRenderCallback *externalRender = new MyRenderCallback();
+
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
+                                              1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+    error = renderModule->AddExternalRenderCallback(streamId0, externalRender);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+
+    error = renderModule->StartRender(streamId0);
+    assert(error == 0);
+
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+    int frameCount = TEST_FRAME_NUM;
+    for (int i = 0; i < frameCount; i++) {
+        GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(33);
+    }
+
+    // Sleep and let all frames be rendered before closing
+    SleepMs(2*renderDelayMs);
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+    assert(frameCount == externalRender->_cnt);
+
+    delete externalRender;
+    externalRender = NULL;
+
+    return 0;
+}
+
+void RunVideoRenderTests(void* window, VideoRenderType windowType) {
+    int myId = 12345;
+
+    // Create the render module
+    printf("Create render module\n");
+    VideoRender* renderModule = NULL;
+    renderModule = VideoRender::CreateVideoRender(myId,
+                                                  window,
+                                                  false,
+                                                  windowType);
+    assert(renderModule != NULL);
+
+    // ##### Test single stream rendering ####
+    printf("#### TestSingleStream ####\n");
+    if (TestSingleStream(renderModule) != 0) {
+        printf ("TestSingleStream failed\n");
+    }
+
+    // ##### Test fullscreen rendering ####
+    printf("#### TestFullscreenStream ####\n");
+    if (TestFullscreenStream(renderModule, window, windowType) != 0) {
+        printf ("TestFullscreenStream failed\n");
+    }
+
+    // ##### Test bitmap and text ####
+    printf("#### TestBitmapText ####\n");
+    if (TestBitmapText(renderModule) != 0) {
+        printf ("TestBitmapText failed\n");
+    }
+
+    // ##### Test multiple streams ####
+    printf("#### TestMultipleStreams ####\n");
+    if (TestMultipleStreams(renderModule) != 0) {
+        printf ("TestMultipleStreams failed\n");
+    }
+
+    // ##### Test external rendering ####
+    printf("#### TestExternalRender ####\n");
+    if (TestExternalRender(renderModule) != 0) {
+        printf ("TestExternalRender failed\n");
+    }
+
+    delete renderModule;
+    renderModule = NULL;
+
+    printf("VideoRender unit tests passed.\n");
+}
+
+// Note: The Mac main is implemented in testAPI_mac.mm.
+#if defined(_WIN32)
+int _tmain(int argc, _TCHAR* argv[])
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+int main(int argc, char* argv[])
+#endif
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+{
+    // Create a window for testing.
+    void* window = NULL;
+#if defined (_WIN32)
+    HWND testHwnd;
+    WebRtcCreateWindow(testHwnd, 0, 352, 288);
+    window = (void*)testHwnd;
+    VideoRenderType windowType = kRenderWindows;
+#elif defined(WEBRTC_LINUX)
+    Window testWindow;
+    Display* display;
+    WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
+    VideoRenderType windowType = kRenderX11;
+    window = (void*)testWindow;
+#endif // WEBRTC_LINUX
+
+    RunVideoRenderTests(window, windowType);
+    return 0;
+}
+#endif // !WEBRTC_MAC
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.h b/webrtc/modules/video_render/test/testAPI/testAPI.h
new file mode 100644
index 0000000000..0655a5b434
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI.h
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI_android.cc b/webrtc/modules/video_render/test/testAPI/testAPI_android.cc
new file mode 100644
index 0000000000..c62a62f39a
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI_android.cc
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+int main(int argc, char* argv[]) {
+  // TODO(leozwang): The video render test app is not ready on Android;
+  // keep this as a dummy test for now and add Android-specific tests later.
+  return 0;
+}
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm b/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
new file mode 100644
index 0000000000..dfee4c7298
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testAPI.h"
+
+#include <iostream>
+
+#import <Foundation/Foundation.h>
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+#import <QTKit/QTKit.h>
+#include <sys/time.h>
+
+#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_render/video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
+{
+    // In Cocoa, rendering is not done directly to a window like in Windows
+    // and Linux. It is rendered to a subclass of NSOpenGLView instead.
+
+    // create cocoa container window
+    NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
+    NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame
+                                            styleMask:NSTitledWindowMask
+                                            backing:NSBackingStoreBuffered
+                                            defer:NO];
+    [outWindow orderOut:nil];
+    [outWindow setTitle:@"Cocoa Renderer"];
+    [outWindow setBackgroundColor:[NSColor blueColor]];
+
+    // create renderer and attach to window
+    NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
+    cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
+    [[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
+
+    [outWindow makeKeyAndOrderFront:NSApp];
+
+    return 0;
+}
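+
+// main: Cocoa UI code needs an NSApplication instance and an autorelease pool
+// on the main thread before any windows are created; the render tests then
+// run synchronously and the pool is drained on exit.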
+int main (int argc, const char * argv[]) {
+    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
+    [NSApplication sharedApplication];
+
+    CocoaRenderView* testWindow;
+    WebRtcCreateWindow(testWindow, 0, 352, 288);
+    VideoRenderType windowType = kRenderCocoa;
+    void* window = (void*)testWindow;
+
+    RunVideoRenderTests(window, windowType);
+
+    [pool release];
+}
diff --git a/webrtc/modules/video_render/video_render.gypi b/webrtc/modules/video_render/video_render.gypi
new file mode 100644
index 0000000000..e8cc03a4b0
--- /dev/null
+++ b/webrtc/modules/video_render/video_render.gypi
@@ -0,0 +1,218 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      # Note that this library is missing an implementation for the video
+      # render. Targets must link with 'video_render', or with
+      # 'video_render_module_internal_impl' if they want to compile and use
+      # the internal render as the default renderer.
+      'target_name': 'video_render_module',
+      'type': 'static_library',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common.gyp:webrtc_common',
+        '<(webrtc_root)/common_video/common_video.gyp:common_video',
+        '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+      ],
+      'sources': [
+        'external/video_render_external_impl.cc',
+        'external/video_render_external_impl.h',
+        'i_video_render.h',
+        'video_render.h',
+        'video_render_defines.h',
+        'video_render_impl.h',
+      ],
+    },
+    {
+      # Default video_render_module implementation that only supports
+      # external renders.
+      'target_name': 'video_render',
+      'type': 'static_library',
+      'dependencies': [
+        'video_render_module',
+      ],
+      'sources': [
+        'video_render_impl.cc',
+      ],
+    },
+  ], # targets
+
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          # video_render_module implementation that supports the internal
+          # video_render implementation.
+          'target_name': 'video_render_module_internal_impl',
+          'type': 'static_library',
+          'dependencies': [
+            '<(webrtc_root)/common.gyp:webrtc_common',
+            'video_render_module',
+          ],
+          'sources': [
+            'video_render_internal_impl.cc',
+          ],
+          # TODO(andrew): with the proper suffix, these files will be excluded
+          # automatically.
+ 'conditions': [ + ['OS=="android"', { + 'sources': [ + 'android/video_render_android_impl.h', + 'android/video_render_android_native_opengl2.h', + 'android/video_render_android_surface_view.h', + 'android/video_render_opengles20.h', + 'android/video_render_android_impl.cc', + 'android/video_render_android_native_opengl2.cc', + 'android/video_render_android_surface_view.cc', + 'android/video_render_opengles20.cc', + ], + 'link_settings': { + 'libraries': [ + '-lGLESv2', + ], + }, + }], + ['OS=="ios"', { + 'sources': [ + # iOS + 'ios/open_gles20.h', + 'ios/open_gles20.mm', + 'ios/video_render_ios_channel.h', + 'ios/video_render_ios_channel.mm', + 'ios/video_render_ios_gles20.h', + 'ios/video_render_ios_gles20.mm', + 'ios/video_render_ios_impl.h', + 'ios/video_render_ios_impl.mm', + 'ios/video_render_ios_view.h', + 'ios/video_render_ios_view.mm', + ], + 'xcode_settings': { + 'CLANG_ENABLE_OBJC_ARC': 'YES', + }, + 'all_dependent_settings': { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-framework OpenGLES', + '-framework QuartzCore', + '-framework UIKit', + ], + }, + }, + }], + ['OS=="linux"', { + 'sources': [ + 'linux/video_render_linux_impl.h', + 'linux/video_x11_channel.h', + 'linux/video_x11_render.h', + 'linux/video_render_linux_impl.cc', + 'linux/video_x11_channel.cc', + 'linux/video_x11_render.cc', + ], + 'link_settings': { + 'libraries': [ + '-lXext', + ], + }, + }], + ['OS=="mac"', { + 'sources': [ + 'mac/cocoa_full_screen_window.h', + 'mac/cocoa_render_view.h', + 'mac/video_render_agl.h', + 'mac/video_render_mac_carbon_impl.h', + 'mac/video_render_mac_cocoa_impl.h', + 'mac/video_render_nsopengl.h', + 'mac/video_render_nsopengl.mm', + 'mac/video_render_mac_cocoa_impl.mm', + 'mac/video_render_agl.cc', + 'mac/video_render_mac_carbon_impl.cc', + 'mac/cocoa_render_view.mm', + 'mac/cocoa_full_screen_window.mm', + ], + }], + ['OS=="win"', { + 'sources': [ + 'windows/i_video_render_win.h', + 'windows/video_render_direct3d9.h', + 'windows/video_render_windows_impl.h', + 'windows/video_render_direct3d9.cc', + 'windows/video_render_windows_impl.cc', + ], + 'include_dirs': [ + '<(directx_sdk_path)/Include', + ], + }], + ['OS=="win" and clang==1', { + 'msvs_settings': { + 'VCCLCompilerTool': { + 'AdditionalOptions': [ + # Disable warnings failing when compiling with Clang on Windows. + # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366 + '-Wno-comment', + '-Wno-reorder', + '-Wno-unused-value', + '-Wno-unused-private-field', + ], + }, + }, + }], + ] # conditions + }, + ], + }], # build_with_chromium==0 + ['include_tests==1 and OS!="ios"', { + 'targets': [ + { + # Does not compile on iOS: webrtc:4755. 
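+          # Links video_render_module_internal_impl so the tests exercise the
+          # real platform renderers rather than only the external stub.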
+ 'target_name': 'video_render_tests', + 'type': 'executable', + 'dependencies': [ + 'video_render_module_internal_impl', + 'webrtc_utility', + '<(webrtc_root)/common.gyp:webrtc_common', + '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers', + '<(webrtc_root)/common_video/common_video.gyp:common_video', + ], + 'sources': [ + 'test/testAPI/testAPI.cc', + 'test/testAPI/testAPI.h', + 'test/testAPI/testAPI_android.cc', + 'test/testAPI/testAPI_mac.mm', + ], + 'conditions': [ + ['OS=="mac" or OS=="linux"', { + 'cflags': [ + '-Wno-write-strings', + ], + 'ldflags': [ + '-lpthread -lm', + ], + }], + ['OS=="linux"', { + 'link_settings': { + 'libraries': [ + '-lX11', + ], + }, + }], + ['OS=="mac"', { + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL', + ], + }, + }], + ] # conditions + }, # video_render_module_test + ], # targets + }], # include_tests==1 and OS!=ios + ], # conditions +} + diff --git a/webrtc/modules/video_render/video_render.h b/webrtc/modules/video_render/video_render.h new file mode 100644 index 0000000000..a24acb9e7a --- /dev/null +++ b/webrtc/modules/video_render/video_render.h @@ -0,0 +1,255 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_ + +/* + * video_render.h + * + * This header file together with module.h and module_common_types.h + * contains all of the APIs that are needed for using the video render + * module class. + * + */ + +#include "webrtc/modules/include/module.h" +#include "webrtc/modules/video_render/video_render_defines.h" + +namespace webrtc { + +// Class definitions +class VideoRender: public Module +{ +public: + /* + * Create a video render module object + * + * id - unique identifier of this video render module object + * window - pointer to the window to render to + * fullscreen - true if this is a fullscreen renderer + * videoRenderType - type of renderer to create + */ + static VideoRender + * CreateVideoRender( + const int32_t id, + void* window, + const bool fullscreen, + const VideoRenderType videoRenderType = + kRenderDefault); + + /* + * Destroy a video render module object + * + * module - object to destroy + */ + static void DestroyVideoRender(VideoRender* module); + + int64_t TimeUntilNextProcess() override = 0; + void Process() override = 0; + + /************************************************************************** + * + * Window functions + * + ***************************************************************************/ + + /* + * Get window for this renderer + */ + virtual void* Window() = 0; + + /* + * Change render window + * + * window - the new render window, assuming same type as originally created. 
+     */
+    virtual int32_t ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     *
+     *   streamID  - id of the stream to add
+     *   zOrder    - relative render order for the streams, 0 = on top
+     *   left      - position of the stream in the window, [0.0f, 1.0f]
+     *   top       - position of the stream in the window, [0.0f, 1.0f]
+     *   right     - position of the stream in the window, [0.0f, 1.0f]
+     *   bottom    - position of the stream in the window, [0.0f, 1.0f]
+     *
+     *   Return    - callback class to use for delivering new frames to render.
+     */
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const uint32_t streamId,
+                                      const uint32_t zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom) = 0;
+    /*
+     *   Delete incoming render stream
+     *
+     *   streamID - id of the stream to delete
+     */
+    virtual int32_t
+            DeleteIncomingRenderStream(const uint32_t streamId) = 0;
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     *
+     *   streamID     - id of the stream the callback is used for
+     *   renderObject - the VideoRenderCallback to use for this stream, NULL to remove
+     *
+     *   Return       - 0 on success, -1 on failure.
+     */
+    virtual int32_t
+            AddExternalRenderCallback(const uint32_t streamId,
+                                      VideoRenderCallback* renderObject) = 0;
+
+    /*
+     *   Get the properties for an incoming render stream
+     *
+     *   streamID - [in] id of the stream to get properties for
+     *   zOrder   - [out] relative render order for the streams, 0 = on top
+     *   left     - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   top      - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   right    - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   bottom   - [out] position of the stream in the window, [0.0f, 1.0f]
+     */
+    virtual int32_t
+            GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                              uint32_t& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const = 0;
+    /*
+     * The incoming frame rate to the module, not the rate rendered in the window.
+     */
+    virtual uint32_t
+            GetIncomingFrameRate(const uint32_t streamId) = 0;
+
+    /*
+     * Returns the number of incoming streams added to this render module
+     */
+    virtual uint32_t GetNumIncomingRenderStreams() const = 0;
+
+    /*
+     * Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool
+            HasIncomingRenderStream(const uint32_t streamId) const = 0;
+
+    /*
+     * Registers a callback to get raw images at the same time as they are
+     * sent to the renderer. To be used for external rendering.
+     */
+    virtual int32_t
+            RegisterRawFrameCallback(const uint32_t streamId,
+                                     VideoRenderCallback* callbackObj) = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     * Starts rendering the specified stream
+     */
+    virtual int32_t StartRender(const uint32_t streamId) = 0;
+
+    /*
+     * Stops rendering the specified stream
+     */
+    virtual int32_t StopRender(const uint32_t streamId) = 0;
+
+    /*
+     * Resets the renderer
+     * No streams are removed. The state should be as after AddIncomingRenderStream was called.
+ */ + virtual int32_t ResetRender() = 0; + + /************************************************************************** + * + * Properties + * + ***************************************************************************/ + + /* + * Returns the preferred render video type + */ + virtual RawVideoType PreferredVideoType() const = 0; + + /* + * Returns true if the renderer is in fullscreen mode, otherwise false. + */ + virtual bool IsFullScreen() = 0; + + /* + * Gets screen resolution in pixels + */ + virtual int32_t + GetScreenResolution(uint32_t& screenWidth, + uint32_t& screenHeight) const = 0; + + /* + * Get the actual render rate for this stream. I.e rendered frame rate, + * not frames delivered to the renderer. + */ + virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0; + + /* + * Set cropping of incoming stream + */ + virtual int32_t SetStreamCropping(const uint32_t streamId, + const float left, + const float top, + const float right, + const float bottom) = 0; + + /* + * re-configure renderer + */ + + // Set the expected time needed by the graphics card or external renderer, + // i.e. frames will be released for rendering |delay_ms| before set render + // time in the video frame. + virtual int32_t SetExpectedRenderDelay(uint32_t stream_id, + int32_t delay_ms) = 0; + + virtual int32_t ConfigureRenderer(const uint32_t streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) = 0; + + virtual int32_t SetTransparentBackground(const bool enable) = 0; + + virtual int32_t FullScreenRender(void* window, const bool enable) = 0; + + virtual int32_t SetBitmap(const void* bitMap, + const uint8_t pictureId, + const void* colorKey, + const float left, const float top, + const float right, const float bottom) = 0; + + virtual int32_t SetText(const uint8_t textId, + const uint8_t* text, + const int32_t textLength, + const uint32_t textColorRef, + const uint32_t backgroundColorRef, + const float left, const float top, + const float right, const float bottom) = 0; +}; +} // namespace webrtc +#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_ diff --git a/webrtc/modules/video_render/video_render_defines.h b/webrtc/modules/video_render/video_render_defines.h new file mode 100644 index 0000000000..999707cb6e --- /dev/null +++ b/webrtc/modules/video_render/video_render_defines.h @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_ + +#include "webrtc/common_types.h" +#include "webrtc/common_video/include/incoming_video_stream.h" +#include "webrtc/modules/include/module_common_types.h" + +namespace webrtc +{ +// Defines +#ifndef NULL +#define NULL 0 +#endif + +// Enums +enum VideoRenderType +{ + kRenderExternal = 0, // External + kRenderWindows = 1, // Windows + kRenderCocoa = 2, // Mac + kRenderCarbon = 3, + kRenderiOS = 4, // iPhone + kRenderAndroid = 5, // Android + kRenderX11 = 6, // Linux + kRenderDefault +}; + +// Runtime errors +enum VideoRenderError +{ + kRenderShutDown = 0, + kRenderPerformanceAlarm = 1 +}; + +// Feedback class to be implemented by module user +class VideoRenderFeedback +{ +public: + virtual void OnRenderError(const int32_t streamId, + const VideoRenderError error) = 0; + +protected: + virtual ~VideoRenderFeedback() + { + } +}; + +// Mobile enums +enum StretchMode +{ + kStretchToInsideEdge = 1, + kStretchToOutsideEdge = 2, + kStretchMatchWidth = 3, + kStretchMatchHeight = 4, + kStretchNone = 5 +}; + +} // namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_ diff --git a/webrtc/modules/video_render/video_render_impl.cc b/webrtc/modules/video_render/video_render_impl.cc new file mode 100644 index 0000000000..f3d12dce8f --- /dev/null +++ b/webrtc/modules/video_render/video_render_impl.cc @@ -0,0 +1,550 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
diff --git a/webrtc/modules/video_render/video_render_impl.cc b/webrtc/modules/video_render/video_render_impl.cc
new file mode 100644
index 0000000000..f3d12dce8f
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_impl.cc
@@ -0,0 +1,550 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_impl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const int32_t id,
+                               void* window,
+                               const bool fullscreen,
+                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+    VideoRenderType resultVideoRenderType = videoRenderType;
+    if (videoRenderType == kRenderDefault)
+    {
+        resultVideoRenderType = kRenderExternal;
+    }
+    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+                                     fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(
+                                     VideoRender* module)
+{
+    if (module)
+    {
+        delete module;
+    }
+}
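In this variant of the implementation file, kRenderDefault always resolves to kRenderExternal, so no platform window is required. A hedged sketch of the factory pair in use; error handling omitted:

    webrtc::VideoRender* render = webrtc::VideoRender::CreateVideoRender(
        0 /* id */, NULL /* window */, false /* fullscreen */,
        webrtc::kRenderDefault);  // becomes kRenderExternal in this file
    // ... attach streams and render ...
    webrtc::VideoRender::DestroyVideoRender(render);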
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
+{
+
+    // Create platform specific renderer
+    switch (videoRenderType)
+    {
+        case kRenderExternal:
+        {
+            VideoRenderExternalImpl* ptrRenderer(NULL);
+            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+                                                      window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+        break;
+        default:
+            // Error...
+            break;
+    }
+    if (_ptrRenderer)
+    {
+        if (_ptrRenderer->Init() == -1)
+        {
+        }
+    }
+}
+
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+    delete &_moduleCrit;
+
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+        delete it->second;
+    }
+
+    // Delete platform specific renderer
+    if (_ptrRenderer)
+    {
+        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+
+        switch (videoRenderType)
+        {
+            case kRenderExternal:
+            {
+                VideoRenderExternalImpl* ptrRenderer =
+                    reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+
+            default:
+                // Error...
+                break;
+        }
+    }
+}
+
+int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+    // Not used
+    return 50;
+}
+void ModuleVideoRenderImpl::Process() {}
+
+void*
+ModuleVideoRenderImpl::Window()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _ptrWindow;
+}
+
+int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::Id()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _id;
+}
+
+uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
+
+    if (it == _streamRenderMap.end()) {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: stream doesn't exist",
+                     __FUNCTION__);
+        return 0;
+    }
+    assert(it->second != NULL);
+    return it->second->IncomingRate();
+}
+
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                               const uint32_t zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return NULL;
+    }
+
+    if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
+        // The stream already exists...
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream already exists", __FUNCTION__);
+        return NULL;
+    }
+
+    VideoRenderCallback* ptrRenderCallback =
+        _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+                                              right, bottom);
+    if (ptrRenderCallback == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream in renderer",
+                     __FUNCTION__);
+        return NULL;
+    }
+
+    // Create platform independent code
+    IncomingVideoStream* ptrIncomingStream =
+        new IncomingVideoStream(streamId, false);
+    ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
+    VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
+
+    // Store the stream
+    _streamRenderMap[streamId] = ptrIncomingStream;
+
+    return moduleCallback;
+}
+
+int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    delete item->second;
+
+    _ptrRenderer->DeleteIncomingRenderStream(streamId);
+
+    _streamRenderMap.erase(item);
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* renderObject) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    if (item->second == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get stream", __FUNCTION__);
+        return -1;
+    }
+    item->second->SetExternalCallback(renderObject);
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+                                                           left, top, right,
+                                                           bottom);
+}
+
+uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return static_cast<uint32_t>(_streamRenderMap.size());
+}
+
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+    const uint32_t streamId) const {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return _streamRenderMap.find(streamId) != _streamRenderMap.end();
+}
+
+int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* callbackObj) {
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    // Start the stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Start() == -1)
+    {
+        return -1;
+    }
+
+    // Start the HW renderer
+    if (_ptrRenderer->StartRender() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%d): No renderer", __FUNCTION__, streamId);
+        return -1;
+    }
+
+    // Stop the incoming stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Stop() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::ResetRender()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    int32_t ret = 0;
+    // Loop through all incoming streams and reset them
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+        if (it->second->Reset() == -1)
+            ret = -1;
+    }
+    return ret;
+}
+
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        return kVideoI420;
+    }
+
+    return _ptrRenderer->PerferedVideoType();
+}
+
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->FullScreen();
+}
+
+int32_t ModuleVideoRenderImpl::GetScreenResolution(
+    uint32_t& screenWidth,
+    uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+uint32_t ModuleVideoRenderImpl::RenderFrameRate(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->RenderFrameRate(streamId);
+}
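AddIncomingRenderStream() returns the IncomingVideoStream's module callback, so a frame handed to it is queued for the render thread rather than drawn synchronously. A sketch of the expected call order, assuming the render pointer from the earlier sketch and a decoded webrtc::VideoFrame named frame:

    webrtc::VideoRenderCallback* sink = render->AddIncomingRenderStream(
        1 /* streamId */, 0 /* zOrder */, 0.0f, 0.0f, 1.0f, 1.0f);
    render->StartRender(1);
    sink->RenderFrame(1, frame);   // once per decoded frame
    render->StopRender(1);
    render->DeleteIncomingRenderStream(1);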
+
+int32_t ModuleVideoRenderImpl::SetStreamCropping(
+    const uint32_t streamId,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
+{
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::SetText(
+    const uint8_t textId,
+    const uint8_t* text,
+    const int32_t textLength,
+    const uint32_t textColorRef,
+    const uint32_t backgroundColorRef,
+    const float left, const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+                                 backgroundColorRef, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+                                         const uint8_t pictureId,
+                                         const void* colorKey,
+                                         const float left,
+                                         const float top,
+                                         const float right,
+                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+                                   right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
+    uint32_t stream_id, int32_t delay_ms) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+
+    IncomingVideoStreamMap::const_iterator item =
+        _streamRenderMap.find(stream_id);
+    if (item == _streamRenderMap.end()) {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%u, %d): stream doesn't exist", __FUNCTION__,
+                     stream_id, delay_ms);
+        return -1;
+    }
+
+    assert(item->second != NULL);
+    return item->second->SetExpectedRenderDelay(delay_ms);
+}
+
+int32_t ModuleVideoRenderImpl::ConfigureRenderer(
+    const uint32_t streamId,
+    const unsigned int zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+                                           bottom);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/video_render_impl.h b/webrtc/modules/video_render/video_render_impl.h
new file mode 100644
index 0000000000..8dfa57d25b
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_impl.h
@@ -0,0 +1,208 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+
+#include <map>
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class IncomingVideoStream;
+class IVideoRender;
+
+// Class definitions
+class ModuleVideoRenderImpl: public VideoRender
+{
+public:
+    /*
+     *   VideoRenderer constructor/destructor
+     */
+    ModuleVideoRenderImpl(const int32_t id,
+                          const VideoRenderType videoRenderType,
+                          void* window, const bool fullscreen);
+
+    virtual ~ModuleVideoRenderImpl();
+
+    virtual int64_t TimeUntilNextProcess();
+    virtual void Process();
+
+    /*
+     *   Returns the render window
+     */
+    virtual void* Window();
+
+    /*
+     *   Change render window
+     */
+    virtual int32_t ChangeWindow(void* window);
+
+    /*
+     *   Returns module id
+     */
+    int32_t Id();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     */
+    virtual VideoRenderCallback
+        * AddIncomingRenderStream(const uint32_t streamId,
+                                  const uint32_t zOrder,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+    /*
+     *   Delete incoming render stream
+     */
+    virtual int32_t
+        DeleteIncomingRenderStream(const uint32_t streamId);
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     */
+    virtual int32_t
+        AddExternalRenderCallback(const uint32_t streamId,
+                                  VideoRenderCallback* renderObject);
+
+    /*
+     *   Get the properties for an incoming render stream
+     */
+    virtual int32_t
+        GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                          uint32_t& zOrder,
+                                          float& left, float& top,
+                                          float& right, float& bottom) const;
+    /*
+     *   Incoming frame rate for the specified stream.
+     */
+    virtual uint32_t GetIncomingFrameRate(const uint32_t streamId);
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual uint32_t GetNumIncomingRenderStreams() const;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool HasIncomingRenderStream(const uint32_t streamId) const;
+
+    /*
+     *
+     */
+    virtual int32_t
+        RegisterRawFrameCallback(const uint32_t streamId,
+                                 VideoRenderCallback* callbackObj);
+
+    virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
+                                           int32_t delay_ms);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual int32_t StartRender(const uint32_t streamId);
+
+    /*
+     *   Stops the renderer
+     */
+    virtual int32_t StopRender(const uint32_t streamId);
+
+    /*
+     *   Sets the renderer in start state, no streams removed.
+     */
+    virtual int32_t ResetRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the preferred render video type
+     */
+    virtual RawVideoType PreferredVideoType() const;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen();
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual int32_t
+        GetScreenResolution(uint32_t& screenWidth,
+                            uint32_t& screenHeight) const;
+
+    /*
+     *   Get the actual render rate for this stream. I.e. rendered frame rate,
+     *   not frames delivered to the renderer.
+     */
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t FullScreenRender(void* window, const bool enable);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+private:
+    int32_t _id;
+    CriticalSectionWrapper& _moduleCrit;
+    void* _ptrWindow;
+    bool _fullScreen;
+
+    IVideoRender* _ptrRenderer;
+    typedef std::map<uint32_t, IncomingVideoStream*> IncomingVideoStreamMap;
+    IncomingVideoStreamMap _streamRenderMap;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
diff --git a/webrtc/modules/video_render/video_render_internal.h b/webrtc/modules/video_render/video_render_internal.h
new file mode 100644
index 0000000000..0508c1a708
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_internal.h
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
+
+#ifdef ANDROID
+#include <jni.h>
+
+namespace webrtc {
+
+// In order to be able to use the internal webrtc video render
+// for android, the jvm objects must be set via this method.
+int32_t SetRenderAndroidVM(JavaVM* javaVM);
+
+}  // namespace webrtc
+
+#endif  // ANDROID
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
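On Android the JavaVM has to be handed to the module before any internal renderer is created. A sketch of the usual JNI glue; placing the call in JNI_OnLoad is a convention assumed here, not something this header mandates:

    #include "webrtc/modules/video_render/video_render_internal.h"

    extern "C" jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
        webrtc::SetRenderAndroidVM(vm);
        return JNI_VERSION_1_6;
    }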
diff --git a/webrtc/modules/video_render/video_render_internal_impl.cc b/webrtc/modules/video_render/video_render_internal_impl.cc
new file mode 100644
index 0000000000..ac89e7f2c9
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_internal_impl.cc
@@ -0,0 +1,773 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_impl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#if defined (_WIN32)
+#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
+#define STANDARD_RENDERING kRenderWindows
+
+// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
+// gets defined if WEBRTC_IOS is defined
+#elif defined(WEBRTC_IOS)
+#define STANDARD_RENDERING kRenderiOS
+#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
+#elif defined(WEBRTC_MAC)
+#if defined(COCOA_RENDERING)
+#define STANDARD_RENDERING kRenderCocoa
+#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
+#elif defined(CARBON_RENDERING)
+#define STANDARD_RENDERING kRenderCarbon
+#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
+#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
+#define STANDARD_RENDERING kRenderAndroid
+
+#elif defined(WEBRTC_LINUX)
+#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
+#define STANDARD_RENDERING kRenderX11
+
+#else
+//Other platforms
+#endif
+
+// For external rendering
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
+#ifndef STANDARD_RENDERING
+#define STANDARD_RENDERING kRenderExternal
+#endif  // STANDARD_RENDERING
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const int32_t id,
+                               void* window,
+                               const bool fullscreen,
+                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+    VideoRenderType resultVideoRenderType = videoRenderType;
+    if (videoRenderType == kRenderDefault)
+    {
+        resultVideoRenderType = STANDARD_RENDERING;
+    }
+    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+                                     fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(
+                                     VideoRender* module)
+{
+    if (module)
+    {
+        delete module;
+    }
+}
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
+{
+
+    // Create platform specific renderer
+    switch (videoRenderType)
+    {
+#if defined(_WIN32)
+        case kRenderWindows:
+        {
+            VideoRenderWindowsImpl* ptrRenderer;
+            ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(WEBRTC_IOS)
+        case kRenderiOS:
+        {
+            VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(WEBRTC_MAC)
+
+#if defined(COCOA_RENDERING)
+        case kRenderCocoa:
+        {
+            VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+
+        break;
+#elif defined(CARBON_RENDERING)
+        case kRenderCarbon:
+        {
+            VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+        case kRenderAndroid:
+        {
+            if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
+            {
+                AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+            else
+            {
+                AndroidSurfaceViewRenderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+
+        }
+        break;
+#elif defined(WEBRTC_LINUX)
+        case kRenderX11:
+        {
+            VideoRenderLinuxImpl* ptrRenderer = NULL;
+            ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
+            if ( ptrRenderer )
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+        break;
+
+#else
+        // Other platforms
+#endif
+        case kRenderExternal:
+        {
+            VideoRenderExternalImpl* ptrRenderer(NULL);
+            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+                                                      window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+        break;
+        default:
+            // Error...
+            break;
+    }
+    if (_ptrRenderer)
+    {
+        if (_ptrRenderer->Init() == -1)
+        {
+        }
+    }
+}
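Because the destructor below has to downcast _ptrRenderer through the stored RenderType(), a module must only ever be released through DestroyVideoRender(). One hedged way for calling code to guarantee that; the wrapper types are illustrative, not part of the module:

    #include <memory>

    struct VideoRenderDeleter {
        void operator()(webrtc::VideoRender* m) const {
            webrtc::VideoRender::DestroyVideoRender(m);
        }
    };
    typedef std::unique_ptr<webrtc::VideoRender, VideoRenderDeleter>
        ScopedVideoRender;

    ScopedVideoRender render(webrtc::VideoRender::CreateVideoRender(
        0, platformWindow /* assumed window handle */, false,
        webrtc::kRenderDefault));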
+
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+    delete &_moduleCrit;
+
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+        delete it->second;
+    }
+
+    // Delete platform specific renderer
+    if (_ptrRenderer)
+    {
+        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+
+        switch (videoRenderType)
+        {
+            case kRenderExternal:
+            {
+                VideoRenderExternalImpl* ptrRenderer =
+                    reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#if defined(_WIN32)
+            case kRenderWindows:
+            {
+                VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(WEBRTC_IOS)
+            case kRenderiOS:
+            {
+                VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(WEBRTC_MAC)
+
+#if defined(COCOA_RENDERING)
+            case kRenderCocoa:
+            {
+                VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(CARBON_RENDERING)
+            case kRenderCarbon:
+            {
+                VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+            case kRenderAndroid:
+            {
+                VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+
+#elif defined(WEBRTC_LINUX)
+            case kRenderX11:
+            {
+                VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#else
+            //other platforms
+#endif
+
+            default:
+                // Error...
+                break;
+        }
+    }
+}
+
+int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+    // Not used
+    return 50;
+}
+void ModuleVideoRenderImpl::Process() {}
+
+void*
+ModuleVideoRenderImpl::Window()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _ptrWindow;
+}
+
+int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+
+    CriticalSectionScoped cs(&_moduleCrit);
+
+#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
+    _ptrRenderer = NULL;
+    delete _ptrRenderer;
+
+    VideoRenderIosImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+#elif defined(WEBRTC_MAC)
+
+    _ptrRenderer = NULL;
+    delete _ptrRenderer;
+
+#if defined(COCOA_RENDERING)
+    VideoRenderMacCocoaImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
+#elif defined(CARBON_RENDERING)
+    VideoRenderMacCarbonImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
+#endif
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+
+#else
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->ChangeWindow(window);
+
+#endif
+}
+
+int32_t ModuleVideoRenderImpl::Id()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _id;
+}
+
+uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
+
+    if (it == _streamRenderMap.end()) {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: stream doesn't exist",
+                     __FUNCTION__);
+        return 0;
+    }
+    assert(it->second != NULL);
+    return it->second->IncomingRate();
+}
+
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                               const uint32_t zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return NULL;
+    }
+
+    if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
+        // The stream already exists...
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream already exists", __FUNCTION__);
+        return NULL;
+    }
+
+    VideoRenderCallback* ptrRenderCallback =
+        _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+                                              right, bottom);
+    if (ptrRenderCallback == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream in renderer",
+                     __FUNCTION__);
+        return NULL;
+    }
+
+    // Create platform independent code
+    IncomingVideoStream* ptrIncomingStream =
+        new IncomingVideoStream(streamId, false);
+    ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
+    VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
+
+    // Store the stream
+    _streamRenderMap[streamId] = ptrIncomingStream;
+
+    return moduleCallback;
+}
+
+int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    delete item->second;
+
+    _ptrRenderer->DeleteIncomingRenderStream(streamId);
+
+    _streamRenderMap.erase(item);
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* renderObject) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    if (item->second == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get stream", __FUNCTION__);
+        return -1;
+    }
+    item->second->SetExternalCallback(renderObject);
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+                                                           left, top, right,
+                                                           bottom);
+}
+
+uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return static_cast<uint32_t>(_streamRenderMap.size());
+}
+
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+    const uint32_t streamId) const {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return _streamRenderMap.find(streamId) != _streamRenderMap.end();
+}
+
+int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* callbackObj) {
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    // Start the stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Start() == -1)
+    {
+        return -1;
+    }
+
+    // Start the HW renderer
+    if (_ptrRenderer->StartRender() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%d): No renderer", __FUNCTION__, streamId);
+        return -1;
+    }
+
+    // Stop the incoming stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Stop() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::ResetRender()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    int32_t ret = 0;
+    // Loop through all incoming streams and reset them
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+        if (it->second->Reset() == -1)
+            ret = -1;
+    }
+    return ret;
+}
+
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        return kVideoI420;
+    }
+
+    return _ptrRenderer->PerferedVideoType();
+}
+
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->FullScreen();
+}
+
+int32_t ModuleVideoRenderImpl::GetScreenResolution(
+    uint32_t& screenWidth,
+    uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+uint32_t ModuleVideoRenderImpl::RenderFrameRate(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->RenderFrameRate(streamId);
+}
+
+int32_t ModuleVideoRenderImpl::SetStreamCropping(
+    const uint32_t streamId,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
+{
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::SetText(
+    const uint8_t textId,
+    const uint8_t* text,
+    const int32_t textLength,
+    const uint32_t textColorRef,
+    const uint32_t backgroundColorRef,
+    const float left, const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+                                 backgroundColorRef, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+                                         const uint8_t pictureId,
+                                         const void* colorKey,
+                                         const float left,
+                                         const float top,
+                                         const float right,
+                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+                                   right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
+    uint32_t stream_id, int32_t delay_ms) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+
+    IncomingVideoStreamMap::const_iterator item =
+        _streamRenderMap.find(stream_id);
+    if (item == _streamRenderMap.end()) {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%u, %d): stream doesn't exist", __FUNCTION__,
+                     stream_id, delay_ms);
+        return -1;
+    }
+
+    assert(item->second != NULL);
+    return item->second->SetExpectedRenderDelay(delay_ms);
+}
+
+int32_t ModuleVideoRenderImpl::ConfigureRenderer(
+    const uint32_t streamId,
+    const unsigned int zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+                                           bottom);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/windows/i_video_render_win.h b/webrtc/modules/video_render/windows/i_video_render_win.h
new file mode 100644
index 0000000000..6dbb4fd3cb
--- /dev/null
+++ b/webrtc/modules/video_render/windows/i_video_render_win.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+
+#include "webrtc/modules/video_render/video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+class IVideoRenderWin
+{
+public:
+    /**************************************************************************
+     *
+     *   Constructor/destructor
+     *
+     ***************************************************************************/
+    virtual ~IVideoRenderWin()
+    {
+    };
+
+    virtual int32_t Init() = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+        * CreateChannel(const uint32_t streamId,
+                        const uint32_t zOrder,
+                        const float left,
+                        const float top,
+                        const float right,
+                        const float bottom) = 0;
+
+    virtual int32_t DeleteChannel(const uint32_t streamId) = 0;
+
+    virtual int32_t GetStreamSettings(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      uint32_t& zOrder,
+                                      float& left, float& top,
+                                      float& right, float& bottom) = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender() = 0;
+
+    virtual int32_t StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen() = 0;
+
+    virtual int32_t SetCropping(const uint32_t channel,
+                                const uint16_t streamId,
+                                const float left, const float top,
+                                const float right, const float bottom) = 0;
+
+    virtual int32_t ConfigureRenderer(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual int32_t SetTransparentBackground(const bool enable) = 0;
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t colorText,
+                            const uint32_t colorBg,
+                            const float left, const float top,
+                            const float rigth, const float bottom) = 0;
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom) = 0;
+
+    virtual int32_t ChangeWindow(void* window) = 0;
+
+    virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
+                                      uint64_t& availableMemory) = 0;
+
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.cc b/webrtc/modules/video_render/windows/video_render_direct3d9.cc
new file mode 100644
index 0000000000..b59b944e48
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_direct3d9.cc
@@ -0,0 +1,1160 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Own include file
+#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
+
+// System include files
+#include <windows.h>
+
+// WebRtc include files
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+// A structure for our custom vertex type
+struct CUSTOMVERTEX
+{
+    FLOAT x, y, z;
+    DWORD color; // The vertex color
+    FLOAT u, v;
+};
+
+// Our custom FVF, which describes our custom vertex structure
+#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ|D3DFVF_DIFFUSE|D3DFVF_TEX1)
+
+/*
+ *
+ *    D3D9Channel
+ *
+ */
+D3D9Channel::D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                         CriticalSectionWrapper* critSect,
+                         Trace* trace) :
+    _width(0),
+    _height(0),
+    _pd3dDevice(pd3DDevice),
+    _pTexture(NULL),
+    _bufferIsUpdated(false),
+    _critSect(critSect),
+    _streamId(0),
+    _zOrder(0),
+    _startWidth(0),
+    _startHeight(0),
+    _stopWidth(0),
+    _stopHeight(0)
+{
+
+}
+
+D3D9Channel::~D3D9Channel()
+{
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+}
+
+void D3D9Channel::SetStreamSettings(uint16_t streamId,
+                                    uint32_t zOrder,
+                                    float startWidth,
+                                    float startHeight,
+                                    float stopWidth,
+                                    float stopHeight)
+{
+    _streamId = streamId;
+    _zOrder = zOrder;
+    _startWidth = startWidth;
+    _startHeight = startHeight;
+    _stopWidth = stopWidth;
+    _stopHeight = stopHeight;
+}
+
+int D3D9Channel::GetStreamSettings(uint16_t streamId,
+                                   uint32_t& zOrder,
+                                   float& startWidth,
+                                   float& startHeight,
+                                   float& stopWidth,
+                                   float& stopHeight)
+{
+    streamId = _streamId;
+    zOrder = _zOrder;
+    startWidth = _startWidth;
+    startHeight = _startHeight;
+    stopWidth = _stopWidth;
+    stopHeight = _stopHeight;
+    return 0;
+}
+
+int D3D9Channel::GetTextureWidth()
+{
+    return _width;
+}
+
+int D3D9Channel::GetTextureHeight()
+{
+    return _height;
+}
+
+// Called from video engine when the frame size changed
+int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "FrameSizeChange, width: %d, height: %d, streams: %d", width,
+                 height, numberOfStreams);
+
+    CriticalSectionScoped cs(_critSect);
+    _width = width;
+    _height = height;
+
+    //clean the previous texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret = E_POINTER;
+
+    if (_pd3dDevice)
+        ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                         D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t D3D9Channel::RenderFrame(const uint32_t streamId,
+                                 const VideoFrame& videoFrame) {
+    CriticalSectionScoped cs(_critSect);
+    if (_width != videoFrame.width() || _height != videoFrame.height())
+    {
+        if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame);
+}
+
+// Called from video engine when a new frame should be rendered.
+int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "DeliverFrame to D3D9Channel");
+
+    CriticalSectionScoped cs(_critSect);
+
+    // FIXME if _bufferIsUpdated is still true (not yet rendered), do we want
+    // to update the texture? probably not
+    if (_bufferIsUpdated) {
+        WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                     "Last frame hasn't been rendered yet. Drop this frame.");
+        return -1;
+    }
+
+    if (!_pd3dDevice) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "D3D for rendering not initialized.");
+        return -1;
+    }
+
+    if (!_pTexture) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+
+    if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock a texture in D3D9 Channel.");
+        return -1;
+    }
+    UCHAR* pRect = (UCHAR*) lr.pBits;
+
+    ConvertFromI420(videoFrame, kARGB, 0, pRect);
+
+    if (FAILED(_pTexture->UnlockRect(0))) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to unlock a texture in D3D9 Channel.");
+        return -1;
+    }
+
+    _bufferIsUpdated = true;
+    return 0;
+}
+
+// Called by d3d channel owner to indicate the frame/texture has been rendered off
+int D3D9Channel::RenderOffFrame()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Frame has been rendered to the screen.");
+    CriticalSectionScoped cs(_critSect);
+    _bufferIsUpdated = false;
+    return 0;
+}
+
+// Called by d3d channel owner to check if the texture is updated
+int D3D9Channel::IsUpdated(bool& isUpdated)
+{
+    CriticalSectionScoped cs(_critSect);
+    isUpdated = _bufferIsUpdated;
+    return 0;
+}
+
+// Called by d3d channel owner to get the texture
+LPDIRECT3DTEXTURE9 D3D9Channel::GetTexture()
+{
+    CriticalSectionScoped cs(_critSect);
+    return _pTexture;
+}
+
+int D3D9Channel::ReleaseTexture()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+    _pd3dDevice = NULL;
+    return 0;
+}
+
+int D3D9Channel::RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _pd3dDevice = pd3DDevice;
+
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret;
+
+    ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                     D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    VideoRenderDirect3D9
+ *
+ */
+VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
+                                           HWND hWnd,
+                                           bool fullScreen) :
+    _refD3DCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _trace(trace),
+    _hWnd(hWnd),
+    _fullScreen(fullScreen),
+    _pTextureLogo(NULL),
+    _pVB(NULL),
+    _pd3dDevice(NULL),
+    _pD3D(NULL),
+    _d3dChannels(),
+    _d3dZorder(),
+    _screenUpdateEvent(NULL),
+    _logoLeft(0),
+    _logoTop(0),
+    _logoRight(0),
+    _logoBottom(0),
+    _pd3dSurface(NULL),
+    _totalMemory(0),
+    _availableMemory(0)
+{
+    _screenUpdateThread.reset(new rtc::PlatformThread(
+        ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
+    _screenUpdateEvent = EventTimerWrapper::Create();
+    SetRect(&_originalHwndRect, 0, 0, 0, 0);
+}
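DeliverFrame() and RenderOffFrame() above implement a one-slot producer/consumer handshake around _bufferIsUpdated: a new frame is rejected until the render thread has consumed the previous texture. Consumer-side sketch, assuming a D3D9Channel* named channel:

    bool updated = false;
    channel->IsUpdated(updated);    // render thread: anything new to draw?
    if (updated) {
        // ... draw channel->GetTexture() ...
        channel->RenderOffFrame();  // mark the texture as consumed
    }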
+
+VideoRenderDirect3D9::~VideoRenderDirect3D9()
+{
+    //NOTE: we should not enter CriticalSection in here!
+
+    // Signal event to exit thread, then delete it
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        tmpPtr->Stop();
+        delete tmpPtr;
+    }
+    delete _screenUpdateEvent;
+
+    //close d3d device
+    CloseDevice();
+
+    // Delete all channels
+    std::map<int, D3D9Channel*>::iterator it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        delete it->second;
+        it = _d3dChannels.erase(it);
+    }
+    // Clean the zOrder map
+    _d3dZorder.clear();
+
+    if (_fullScreen)
+    {
+        // restore hwnd to original size and position
+        ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
+                       _originalHwndRect.top, _originalHwndRect.right
+                       - _originalHwndRect.left,
+                       _originalHwndRect.bottom - _originalHwndRect.top,
+                       SWP_FRAMECHANGED);
+        ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                       | RDW_ERASE);
+        ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                       | RDW_ERASE);
+    }
+
+    delete &_refD3DCritsect;
+}
+
+DWORD VideoRenderDirect3D9::GetVertexProcessingCaps()
+{
+    D3DCAPS9 caps;
+    DWORD dwVertexProcessing = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+    if (SUCCEEDED(_pD3D->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
+                                       &caps)))
+    {
+        if ((caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+            == D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+        {
+            dwVertexProcessing = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+        }
+    }
+    return dwVertexProcessing;
+}
+
+int VideoRenderDirect3D9::InitializeD3D(HWND hWnd,
+                                        D3DPRESENT_PARAMETERS* pd3dpp)
+{
+    // initialize Direct3D
+    if (NULL == (_pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+    {
+        return -1;
+    }
+
+    // determine what type of vertex processing to use based on the device capabilities
+    DWORD dwVertexProcessing = GetVertexProcessingCaps();
+
+    // get the display mode
+    D3DDISPLAYMODE d3ddm;
+    _pD3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3ddm);
+    pd3dpp->BackBufferFormat = d3ddm.Format;
+
+    // create the D3D device
+    if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
+                                   dwVertexProcessing | D3DCREATE_MULTITHREADED
+                                   | D3DCREATE_FPU_PRESERVE, pd3dpp,
+                                   &_pd3dDevice)))
+    {
+        //try the ref device
+        if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_REF,
+                                       hWnd, dwVertexProcessing
+                                       | D3DCREATE_MULTITHREADED
+                                       | D3DCREATE_FPU_PRESERVE,
+                                       pd3dpp, &_pd3dDevice)))
+        {
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::ResetDevice()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::ResetDevice");
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    //release the channel texture
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->ReleaseTexture();
+        }
+        it++;
+    }
+
+    //close d3d device
+    if (CloseDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to CloseDevice");
+        return -1;
+    }
+
+    //reinit d3d device
+    if (InitDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to InitDevice");
+        return -1;
+    }
+
+    //recreate channel texture
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->RecreateTexture(_pd3dDevice);
+        }
+        it++;
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::InitDevice()
+{
+    // Set up the structure used to create the D3DDevice
+    ZeroMemory(&_d3dpp, sizeof(_d3dpp));
+    _d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    _d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
+    if (GetWindowRect(_hWnd, &_originalHwndRect) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice Could not get window size");
+        return -1;
+    }
+    if (!_fullScreen)
+    {
+        _winWidth = _originalHwndRect.right - _originalHwndRect.left;
+        _winHeight = _originalHwndRect.bottom - _originalHwndRect.top;
+        _d3dpp.Windowed = TRUE;
+        _d3dpp.BackBufferHeight = 0;
+        _d3dpp.BackBufferWidth = 0;
+    }
+    else
+    {
+        _winWidth = (LONG) ::GetSystemMetrics(SM_CXSCREEN);
+        _winHeight = (LONG) ::GetSystemMetrics(SM_CYSCREEN);
+        _d3dpp.Windowed = FALSE;
+        _d3dpp.BackBufferWidth = _winWidth;
+        _d3dpp.BackBufferHeight = _winHeight;
+        _d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    }
+
+    if (InitializeD3D(_hWnd, &_d3dpp) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice failed in InitializeD3D");
+        return -1;
+    }
+
+    // Turn off culling, so we see the front and back of the triangle
+    _pd3dDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
+
+    // Turn off D3D lighting, since we are providing our own vertex colors
+    _pd3dDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
+
+    // Settings for alpha blending
+    _pd3dDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
+    _pd3dDevice->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
+    _pd3dDevice->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
+
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR );
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR );
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MIPFILTER, D3DTEXF_LINEAR );
+
+    // Initialize Vertices
+    CUSTOMVERTEX Vertices[] = {
+        //front
+        { -1.0f, -1.0f, 0.0f, 0xffffffff, 0, 1 }, { -1.0f, 1.0f, 0.0f,
+          0xffffffff, 0, 0 },
+        { 1.0f, -1.0f, 0.0f, 0xffffffff, 1, 1 }, { 1.0f, 1.0f, 0.0f,
+          0xffffffff, 1, 0 } };
+
+    // Create the vertex buffer.
+    if (FAILED(_pd3dDevice->CreateVertexBuffer(sizeof(Vertices), 0,
+                                               D3DFVF_CUSTOMVERTEX,
+                                               D3DPOOL_DEFAULT, &_pVB, NULL )))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to create the vertex buffer.");
+        return -1;
+    }
+
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(_pVB->Lock(0, sizeof(Vertices), (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, Vertices, sizeof(Vertices));
+    _pVB->Unlock();
+
+    return 0;
+}
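InitDevice() builds a window-filling quad in clip space; UpdateVerticeBuffer() further below maps the module's normalized window coordinates in [0, 1] onto D3D clip space in [-1, 1], flipping the y axis. The transform written out on its own:

    // Normalized window coords ([0,1], y down) to D3D clip space ([-1,1], y up).
    float ToClipX(float normalized) { return normalized * 2.0f - 1.0f; }
    float ToClipY(float normalized) { return 1.0f - normalized * 2.0f; }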
+ if (!_screenUpdateThread) + { + WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Thread not created"); + return -1; + } + _screenUpdateThread->Start(); + _screenUpdateThread->SetPriority(rtc::kRealtimePriority); + + // Start the event triggering the render process + unsigned int monitorFreq = 60; + DEVMODE dm; + // initialize the DEVMODE structure + ZeroMemory(&dm, sizeof(dm)); + dm.dmSize = sizeof(dm); + if (0 != EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &dm)) + { + monitorFreq = dm.dmDisplayFrequency; + } + _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq); + + return InitDevice(); +} + +int32_t VideoRenderDirect3D9::ChangeWindow(void* window) +{ + WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported."); + return -1; +} + +int VideoRenderDirect3D9::UpdateRenderSurface() +{ + CriticalSectionScoped cs(&_refD3DCritsect); + + // Check if there are any updated buffers + bool updated = false; + std::map::iterator it; + it = _d3dChannels.begin(); + while (it != _d3dChannels.end()) + { + + D3D9Channel* channel = it->second; + channel->IsUpdated(updated); + if (updated) + { + break; + } + it++; + } + //nothing is updated, continue + if (!updated) + return -1; + + // Clear the backbuffer to a black color + _pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f, + 0); + + // Begin the scene + if (SUCCEEDED(_pd3dDevice->BeginScene())) + { + _pd3dDevice->SetStreamSource(0, _pVB, 0, sizeof(CUSTOMVERTEX)); + _pd3dDevice->SetFVF(D3DFVF_CUSTOMVERTEX); + + //draw all the channels + //get texture from the channels + LPDIRECT3DTEXTURE9 textureFromChannel = NULL; + DWORD textureWidth, textureHeight; + + std::multimap::reverse_iterator it; + it = _d3dZorder.rbegin(); + while (it != _d3dZorder.rend()) + { + // loop through all channels and streams in Z order + int channel = it->second & 0x0000ffff; + + std::map::iterator ddIt; + ddIt = _d3dChannels.find(channel); + if (ddIt != _d3dChannels.end()) + { + // found the channel + D3D9Channel* channelObj = ddIt->second; + if (channelObj) + { + textureFromChannel = channelObj->GetTexture(); + textureWidth = channelObj->GetTextureWidth(); + textureHeight = channelObj->GetTextureHeight(); + + uint32_t zOrder; + float startWidth, startHeight, stopWidth, stopHeight; + channelObj->GetStreamSettings(0, zOrder, startWidth, + startHeight, stopWidth, + stopHeight); + + //draw the video stream + UpdateVerticeBuffer(_pVB, 0, startWidth, startHeight, + stopWidth, stopHeight); + _pd3dDevice->SetTexture(0, textureFromChannel); + _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2); + + //Notice channel that this frame as been rendered + channelObj->RenderOffFrame(); + } + } + it++; + } + + //draw the logo + if (_pTextureLogo) + { + UpdateVerticeBuffer(_pVB, 0, _logoLeft, _logoTop, _logoRight, + _logoBottom); + _pd3dDevice->SetTexture(0, _pTextureLogo); + _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2); + } + + // End the scene + _pd3dDevice->EndScene(); + } + + // Present the backbuffer contents to the display + _pd3dDevice->Present(NULL, NULL, NULL, NULL ); + + return 0; +} + +//set the alpha value of the pixal with a particular colorkey as 0 +int VideoRenderDirect3D9::SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture, + DDCOLORKEY* transparentColorKey, + DWORD width, + DWORD height) +{ + D3DLOCKED_RECT lr; + if (!pTexture) + return -1; + + CriticalSectionScoped cs(&_refD3DCritsect); + if (SUCCEEDED(pTexture->LockRect(0, &lr, NULL, D3DLOCK_DISCARD))) + { + for (DWORD y = 0; y < height; y++) + { + DWORD dwOffset = y * width; + + for (DWORD x 
+            {
+                DWORD temp = ((DWORD*) lr.pBits)[dwOffset + x];
+                if ((temp & 0x00FFFFFF)
+                    == transparentColorKey->dwColorSpaceLowValue)
+                {
+                    temp &= 0x00FFFFFF;
+                }
+                else
+                {
+                    temp |= 0xFF000000;
+                }
+                ((DWORD*) lr.pBits)[dwOffset + x] = temp;
+            }
+        }
+        pTexture->UnlockRect(0);
+        return 0;
+    }
+    return -1;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderDirect3D9*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderDirect3D9::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    if (!_screenUpdateThread)
+    {
+        // Stop the thread
+        return false;
+    }
+    if (!_pd3dDevice)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "d3dDevice not created.");
+        return true;
+    }
+
+    HRESULT hr = _pd3dDevice->TestCooperativeLevel();
+
+    if (SUCCEEDED(hr))
+    {
+        UpdateRenderSurface();
+    }
+
+    if (hr == D3DERR_DEVICELOST)
+    {
+        // The device is lost and cannot be reset yet.
+    }
+    else if (hr == D3DERR_DEVICENOTRESET)
+    {
+        // Lost, but we can reset it now.
+        // Note: the standard way is to call Reset(), but for some reason that
+        // doesn't work here, so we release the device and create it again.
+        ResetDevice();
+    }
+
+    return true;
+}
+
+int VideoRenderDirect3D9::CloseDevice()
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::CloseDevice");
+
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+
+    if (_pVB != NULL)
+    {
+        _pVB->Release();
+        _pVB = NULL;
+    }
+
+    if (_pd3dDevice != NULL)
+    {
+        _pd3dDevice->Release();
+        _pd3dDevice = NULL;
+    }
+
+    if (_pD3D != NULL)
+    {
+        _pD3D->Release();
+        _pD3D = NULL;
+    }
+
+    if (_pd3dSurface != NULL)
+    {
+        _pd3dSurface->Release();
+        _pd3dSurface = NULL;
+    }
+    return 0;
+}
+
+D3D9Channel* VideoRenderDirect3D9::GetD3DChannel(int channel)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return NULL;
+    }
+    return ddobj;
+}
+
+int32_t VideoRenderDirect3D9::DeleteChannel(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    std::multimap<int, unsigned int>::iterator it;
+    it = _d3dZorder.begin();
+    while (it != _d3dZorder.end())
+    {
+        if ((streamId & 0x0000ffff) == (it->second & 0x0000ffff))
+        {
+            it = _d3dZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(streamId & 0x0000ffff);
+    if (ddIt != _d3dChannels.end())
+    {
+        delete ddIt->second;
+        _d3dChannels.erase(ddIt);
+        return 0;
+    }
+    return -1;
+}
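Throughout this file, stream ids are reduced to a channel key with `id & 0x0000ffff`, which suggests the low 16 bits carry the channel id and the high bits a stream id. A minimal sketch of that apparent packing convention follows; the helper names are hypothetical and not part of the patch:

#include <cstdint>

// Assumed layout: low 16 bits = channel id, high 16 bits = stream id.
inline uint16_t ChannelFromId(uint32_t id) {
    return static_cast<uint16_t>(id & 0x0000ffff);  // same mask the module uses
}

inline uint16_t StreamFromId(uint32_t id) {
    return static_cast<uint16_t>(id >> 16);
}

inline uint32_t MakeId(uint16_t streamId, uint16_t channelId) {
    return (static_cast<uint32_t>(streamId) << 16) | channelId;
}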
+
+VideoRenderCallback* VideoRenderDirect3D9::CreateChannel(const uint32_t channel,
+                                                         const uint32_t zOrder,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    // FIXME: should this be done in VideoAPIWindows? Stop the frame delivery
+    // first, then remove the old channel.
+    DeleteChannel(channel);
+
+    D3D9Channel* d3dChannel = new D3D9Channel(_pd3dDevice,
+                                              &_refD3DCritsect, _trace);
+    d3dChannel->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    // Store the channel
+    _d3dChannels[channel & 0x0000ffff] = d3dChannel;
+
+    // Store the Z order; the default streamID is 0
+    _d3dZorder.insert(
+        std::pair<int, unsigned int>(zOrder, channel & 0x0000ffff));
+
+    return d3dChannel;
+}
+
+int32_t VideoRenderDirect3D9::GetStreamSettings(const uint32_t channel,
+                                                const uint16_t streamId,
+                                                uint32_t& zOrder,
+                                                float& left, float& top,
+                                                float& right, float& bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel; demuxing is handled outside the
+    // renderer.
+    return ddobj->GetStreamSettings(0, zOrder, left, top, right, bottom);
+}
+
+int VideoRenderDirect3D9::UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB,
+                                              int offset,
+                                              float startWidth,
+                                              float startHeight,
+                                              float stopWidth,
+                                              float stopHeight)
+{
+    if (pVB == NULL)
+        return -1;
+
+    float left, right, top, bottom;
+
+    // Update the vertex buffer
+    // 0,1 => -1,1
+    left = startWidth * 2 - 1;
+    right = stopWidth * 2 - 1;
+
+    // 0,1 => 1,-1
+    top = 1 - startHeight * 2;
+    bottom = 1 - stopHeight * 2;
+
+    CUSTOMVERTEX newVertices[] = {
+        // logo
+        { left, bottom, 0.0f, 0xffffffff, 0, 1 },
+        { left, top, 0.0f, 0xffffffff, 0, 0 },
+        { right, bottom, 0.0f, 0xffffffff, 1, 1 },
+        { right, top, 0.0f, 0xffffffff, 1, 0 },
+    };
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(pVB->Lock(sizeof(CUSTOMVERTEX) * offset, sizeof(newVertices),
+                         (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, newVertices, sizeof(newVertices));
+    pVB->Unlock();
+
+    return 0;
+}
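UpdateVerticeBuffer above maps normalized [0,1] window coordinates into Direct3D clip space, where x runs -1..1 left to right and y runs 1..-1 top to bottom. A self-contained sketch of just that mapping, with illustrative names only:

#include <cstdio>

struct ClipRect {
    float left, top, right, bottom;
};

// [0,1] window space -> [-1,1] clip space, y axis flipped.
ClipRect ToClipSpace(float startWidth, float startHeight,
                     float stopWidth, float stopHeight) {
    ClipRect r;
    r.left = startWidth * 2.0f - 1.0f;   // 0..1 -> -1..1
    r.right = stopWidth * 2.0f - 1.0f;
    r.top = 1.0f - startHeight * 2.0f;   // 0..1 -> 1..-1
    r.bottom = 1.0f - stopHeight * 2.0f;
    return r;
}

int main() {
    // The full window (0,0)-(1,1) maps to the full clip-space quad.
    ClipRect full = ToClipSpace(0.0f, 0.0f, 1.0f, 1.0f);
    std::printf("left=%g top=%g right=%g bottom=%g\n",
                full.left, full.top, full.right, full.bottom);
    // Prints: left=-1 top=1 right=1 bottom=-1
    return 0;
}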
+
+int32_t VideoRenderDirect3D9::StartRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::StopRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+bool VideoRenderDirect3D9::IsFullScreen()
+{
+    return _fullScreen;
+}
+
+int32_t VideoRenderDirect3D9::SetCropping(const uint32_t channel,
+                                          const uint16_t streamId,
+                                          const float left, const float top,
+                                          const float right, const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::SetTransparentBackground(
+    const bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::SetText(const uint8_t textId,
+                                      const uint8_t* text,
+                                      const int32_t textLength,
+                                      const uint32_t colorText,
+                                      const uint32_t colorBg,
+                                      const float left, const float top,
+                                      const float right, const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::SetBitmap(const void* bitMap,
+                                        const uint8_t pictureId,
+                                        const void* colorKey,
+                                        const float left, const float top,
+                                        const float right, const float bottom)
+{
+    if (!bitMap)
+    {
+        if (_pTextureLogo != NULL)
+        {
+            _pTextureLogo->Release();
+            _pTextureLogo = NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "Remove bitmap.");
+        return 0;
+    }
+
+    // Sanity check of the normalized coordinates
+    if (left > 1.0f || left < 0.0f ||
+        top > 1.0f || top < 0.0f ||
+        right > 1.0f || right < 0.0f ||
+        bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    if ((bottom <= top) || (right <= left))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    unsigned char* srcPtr;
+    HGDIOBJ oldhand;
+    BITMAPINFO pbi;
+    BITMAP bmap;
+    HDC hdcNew;
+    hdcNew = CreateCompatibleDC(0);
+    // Fill out the BITMAP structure.
+    GetObject((HBITMAP)bitMap, sizeof(bmap), &bmap);
+    // Select the bitmap handle into the new device context.
+    oldhand = SelectObject(hdcNew, (HGDIOBJ) bitMap);
+    // We are done with the previously selected object.
+    DeleteObject(oldhand);
+    pbi.bmiHeader.biSize = 40;
+    pbi.bmiHeader.biWidth = bmap.bmWidth;
+    pbi.bmiHeader.biHeight = bmap.bmHeight;
+    pbi.bmiHeader.biPlanes = 1;
+    pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
+    pbi.bmiHeader.biCompression = BI_RGB;
+    pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
+    srcPtr = new unsigned char[bmap.bmWidth * bmap.bmHeight * 4];
+    // Get the original un-stretched image.
+    int pixelHeight = GetDIBits(hdcNew, (HBITMAP)bitMap, 0, bmap.bmHeight,
+                                srcPtr, &pbi, DIB_RGB_COLORS);
+    if (pixelHeight == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to GetDIBits in SetBitmap");
+        DeleteDC(hdcNew);
+        delete[] srcPtr;
+        return -1;
+    }
+    DeleteDC(hdcNew);
+    if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to SetBitmap invalid bit depth");
+        delete[] srcPtr;
+        return -1;
+    }
+
+    HRESULT ret;
+    // Release the previous logo texture
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+    ret = _pd3dDevice->CreateTexture(bmap.bmWidth, bmap.bmHeight, 1, 0,
+                                     D3DFMT_A8R8G8B8, D3DPOOL_MANAGED,
+                                     &_pTextureLogo, NULL);
+    if (FAILED(ret))
+    {
+        _pTextureLogo = NULL;
+        delete[] srcPtr;
+        return -1;
+    }
+    if (!_pTextureLogo)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        delete[] srcPtr;
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+    if (FAILED(_pTextureLogo->LockRect(0, &lr, NULL, 0)))
+    {
+        delete[] srcPtr;
+        return -1;
+    }
+    unsigned char* dstPtr = (UCHAR*) lr.pBits;
+    int pitch = bmap.bmWidth * 4;
+
+    if (pbi.bmiHeader.biBitCount == 24)
+    {
+        ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight, 0);
+    }
+    else
+    {
+        // GetDIBits returns a bottom-up DIB, so copy the rows in reverse.
+        unsigned char* srcTmp = srcPtr + (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
+        for (int i = 0; i < bmap.bmHeight; ++i)
+        {
+            memcpy(dstPtr, srcTmp, bmap.bmWidth * 4);
+            srcTmp -= bmap.bmWidth * 4;
+            dstPtr += pitch;
+        }
+    }
+
+    delete[] srcPtr;
+    if (FAILED(_pTextureLogo->UnlockRect(0)))
+    {
+        return -1;
+    }
+
+    if (colorKey)
+    {
+        DDCOLORKEY* ddColorKey =
+            static_cast<DDCOLORKEY*>(const_cast<void*>(colorKey));
+        SetTransparentColor(_pTextureLogo, ddColorKey, bmap.bmWidth,
+                            bmap.bmHeight);
+    }
+
+    // Store the normalized logo rectangle; it is mapped to clip space by
+    // UpdateVerticeBuffer at render time.
+    _logoLeft = left;
+    _logoRight = right;
+    _logoTop = top;
+    _logoBottom = bottom;
+
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::GetGraphicsMemory(uint64_t& totalMemory,
+                                                uint64_t& availableMemory)
+{
+    totalMemory = _totalMemory;
+    availableMemory = _availableMemory;
+    return 0;
+}
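The 32-bit branch in SetBitmap above copies rows starting from the end of the source buffer because GetDIBits with a positive biHeight yields a bottom-up DIB. A minimal standalone sketch of that row flip, assuming a tightly packed 4-byte-per-pixel buffer (names are illustrative):

#include <cstring>

// Copy a bottom-up image into a top-down destination, row by row.
void FlipRowsBottomUp(const unsigned char* src, unsigned char* dst,
                      int width, int height) {
    const int stride = width * 4;  // 4 bytes per ARGB pixel, no padding
    const unsigned char* srcRow = src + stride * (height - 1);
    for (int i = 0; i < height; ++i) {
        std::memcpy(dst, srcRow, stride);
        srcRow -= stride;  // walk the bottom-up source backwards
        dst += stride;     // fill the top-down destination forwards
    }
}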
+
+int32_t VideoRenderDirect3D9::ConfigureRenderer(const uint32_t channel,
+                                                const uint16_t streamId,
+                                                const unsigned int zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel; demuxing is handled outside the
+    // renderer.
+    ddobj->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    return 0;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.h b/webrtc/modules/video_render/windows/video_render_direct3d9.h
new file mode 100644
index 0000000000..eaa8c147e2
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_direct3d9.h
@@ -0,0 +1,256 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+
+#include <memory>
+
+#include "webrtc/modules/video_render/windows/i_video_render_win.h"
+
+#include <d3d9.h>
+#include <ddraw.h>
+
+#include <map>
+
+// Added
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#pragma comment(lib, "d3d9.lib")  // located in the DirectX SDK
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventTimerWrapper;
+class Trace;
+
+class D3D9Channel: public VideoRenderCallback
+{
+public:
+    D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                CriticalSectionWrapper* critSect, Trace* trace);
+
+    virtual ~D3D9Channel();
+
+    // Inherited from VideoRenderCallback, called from the VideoAPI class.
+    // Called when the incoming frame size and/or number of streams in the
+    // mix changes.
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    // A new frame is delivered.
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame);
+
+    // Called to check if the video frame is updated.
+    int IsUpdated(bool& isUpdated);
+    // Called after the video frame has been rendered to the screen.
+    int RenderOffFrame();
+    // Called to get the texture that contains the video frame.
+    LPDIRECT3DTEXTURE9 GetTexture();
+    // Called to get the texture (video frame) size.
+    int GetTextureWidth();
+    int GetTextureHeight();
+
+    void SetStreamSettings(uint16_t streamId,
+                           uint32_t zOrder,
+                           float startWidth,
+                           float startHeight,
+                           float stopWidth,
+                           float stopHeight);
+    int GetStreamSettings(uint16_t streamId,
+                          uint32_t& zOrder,
+                          float& startWidth,
+                          float& startHeight,
+                          float& stopWidth,
+                          float& stopHeight);
+
+    int ReleaseTexture();
+    int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
+
+protected:
+
+private:
+    // Critical section passed from the owner.
+    CriticalSectionWrapper* _critSect;
+    LPDIRECT3DDEVICE9 _pd3dDevice;
+    LPDIRECT3DTEXTURE9 _pTexture;
+
+    bool _bufferIsUpdated;
+    // The frame size.
+    int _width;
+    int _height;
+    // Stream settings.
+    // TODO: support multiple streams in one channel.
+    uint16_t _streamId;
+    uint32_t _zOrder;
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+};
+
+class VideoRenderDirect3D9: IVideoRenderWin
+{
+public:
+    VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
+    ~VideoRenderDirect3D9();
+
+public:
+    //IVideoRenderWin
+
+    /**************************************************************************
+     *
+     *   Init
+     *
+     **************************************************************************/
+    virtual int32_t Init();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     **************************************************************************/
+    virtual VideoRenderCallback
+            * CreateChannel(const uint32_t streamId,
+                            const uint32_t zOrder,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom);
+
+    virtual int32_t DeleteChannel(const uint32_t streamId);
+
+    virtual int32_t GetStreamSettings(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      uint32_t& zOrder,
+                                      float& left, float& top,
+                                      float& right, float& bottom);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     **************************************************************************/
+
+    virtual int32_t StartRender();
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     **************************************************************************/
+
+    virtual bool IsFullScreen();
+
+    virtual int32_t SetCropping(const uint32_t channel,
+                                const uint16_t streamId,
+                                const float left, const float top,
+                                const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t ChangeWindow(void* window);
+
+    virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
+                                      uint64_t& availableMemory);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t colorText,
+                            const uint32_t colorBg,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+public:
+    // Get a channel by channel id.
+    D3D9Channel* GetD3DChannel(int channel);
+    int UpdateRenderSurface();
+
+protected:
+    // The thread rendering the screen.
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+
+private:
+    // Init/close the D3D device.
+    int InitDevice();
+    int CloseDevice();
+
+    // Transparency-related functions.
+    int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                            DDCOLORKEY* transparentColorKey,
+                            DWORD width,
+                            DWORD height);
+
+    CriticalSectionWrapper& _refD3DCritsect;
+    Trace* _trace;
+    // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
+    std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
+    EventTimerWrapper* _screenUpdateEvent;
+
+    HWND _hWnd;
+    bool _fullScreen;
+    RECT _originalHwndRect;
+    // FIXME: we probably don't need this, since all the information can be
+    // obtained from _d3dChannels.
+    int _channel;
+    // Window size.
+    UINT _winWidth;
+    UINT _winHeight;
+
+    // Device
+    LPDIRECT3D9 _pD3D;              // Used to create the D3DDevice
+    LPDIRECT3DDEVICE9 _pd3dDevice;  // Our rendering device
+    LPDIRECT3DVERTEXBUFFER9 _pVB;   // Buffer to hold vertices
+    LPDIRECT3DTEXTURE9 _pTextureLogo;
+
+    std::map<int, D3D9Channel*> _d3dChannels;
+    std::multimap<int, unsigned int> _d3dZorder;
+
+    // The position where the logo will be placed.
+    float _logoLeft;
+    float _logoTop;
+    float _logoRight;
+    float _logoBottom;
+
+    typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
+    LPDIRECT3DSURFACE9 _pd3dSurface;
+
+    DWORD GetVertexProcessingCaps();
+    int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
+
+    D3DPRESENT_PARAMETERS _d3dpp;
+    int ResetDevice();
+
+    int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
+                            float startWidth, float startHeight,
+                            float stopWidth, float stopHeight);
+
+    // Graphics settings reporting.
+    DWORD _totalMemory;
+    DWORD _availableMemory;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
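The header above pairs a dedicated thread (_screenUpdateThread) with a timer event (_screenUpdateEvent) that Init arms at the monitor refresh rate. A portable sketch of that same polling pattern, using the standard library instead of rtc::PlatformThread and EventTimerWrapper (illustrative only, not the module's actual classes):

#include <atomic>
#include <chrono>
#include <thread>

class ScreenUpdateLoop {
 public:
    // monitorFreqHz is assumed to be non-zero; the real module falls back to 60.
    explicit ScreenUpdateLoop(unsigned int monitorFreqHz)
        : period_(std::chrono::milliseconds(1000 / monitorFreqHz)) {}

    void Start() {
        running_ = true;
        thread_ = std::thread([this] {
            while (running_) {
                std::this_thread::sleep_for(period_);  // timer tick
                Render();  // would call UpdateRenderSurface() in the real module
            }
        });
    }

    void Stop() {
        running_ = false;
        if (thread_.joinable())
            thread_.join();
    }

 private:
    void Render() {}

    std::chrono::milliseconds period_;
    std::atomic<bool> running_{false};
    std::thread thread_;
};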
diff --git a/webrtc/modules/video_render/windows/video_render_windows_impl.cc b/webrtc/modules/video_render/windows/video_render_windows_impl.cc
new file mode 100644
index 0000000000..042d7fdfa3
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_windows_impl.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
+
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#ifdef DIRECT3D9_RENDERING
+#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
+#endif
+
+#include <tchar.h>
+
+namespace webrtc {
+
+VideoRenderWindowsImpl::VideoRenderWindowsImpl(const int32_t id,
+    const VideoRenderType videoRenderType, void* window, const bool fullscreen)
+    : _renderWindowsCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+      _ptrWindow(window),
+      _fullscreen(fullscreen),
+      _renderMethod(kVideoRenderWinD3D9),
+      _ptrRendererWin(NULL) {
+}
+
+VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
+{
+    delete &_renderWindowsCritsect;
+    if (_ptrRendererWin)
+    {
+        delete _ptrRendererWin;
+        _ptrRendererWin = NULL;
+    }
+}
+
+int32_t VideoRenderWindowsImpl::Init()
+{
+    // Create the Windows renderer.
+    switch (_renderMethod)
+    {
+        case kVideoRenderWinD3D9:
+        {
+#ifdef DIRECT3D9_RENDERING
+            VideoRenderDirect3D9* ptrRenderer;
+            ptrRenderer = new VideoRenderDirect3D9(NULL, (HWND) _ptrWindow,
+                                                   _fullscreen);
+            if (ptrRenderer == NULL)
+            {
+                break;
+            }
+            _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
+#else
+            // No renderer compiled in; report failure instead of NULL (0),
+            // which would read as success.
+            return -1;
+#endif  // DIRECT3D9_RENDERING
+        }
+            break;
+        default:
+            break;
+    }
+
+    // Init the renderer.
+    if (_ptrRendererWin)
+        return _ptrRendererWin->Init();
+    else
+        return -1;
+}
+
+int32_t VideoRenderWindowsImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    if (!_ptrRendererWin)
+    {
+        return -1;
+    }
+    return _ptrRendererWin->ChangeWindow(window);
+}
+
+VideoRenderCallback*
+VideoRenderWindowsImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                                const uint32_t zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    VideoRenderCallback* renderCallback = NULL;
+
+    if (_ptrRendererWin)
+    {
+        renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
+                                                        top, right, bottom);
+    }
+
+    return renderCallback;
+}
+
+int32_t VideoRenderWindowsImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->DeleteChannel(streamId);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::StartRender()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->StartRender();
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::StopRender()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->StopRender();
+    }
+    return error;
+}
+
+VideoRenderType VideoRenderWindowsImpl::RenderType()
+{
+    return kRenderWindows;
+}
+
+RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderWindowsImpl::FullScreen()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    bool fullscreen = false;
+    if (_ptrRendererWin)
+    {
+        fullscreen = _ptrRendererWin->IsFullScreen();
+    }
+    return fullscreen;
+}
+
+int32_t VideoRenderWindowsImpl::GetGraphicsMemory(
+    uint64_t& totalGraphicsMemory,
+    uint64_t& availableGraphicsMemory) const
+{
+    if (_ptrRendererWin)
+    {
+        return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
+                                                  availableGraphicsMemory);
+    }
+
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+int32_t VideoRenderWindowsImpl::GetScreenResolution(
+    uint32_t& screenWidth,
+    uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+uint32_t VideoRenderWindowsImpl::RenderFrameRate(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    return 0;
+}
+
+int32_t VideoRenderWindowsImpl::SetStreamCropping(
+    const uint32_t streamId,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
+                                             bottom);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::ConfigureRenderer(
+    const uint32_t streamId,
+    const unsigned int zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::SetTransparentBackground(
+    const bool enable)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetTransparentBackground(enable);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::SetText(
+    const uint8_t textId,
+    const uint8_t* text,
+    const int32_t textLength,
+    const uint32_t textColorRef,
+    const uint32_t backgroundColorRef,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetText(textId, text, textLength,
+                                         textColorRef, backgroundColorRef,
+                                         left, top, right, bottom);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
+                                          const uint8_t pictureId,
+                                          const void* colorKey,
+                                          const float left, const float top,
+                                          const float right, const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
+                                           top, right, bottom);
+    }
+    return error;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/windows/video_render_windows_impl.h b/webrtc/modules/video_render/windows/video_render_windows_impl.h
new file mode 100644
index 0000000000..aaa3f81fc7
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_windows_impl.h
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+
+#include <winerror.h>
+#include <windows.h>
+
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/windows/i_video_render_win.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+#define EXPAND(x) x, sizeof(x)/sizeof(TCHAR)
+
+enum VideoRenderWinMethod {
+    kVideoRenderWinD3D9 = 0,
+};
+
+// Class definitions
+class VideoRenderWindowsImpl: IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+    VideoRenderWindowsImpl(const int32_t id,
+                           const VideoRenderType videoRenderType,
+                           void* window, const bool fullscreen);
+
+    virtual ~VideoRenderWindowsImpl();
+
+    virtual int32_t Init();
+
+    virtual int32_t ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     **************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const uint32_t streamId,
+                                      const uint32_t zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t
+            DeleteIncomingRenderStream(const uint32_t streamId);
+
+    virtual int32_t
+            GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                              uint32_t& zOrder,
+                                              float& left, float& top,
+                                              float& right,
+                                              float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     **************************************************************************/
+
+    virtual int32_t StartRender();
+
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     **************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual int32_t
+            GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+                              uint64_t& availableGraphicsMemory) const;
+
+    virtual int32_t
+            GetScreenResolution(uint32_t& screenWidth,
+                                uint32_t& screenHeight) const;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+private:
+    CriticalSectionWrapper& _renderWindowsCritsect;
+
+    void* _ptrWindow;
+    bool _fullscreen;
+
+    VideoRenderWinMethod _renderMethod;
+    IVideoRenderWin* _ptrRendererWin;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
diff --git a/webrtc/test/test.gyp b/webrtc/test/test.gyp
index a04a128a0e..65349ccff4 100644
--- a/webrtc/test/test.gyp
+++ b/webrtc/test/test.gyp
@@ -304,6 +304,7 @@
         '<(webrtc_root)/base/base.gyp:rtc_base_approved',
         '<(webrtc_root)/common.gyp:webrtc_common',
         '<(webrtc_root)/modules/modules.gyp:media_file',
+        '<(webrtc_root)/modules/modules.gyp:video_render',
         '<(webrtc_root)/webrtc.gyp:webrtc',
         'rtp_test_utils',
         'test_support',
diff --git a/webrtc/video/BUILD.gn b/webrtc/video/BUILD.gn
index da5ffc3b01..4f1b7ae197 100644
--- a/webrtc/video/BUILD.gn
+++ b/webrtc/video/BUILD.gn
@@ -71,6 +71,7 @@ source_set("video") {
     "../modules/video_capture:video_capture_module",
     "../modules/video_coding",
     "../modules/video_processing",
+    "../modules/video_render:video_render_module",
     "../system_wrappers",
     "../voice_engine",
   ]
diff --git a/webrtc/video/DEPS b/webrtc/video/DEPS
index 7e53144d33..8c54066fd9 100644
--- a/webrtc/video/DEPS
+++ b/webrtc/video/DEPS
@@ -12,6 +12,7 @@ include_rules = [
   "+webrtc/modules/video_coding",
   "+webrtc/modules/video_capture",
   "+webrtc/modules/video_processing",
+  "+webrtc/modules/video_render",
   "+webrtc/system_wrappers",
   "+webrtc/voice_engine",
 ]
diff --git a/webrtc/video/video_capture_input.cc b/webrtc/video/video_capture_input.cc
index 8f574e2115..54b41b5006 100644
--- a/webrtc/video/video_capture_input.cc
+++ b/webrtc/video/video_capture_input.cc
@@ -16,6 +16,7 @@
 #include "webrtc/modules/include/module_common_types.h"
 #include "webrtc/modules/video_capture/video_capture_factory.h"
 #include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
 #include "webrtc/video/overuse_frame_detector.h"
 #include "webrtc/video/send_statistics_proxy.h"
 #include "webrtc/video/vie_encoder.h"
diff --git a/webrtc/video/video_receive_stream.h b/webrtc/video/video_receive_stream.h
index a0edcac8d4..316e281fcc 100644
--- a/webrtc/video/video_receive_stream.h
+++ b/webrtc/video/video_receive_stream.h
@@ -18,6 +18,7 @@
 #include "webrtc/call/transport_adapter.h"
 #include "webrtc/common_video/include/incoming_video_stream.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
 #include "webrtc/modules/video_coding/video_coding_impl.h"
 #include "webrtc/system_wrappers/include/clock.h"
 #include "webrtc/video/encoded_frame_callback_adapter.h"
diff --git a/webrtc/video/vie_channel.cc b/webrtc/video/vie_channel.cc
index a1bdfb0333..a8ee4bcad0 100644
--- a/webrtc/video/vie_channel.cc
+++ b/webrtc/video/vie_channel.cc
@@ -25,6 +25,7 @@
 #include "webrtc/modules/utility/include/process_thread.h"
 #include "webrtc/modules/video_coding/video_coding_impl.h"
 #include "webrtc/modules/video_processing/include/video_processing.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
 #include "webrtc/system_wrappers/include/metrics.h"
 #include "webrtc/video/call_stats.h"
 #include "webrtc/video/payload_router.h"
diff --git a/webrtc/video/webrtc_video.gypi b/webrtc/video/webrtc_video.gypi
index f7e1fd937b..f11ce95727 100644
--- a/webrtc/video/webrtc_video.gypi
+++ b/webrtc/video/webrtc_video.gypi
@@ -17,6 +17,7 @@
         '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
         '<(webrtc_root)/modules/modules.gyp:video_capture_module',
         '<(webrtc_root)/modules/modules.gyp:video_processing',
+        '<(webrtc_root)/modules/modules.gyp:video_render_module',
         '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
         '<(webrtc_root)/modules/modules.gyp:webrtc_video_coding',
         '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
diff --git a/webrtc/webrtc.gyp b/webrtc/webrtc.gyp
index abc48e981e..8e7c99bc2b 100644
--- a/webrtc/webrtc.gyp
+++ b/webrtc/webrtc.gyp
@@ -148,6 +148,7 @@
         ['build_with_chromium==1', {
           'dependencies': [
             '<(webrtc_root)/modules/modules.gyp:video_capture',
+            '<(webrtc_root)/modules/modules.gyp:video_render',
           ],
         }],
       ],
diff --git a/webrtc/webrtc_tests.gypi b/webrtc/webrtc_tests.gypi
index e10aec119d..57996f29b1 100644
--- a/webrtc/webrtc_tests.gypi
+++ b/webrtc/webrtc_tests.gypi
@@ -57,6 +57,7 @@
       ],
      'dependencies': [
        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(webrtc_root)/modules/modules.gyp:video_render',
        '<(webrtc_root)/modules/modules.gyp:video_capture_module_internal_impl',
        '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
        'webrtc',
@@ -143,6 +144,7 @@
        'test/test.gyp:test_common',
        'test/test.gyp:test_renderer',
        '<(webrtc_root)/modules/modules.gyp:video_capture',
+        '<(webrtc_root)/modules/modules.gyp:video_render',
        '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers_default',
        'webrtc',
       ],
@@ -182,6 +184,7 @@
        '<(webrtc_root)/common.gyp:webrtc_common',
        '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
        '<(webrtc_root)/modules/modules.gyp:video_capture',
+        '<(webrtc_root)/modules/modules.gyp:video_render',
        '<(webrtc_root)/test/test.gyp:channel_transport',
        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
        'test/metrics.gyp:metrics',
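For context, a minimal sketch of how the restored module is typically driven on Windows, assuming the VideoRender interface reinstated by this revert (webrtc/modules/video_render/video_render.h); the window handle and stream id are placeholders:

#include "webrtc/modules/video_render/video_render.h"

void RenderExample(void* hwnd) {
    // Create a renderer bound to a native window (D3D9 on Windows).
    webrtc::VideoRender* renderer =
        webrtc::VideoRender::CreateVideoRender(0, hwnd, false,
                                               webrtc::kRenderWindows);
    if (!renderer)
        return;

    // Register a stream covering the whole window; the returned callback
    // receives decoded frames via RenderFrame().
    webrtc::VideoRenderCallback* sink =
        renderer->AddIncomingRenderStream(1 /* streamId */, 0 /* zOrder */,
                                          0.0f, 0.0f, 1.0f, 1.0f);
    if (sink)
        renderer->StartRender(1);

    // ... deliver frames through sink->RenderFrame(1, frame) ...

    renderer->StopRender(1);
    renderer->DeleteIncomingRenderStream(1);
    webrtc::VideoRender::DestroyVideoRender(renderer);
}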