diff --git a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
index 097d1cd906..ae164cb264 100644
--- a/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
+++ b/talk/app/webrtc/java/android/org/webrtc/Camera2Enumerator.java
@@ -34,11 +34,11 @@ import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.params.StreamConfigurationMap;
 import android.os.Build;
 import android.os.SystemClock;
+import android.util.Log;
 import android.util.Range;
 import android.util.Size;
 
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-import org.webrtc.Logging;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -69,14 +69,14 @@ public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
     if (cachedSupportedFormats.containsKey(cameraId)) {
       return cachedSupportedFormats.get(cameraId);
     }
-    Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+    Log.d(TAG, "Get supported formats for camera index " + cameraId + ".");
     final long startTimeMs = SystemClock.elapsedRealtime();
     final CameraCharacteristics cameraCharacteristics;
     try {
       cameraCharacteristics = cameraManager.getCameraCharacteristics(Integer.toString(cameraId));
     } catch (Exception ex) {
-      Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+      Log.e(TAG, "getCameraCharacteristics(): " + ex);
       return new ArrayList();
     }
@@ -111,7 +111,7 @@ public class Camera2Enumerator implements CameraEnumerationAndroid.Enumerator {
     }
     cachedSupportedFormats.put(cameraId, formatList);
     final long endTimeMs = SystemClock.elapsedRealtime();
-    Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+    Log.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms."); return formatList; } diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java index 5b623ce143..c8f9a30024 100644 --- a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java +++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerationAndroid.java @@ -29,17 +29,14 @@ package org.webrtc; import static java.lang.Math.abs; import static java.lang.Math.ceil; - import android.hardware.Camera; +import android.util.Log; import android.graphics.ImageFormat; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; -import org.webrtc.Logging; - -import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; @@ -148,7 +145,7 @@ public class CameraEnumerationAndroid { try { Camera.getCameraInfo(index, info); } catch (Exception e) { - Logging.e(TAG, "getCameraInfo failed on index " + index,e); + Log.e(TAG, "getCameraInfo failed on index " + index,e); return null; } @@ -180,7 +177,7 @@ public class CameraEnumerationAndroid { json_format.put("framerate", (format.maxFramerate + 999) / 1000); json_formats.put(json_format); } - Logging.d(TAG, "Supported formats for camera " + id + ": " + Log.d(TAG, "Supported formats for camera " + id + ": " + json_formats.toString(2)); return json_formats.toString(); } @@ -199,7 +196,7 @@ public class CameraEnumerationAndroid { public static int[] getFramerateRange(Camera.Parameters parameters, final int framerate) { List listFpsRange = parameters.getSupportedPreviewFpsRange(); if (listFpsRange.isEmpty()) { - Logging.w(TAG, "No supported preview fps range"); + Log.w(TAG, "No supported preview fps range"); return new int[]{0, 0}; } return Collections.min(listFpsRange, @@ -230,7 +227,7 @@ public class CameraEnumerationAndroid { return getDeviceName(i); } } catch (Exception e) { - Logging.e(TAG, "getCameraInfo() failed on index " + i, e); + Log.e(TAG, "getCameraInfo() failed on index " + i, e); } } return null; diff --git a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java index 2f35dc3493..0e6b978c9d 100644 --- a/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java +++ b/talk/app/webrtc/java/android/org/webrtc/CameraEnumerator.java @@ -29,9 +29,9 @@ package org.webrtc; import android.hardware.Camera; import android.os.SystemClock; +import android.util.Log; import org.webrtc.CameraEnumerationAndroid.CaptureFormat; -import org.webrtc.Logging; import java.util.ArrayList; import java.util.List; @@ -58,16 +58,16 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator { } private List enumerateFormats(int cameraId) { - Logging.d(TAG, "Get supported formats for camera index " + cameraId + "."); + Log.d(TAG, "Get supported formats for camera index " + cameraId + "."); final long startTimeMs = SystemClock.elapsedRealtime(); final Camera.Parameters parameters; Camera camera = null; try { - Logging.d(TAG, "Opening camera with index " + cameraId); + Log.d(TAG, "Opening camera with index " + cameraId); camera = Camera.open(cameraId); parameters = camera.getParameters(); } catch (RuntimeException e) { - Logging.e(TAG, "Open camera failed on camera index " + cameraId, e); + Log.e(TAG, "Open camera failed on camera index " + cameraId, e); return new ArrayList(); } finally { if (camera != null) { @@ -91,11 +91,11 @@ public class 
         formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
       }
     } catch (Exception e) {
-      Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+      Log.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
     }
 
     final long endTimeMs = SystemClock.elapsedRealtime();
-    Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+    Log.d(TAG, "Get supported formats for camera index " + cameraId + " done."
         + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
     return formatList;
   }
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase.java b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
index 2aa28074c5..20af8ecc75 100644
--- a/talk/app/webrtc/java/android/org/webrtc/EglBase.java
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
@@ -32,10 +32,9 @@ import android.opengl.EGLConfig;
 import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
 import android.opengl.EGLSurface;
+import android.util.Log;
 import android.view.Surface;
 
-import org.webrtc.Logging;
-
 /**
  * Holds EGL state and utility methods for handling an EGLContext, an EGLDisplay, and an EGLSurface.
  */
@@ -53,7 +52,7 @@ public final class EglBase {
   private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
 
   public static boolean isEGL14Supported() {
-    Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION);
+    Log.d(TAG, "SDK version: " + CURRENT_SDK_VERSION);
     return (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION);
   }
 
@@ -87,7 +86,7 @@ public final class EglBase {
   public void createSurface(Surface surface) {
     checkIsNotReleased();
     if (configType == ConfigType.PIXEL_BUFFER) {
-      Logging.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
+      Log.w(TAG, "This EGL context is configured for PIXEL_BUFFER, but uses regular Surface");
     }
     if (eglSurface != EGL14.EGL_NO_SURFACE) {
       throw new RuntimeException("Already has an EGLSurface");
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlShader.java b/talk/app/webrtc/java/android/org/webrtc/GlShader.java
index 966f0f5794..3014aab407 100644
--- a/talk/app/webrtc/java/android/org/webrtc/GlShader.java
+++ b/talk/app/webrtc/java/android/org/webrtc/GlShader.java
@@ -28,8 +28,7 @@ package org.webrtc;
 
 import android.opengl.GLES20;
-
-import org.webrtc.Logging;
+import android.util.Log;
 
 import java.nio.FloatBuffer;
@@ -46,7 +45,7 @@ public class GlShader {
     GLES20.glCompileShader(shader);
     GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
     if (result[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
+      Log.e(TAG, "Could not compile shader " + shaderType + ":" +
           GLES20.glGetShaderInfoLog(shader));
       throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
     }
@@ -73,7 +72,7 @@ public class GlShader {
     };
     GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
     if (linkStatus[0] != GLES20.GL_TRUE) {
-      Logging.e(TAG, "Could not link program: " +
+      Log.e(TAG, "Could not link program: " +
           GLES20.glGetProgramInfoLog(program));
       throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
     }
@@ -125,7 +124,7 @@ public class GlShader {
   }
 
   public void release() {
-    Logging.d(TAG, "Deleting shader.");
+    Log.d(TAG, "Deleting shader.");
     // Flag shaders for deletion (does not delete until no longer attached to a program).
     if (vertexShader != -1) {
       GLES20.glDeleteShader(vertexShader);
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlUtil.java b/talk/app/webrtc/java/android/org/webrtc/GlUtil.java
index 7a2f1bc2a3..8b4357969d 100644
--- a/talk/app/webrtc/java/android/org/webrtc/GlUtil.java
+++ b/talk/app/webrtc/java/android/org/webrtc/GlUtil.java
@@ -37,7 +37,6 @@ import java.nio.FloatBuffer;
  * Some OpenGL static utility functions.
  */
 public class GlUtil {
-  private static final String TAG = "GlUtil";
   private GlUtil() {}
 
   // Assert that no OpenGL ES 2.0 error has been raised.
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index c319a066f3..95cceffa3d 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -27,8 +27,6 @@
 
 package org.webrtc;
 
-import java.nio.ByteBuffer;
-
 import android.content.Context;
 import android.graphics.Point;
 import android.graphics.SurfaceTexture;
@@ -38,11 +36,10 @@ import android.opengl.Matrix;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.util.AttributeSet;
+import android.util.Log;
 import android.view.SurfaceHolder;
 import android.view.SurfaceView;
 
-import org.webrtc.Logging;
-
 /**
  * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
  * renderFrame() is asynchronous to avoid blocking the calling thread.
@@ -147,7 +144,7 @@ public class SurfaceViewRenderer extends SurfaceView
     if (renderThreadHandler != null) {
       throw new IllegalStateException("Already initialized");
     }
-    Logging.d(TAG, "Initializing");
+    Log.d(TAG, "Initializing");
     this.rendererEvents = rendererEvents;
     renderThread = new HandlerThread(TAG);
     renderThread.start();
@@ -164,7 +161,7 @@ public class SurfaceViewRenderer extends SurfaceView
   public void release() {
     synchronized (threadLock) {
       if (renderThreadHandler == null) {
-        Logging.d(TAG, "Already released");
+        Log.d(TAG, "Already released");
         return;
       }
       // Release EGL and GL resources on render thread.
@@ -224,7 +221,7 @@ public class SurfaceViewRenderer extends SurfaceView
     }
     synchronized (threadLock) {
       if (renderThreadHandler == null) {
-        Logging.d(TAG, "Dropping frame - SurfaceViewRenderer not initialized or already released.");
+        Log.d(TAG, "Dropping frame - SurfaceViewRenderer not initialized or already released.");
       } else {
         synchronized (frameLock) {
           if (pendingFrame == null) {
@@ -284,7 +281,7 @@ public class SurfaceViewRenderer extends SurfaceView
   // SurfaceHolder.Callback interface.
   @Override
   public void surfaceCreated(final SurfaceHolder holder) {
-    Logging.d(TAG, "Surface created");
+    Log.d(TAG, "Surface created");
     runOnRenderThread(new Runnable() {
       @Override public void run() {
         eglBase.createSurface(holder.getSurface());
@@ -297,7 +294,7 @@ public class SurfaceViewRenderer extends SurfaceView
 
   @Override
   public void surfaceDestroyed(SurfaceHolder holder) {
-    Logging.d(TAG, "Surface destroyed");
+    Log.d(TAG, "Surface destroyed");
     synchronized (layoutLock) {
       surfaceWidth = 0;
       surfaceHeight = 0;
@@ -311,7 +308,7 @@ public class SurfaceViewRenderer extends SurfaceView
 
   @Override
   public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
-    Logging.d(TAG, "Surface changed: " + width + "x" + height);
+    Log.d(TAG, "Surface changed: " + width + "x" + height);
     synchronized (layoutLock) {
       surfaceWidth = width;
       surfaceHeight = height;
@@ -338,7 +335,7 @@ public class SurfaceViewRenderer extends SurfaceView
     synchronized (layoutLock) {
       final Point desiredLayoutSize = getDesiredLayoutSize();
       if (desiredLayoutSize.x != layoutWidth || desiredLayoutSize.y != layoutHeight) {
-        Logging.d(TAG, "Requesting new layout with size: "
+        Log.d(TAG, "Requesting new layout with size: "
            + desiredLayoutSize.x + "x" + desiredLayoutSize.y);
         // Request layout update on UI thread.
         post(new Runnable() {
@@ -359,7 +356,7 @@ public class SurfaceViewRenderer extends SurfaceView
    */
   private void renderFrameOnRenderThread() {
     if (eglBase == null || !eglBase.hasSurface()) {
-      Logging.d(TAG, "No surface to draw on");
+      Log.d(TAG, "No surface to draw on");
       return;
     }
     if (!checkConsistentLayout()) {
@@ -452,10 +449,10 @@ public class SurfaceViewRenderer extends SurfaceView
     if (rendererEvents != null) {
       final String id = getResources().getResourceEntryName(getId());
       if (frameWidth == 0 || frameHeight == 0) {
-        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
+        Log.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
         rendererEvents.onFirstFrameRendered();
       }
-      Logging.d(TAG, "ID: " + id + ". Reporting frame resolution changed to "
+      Log.d(TAG, "ID: " + id + ". Reporting frame resolution changed to "
          + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
       rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
     }
@@ -468,13 +465,13 @@ public class SurfaceViewRenderer extends SurfaceView
 
   private void logStatistics() {
     synchronized (statisticsLock) {
-      Logging.d(TAG, "ID: " + getResources().getResourceEntryName(getId()) + ". Frames received: "
+      Log.d(TAG, "ID: " + getResources().getResourceEntryName(getId()) + ". Frames received: "
          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
       if (framesReceived > 0 && framesRendered > 0) {
         final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
-        Logging.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+        Log.d(TAG, "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
FPS: " + (float) framesRendered * 1e9 / timeSinceFirstFrameNs); - Logging.d(TAG, "Average render time: " + Log.d(TAG, "Average render time: " + (int) (renderTimeNs / (1000 * framesRendered)) + " us."); } } diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java index ebd858a962..dc3f8e8700 100644 --- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java +++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java @@ -36,13 +36,12 @@ import android.opengl.GLES20; import android.os.Handler; import android.os.Looper; import android.os.SystemClock; +import android.util.Log; import android.view.Surface; import android.view.WindowManager; import org.json.JSONException; - import org.webrtc.CameraEnumerationAndroid.CaptureFormat; -import org.webrtc.Logging; import java.io.IOException; import java.nio.ByteBuffer; @@ -106,7 +105,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } else { errorMessage = "Camera error: " + error; } - Logging.e(TAG, errorMessage); + Log.e(TAG, errorMessage); if (errorHandler != null) { errorHandler.onCameraError(errorMessage); } @@ -125,11 +124,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba averageCaptureBuffersCount = (double)captureBuffersCount / cameraFramesCount; } - Logging.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: " + + Log.d(TAG, "Camera fps: " + cameraFps + ". CaptureBuffers: " + String.format("%.1f", averageCaptureBuffersCount) + ". Pending buffers: " + videoBuffers.pendingFramesTimeStamps()); if (cameraFramesCount == 0) { - Logging.e(TAG, "Camera freezed."); + Log.e(TAG, "Camera freezed."); if (errorHandler != null) { errorHandler.onCameraError("Camera failure."); } @@ -169,13 +168,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba return false; if (cameraThreadHandler == null) { - Logging.e(TAG, "Calling switchCamera() for stopped camera."); + Log.e(TAG, "Calling switchCamera() for stopped camera."); return false; } if (pendingCameraSwitch) { // Do not handle multiple camera switch request to avoid blocking // camera thread by handling too many switch request from a queue. - Logging.w(TAG, "Ignoring camera switch request."); + Log.w(TAG, "Ignoring camera switch request."); return false; } @@ -194,7 +193,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba public synchronized void onOutputFormatRequest( final int width, final int height, final int fps) { if (cameraThreadHandler == null) { - Logging.e(TAG, "Calling onOutputFormatRequest() for already stopped camera."); + Log.e(TAG, "Calling onOutputFormatRequest() for already stopped camera."); return; } cameraThreadHandler.post(new Runnable() { @@ -209,7 +208,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba public synchronized void changeCaptureFormat( final int width, final int height, final int framerate) { if (cameraThreadHandler == null) { - Logging.e(TAG, "Calling changeCaptureFormat() for already stopped camera."); + Log.e(TAG, "Calling changeCaptureFormat() for already stopped camera."); return; } cameraThreadHandler.post(new Runnable() { @@ -230,14 +229,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } private VideoCapturerAndroid() { - Logging.d(TAG, "VideoCapturerAndroid"); + Log.d(TAG, "VideoCapturerAndroid"); } // Called by native code. 
   // Initializes local variables for the camera named |deviceName|. If |deviceName| is empty, the
   // first available device is used in order to be compatible with the generic VideoCapturer class.
   synchronized boolean init(String deviceName) {
-    Logging.d(TAG, "init: " + deviceName);
+    Log.d(TAG, "init: " + deviceName);
     if (deviceName == null)
       return false;
@@ -282,7 +281,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
   synchronized void startCapture(
       final int width, final int height, final int framerate,
       final Context applicationContext, final CapturerObserver frameObserver) {
-    Logging.d(TAG, "startCapture requested: " + width + "x" + height
+    Log.d(TAG, "startCapture requested: " + width + "x" + height
        + "@" + framerate);
     if (applicationContext == null) {
       throw new RuntimeException("applicationContext not set.");
@@ -313,7 +312,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     this.applicationContext = applicationContext;
     this.frameObserver = frameObserver;
     try {
-      Logging.d(TAG, "Opening camera " + id);
+      Log.d(TAG, "Opening camera " + id);
       camera = Camera.open(id);
       info = new Camera.CameraInfo();
       Camera.getCameraInfo(id, info);
@@ -329,11 +328,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
       camera.setPreviewTexture(cameraSurfaceTexture);
     } catch (IOException e) {
-      Logging.e(TAG, "setPreviewTexture failed", error);
+      Log.e(TAG, "setPreviewTexture failed", e);
       throw new RuntimeException(e);
     }
 
-    Logging.d(TAG, "Camera orientation: " + info.orientation +
+    Log.d(TAG, "Camera orientation: " + info.orientation +
        " .Device orientation: " + getDeviceOrientation());
     camera.setErrorCallback(cameraErrorCallback);
     startPreviewOnCameraThread(width, height, framerate);
@@ -347,7 +346,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     } catch (RuntimeException e) {
       error = e;
     }
-    Logging.e(TAG, "startCapture failed", error);
+    Log.e(TAG, "startCapture failed", error);
     stopCaptureOnCameraThread();
     cameraThreadHandler = null;
     frameObserver.OnCapturerStarted(false);
@@ -359,10 +358,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
 
   // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
   private void startPreviewOnCameraThread(int width, int height, int framerate) {
-    Logging.d(
+    Log.d(
        TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
     if (camera == null) {
-      Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
+      Log.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
       return;
     }
 
@@ -386,7 +385,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     }
 
     // Update camera parameters.
-    Logging.d(TAG, "isVideoStabilizationSupported: " +
+    Log.d(TAG, "isVideoStabilizationSupported: " +
        parameters.isVideoStabilizationSupported());
     if (parameters.isVideoStabilizationSupported()) {
       parameters.setVideoStabilization(true);
@@ -413,7 +412,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     }
 
     // (Re)start preview.
- Logging.d(TAG, "Start capturing: " + captureFormat); + Log.d(TAG, "Start capturing: " + captureFormat); this.captureFormat = captureFormat; camera.setParameters(parameters); videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera); @@ -424,10 +423,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba // Called by native code. Returns true when camera is known to be stopped. synchronized void stopCapture() throws InterruptedException { if (cameraThreadHandler == null) { - Logging.e(TAG, "Calling stopCapture() for already stopped camera."); + Log.e(TAG, "Calling stopCapture() for already stopped camera."); return; } - Logging.d(TAG, "stopCapture"); + Log.d(TAG, "stopCapture"); cameraThreadHandler.post(new Runnable() { @Override public void run() { stopCaptureOnCameraThread(); @@ -435,7 +434,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba }); cameraThread.join(); cameraThreadHandler = null; - Logging.d(TAG, "stopCapture done"); + Log.d(TAG, "stopCapture done"); } private void stopCaptureOnCameraThread() { @@ -445,13 +444,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } private void doStopCaptureOnCameraThread() { - Logging.d(TAG, "stopCaptureOnCameraThread"); + Log.d(TAG, "stopCaptureOnCameraThread"); if (camera == null) { return; } try { cameraThreadHandler.removeCallbacks(cameraObserver); - Logging.d(TAG, "Stop preview."); + Log.d(TAG, "Stop preview."); camera.stopPreview(); camera.setPreviewCallbackWithBuffer(null); videoBuffers.stopReturnBuffersToCamera(); @@ -463,22 +462,22 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba GLES20.glDeleteTextures(1, new int[] {cameraGlTexture}, 0); cameraGlTexture = 0; } - Logging.d(TAG, "Release camera."); + Log.d(TAG, "Release camera."); camera.release(); camera = null; } catch (IOException e) { - Logging.e(TAG, "Failed to stop camera", e); + Log.e(TAG, "Failed to stop camera", e); } } private void switchCameraOnCameraThread(Runnable switchDoneEvent) { - Logging.d(TAG, "switchCameraOnCameraThread"); + Log.d(TAG, "switchCameraOnCameraThread"); doStopCaptureOnCameraThread(); startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver, applicationContext); pendingCameraSwitch = false; - Logging.d(TAG, "switchCameraOnCameraThread done"); + Log.d(TAG, "switchCameraOnCameraThread done"); if (switchDoneEvent != null) { switchDoneEvent.run(); } @@ -489,7 +488,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba if (camera == null) { return; } - Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height + + Log.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + fps); frameObserver.OnOutputFormatRequest(width, height, fps); } @@ -551,7 +550,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba frameObserver.OnFrameCaptured(data, videoBuffers.frameSize, captureFormat.width, captureFormat.height, rotation, captureTimeNs); } else { - Logging.w(TAG, "reserveByteBuffer failed - dropping frame."); + Log.w(TAG, "reserveByteBuffer failed - dropping frame."); } } @@ -614,7 +613,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba camera.addCallbackBuffer(buffer.array()); queuedBuffers.put(buffer.array(), buffer); } - Logging.d(TAG, "queueCameraBuffers enqueued " + numCaptureBuffers + Log.d(TAG, "queueCameraBuffers enqueued " + 
numCaptureBuffers + " buffers of size " + frameSize + "."); } @@ -630,7 +629,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba this.camera = null; queuedBuffers.clear(); // Frames in |pendingBuffers| need to be kept alive until they are returned. - Logging.d(TAG, "stopReturnBuffersToCamera called." + Log.d(TAG, "stopReturnBuffersToCamera called." + (pendingBuffers.isEmpty() ? " All buffers have been returned." : " Pending buffers: " + pendingFramesTimeStamps() + ".")); @@ -641,7 +640,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba if (buffer == null) { // Frames might be posted to |onPreviewFrame| with the previous format while changing // capture format in |startPreviewOnCameraThread|. Drop these old frames. - Logging.w(TAG, "Received callback buffer from previous configuration with length: " + Log.w(TAG, "Received callback buffer from previous configuration with length: " + (data == null ? "null" : data.length)); return false; } @@ -649,12 +648,12 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba throw new IllegalStateException("Callback buffer has unexpected frame size"); } if (pendingBuffers.containsKey(timeStamp)) { - Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique"); + Log.e(TAG, "Timestamp already present in pending buffers - they need to be unique"); return false; } pendingBuffers.put(timeStamp, buffer); if (queuedBuffers.isEmpty()) { - Logging.v(TAG, "Camera is running out of capture buffers." + Log.v(TAG, "Camera is running out of capture buffers." + " Pending buffers: " + pendingFramesTimeStamps()); } return true; @@ -670,7 +669,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba if (camera != null && returnedFrame.capacity() == frameSize) { camera.addCallbackBuffer(returnedFrame.array()); if (queuedBuffers.isEmpty()) { - Logging.v(TAG, "Frame returned when camera is running out of capture" + Log.v(TAG, "Frame returned when camera is running out of capture" + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp)); } queuedBuffers.put(returnedFrame.array(), returnedFrame); @@ -678,7 +677,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba } if (returnedFrame.capacity() != frameSize) { - Logging.d(TAG, "returnBuffer with time stamp " + Log.d(TAG, "returnBuffer with time stamp " + TimeUnit.NANOSECONDS.toMillis(timeStamp) + " called with old frame size, " + returnedFrame.capacity() + "."); // Since this frame has the wrong size, don't requeue it. 
@@ -686,7 +685,7 @@
        return;
      }
 
-      Logging.d(TAG, "returnBuffer with time stamp "
+      Log.d(TAG, "returnBuffer with time stamp "
         + TimeUnit.NANOSECONDS.toMillis(timeStamp)
         + " called after camera has been stopped.");
     }
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index 9909bff492..3072613fa4 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -44,7 +44,6 @@ import android.opengl.GLSurfaceView;
 import android.opengl.Matrix;
 import android.util.Log;
 
-import org.webrtc.Logging;
 import org.webrtc.VideoRenderer.I420Frame;
 
 /**
@@ -159,7 +158,7 @@
        GLSurfaceView surface, int id, int x, int y, int width, int height,
        RendererCommon.ScalingType scalingType, boolean mirror) {
-      Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
+      Log.d(TAG, "YuvImageRenderer.Create id: " + id);
      this.surface = surface;
      this.id = id;
      this.scalingType = scalingType;
@@ -180,7 +179,7 @@
    }
 
    private void createTextures() {
-      Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
+      Log.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
         Thread.currentThread().getId());
 
      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
@@ -201,7 +200,7 @@
         (screenHeight * layoutInPercentage.top + 99) / 100,
         (screenWidth * layoutInPercentage.right) / 100,
         (screenHeight * layoutInPercentage.bottom) / 100);
-      Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+      Log.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
         + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
         + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
      final float videoAspectRatio = (rotationDegree % 180 == 0)
@@ -212,12 +211,12 @@
         videoAspectRatio, displayLayout.width(), displayLayout.height());
      displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
                          (displayLayout.height() - displaySize.y) / 2);
-      Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+      Log.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
         + displayLayout.height());
      layoutMatrix = RendererCommon.getLayoutMatrix(
         mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
      updateLayoutProperties = false;
-      Logging.d(TAG, " AdjustTextureCoords done");
+      Log.d(TAG, " AdjustTextureCoords done");
    }
  }
@@ -284,13 +283,13 @@
 
    private void logStatistics() {
      long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
-      Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
+      Log.d(TAG, "ID: " + id + ". Type: " + rendererType +
         ". Frames received: " + framesReceived +
         ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
      if (framesReceived > 0 && framesRendered > 0) {
-        Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
+        Log.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs); - Logging.d(TAG, "Draw time: " + + Log.d(TAG, "Draw time: " + (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " + (int) (copyTimeNs / (1000 * framesReceived)) + " us"); } @@ -301,7 +300,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer { if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) { return; } - Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " + + Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " + screenWidth + " x " + screenHeight); this.screenWidth = screenWidth; this.screenHeight = screenHeight; @@ -318,7 +317,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer { && mirror == this.mirror) { return; } - Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y + + Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y + ") " + width + " x " + height + ". Scaling: " + scalingType + ". Mirror: " + mirror); this.layoutInPercentage.set(layoutInPercentage); @@ -334,20 +333,20 @@ public class VideoRendererGui implements GLSurfaceView.Renderer { return; } if (rendererEvents != null) { - Logging.d(TAG, "ID: " + id + + Log.d(TAG, "ID: " + id + ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight); rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation); } synchronized (updateLayoutLock) { - Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + + Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " + videoWidth + " x " + videoHeight + " rotation " + rotation); this.videoWidth = videoWidth; this.videoHeight = videoHeight; rotationDegree = rotation; updateLayoutProperties = true; - Logging.d(TAG, " YuvImageRenderer.setSize done."); + Log.d(TAG, " YuvImageRenderer.setSize done."); } } @@ -359,7 +358,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer { return; } if (!seenFrame && rendererEvents != null) { - Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame."); + Log.d(TAG, "ID: " + id + ". Reporting first rendered frame."); rendererEvents.onFirstFrameRendered(); } framesReceived++; @@ -369,7 +368,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer { if (frame.yuvStrides[0] < frame.width || frame.yuvStrides[1] < frame.width / 2 || frame.yuvStrides[2] < frame.width / 2) { - Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + + Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " + frame.yuvStrides[1] + ", " + frame.yuvStrides[2]); VideoRenderer.renderFrameDone(frame); return; @@ -395,7 +394,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer { /** Passes GLSurfaceView to video renderer. 
   public static synchronized void setView(GLSurfaceView surface,
       Runnable eglContextReadyCallback) {
-    Logging.d(TAG, "VideoRendererGui.setView");
+    Log.d(TAG, "VideoRendererGui.setView");
     instance = new VideoRendererGui(surface);
     eglContextReady = eglContextReadyCallback;
   }
@@ -409,7 +408,7 @@
     if (instance == null){
       return;
     }
-    Logging.d(TAG, "VideoRendererGui.dispose");
+    Log.d(TAG, "VideoRendererGui.dispose");
     synchronized (instance.yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
         yuvImageRenderer.release();
@@ -490,7 +489,7 @@
   public static synchronized void update(
       VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
       RendererCommon.ScalingType scalingType, boolean mirror) {
-    Logging.d(TAG, "VideoRendererGui.update");
+    Log.d(TAG, "VideoRendererGui.update");
     if (instance == null) {
       throw new RuntimeException(
          "Attempt to update yuv renderer before setting GLSurfaceView");
@@ -506,7 +505,7 @@
   public static synchronized void setRendererEvents(
       VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
-    Logging.d(TAG, "VideoRendererGui.setRendererEvents");
+    Log.d(TAG, "VideoRendererGui.setRendererEvents");
     if (instance == null) {
       throw new RuntimeException(
          "Attempt to set renderer events before setting GLSurfaceView");
@@ -521,7 +520,7 @@
   }
 
   public static synchronized void remove(VideoRenderer.Callbacks renderer) {
-    Logging.d(TAG, "VideoRendererGui.remove");
+    Log.d(TAG, "VideoRendererGui.remove");
     if (instance == null) {
       throw new RuntimeException(
          "Attempt to remove yuv renderer before setting GLSurfaceView");
@@ -529,7 +528,7 @@
     synchronized (instance.yuvImageRenderers) {
       final int index = instance.yuvImageRenderers.indexOf(renderer);
       if (index == -1) {
-        Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
+        Log.w(TAG, "Couldn't remove renderer (not present in current list)");
       } else {
         instance.yuvImageRenderers.remove(index).release();
       }
@@ -539,12 +538,12 @@
   @SuppressLint("NewApi")
   @Override
   public void onSurfaceCreated(GL10 unused, EGLConfig config) {
-    Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
+    Log.d(TAG, "VideoRendererGui.onSurfaceCreated");
 
     // Store render EGL context.
     if (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION) {
       synchronized (VideoRendererGui.class) {
         eglContext = EGL14.eglGetCurrentContext();
-        Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
+        Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
       }
     }
@@ -571,7 +570,7 @@
 
   @Override
   public void onSurfaceChanged(GL10 unused, int width, int height) {
-    Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
+    Log.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
        width + " x " + height + " ");
     screenWidth = width;
     screenHeight = height;
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 6518dc6d00..777f6f9e6a 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -33,15 +33,13 @@ import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
-import android.opengl.EGL14;
 import android.opengl.EGLContext;
 import android.opengl.GLES11Ext;
 import android.opengl.GLES20;
 import android.os.Build;
+import android.util.Log;
 import android.view.Surface;
 
-import org.webrtc.Logging;
-
 import java.nio.ByteBuffer;
 
 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
@@ -130,7 +128,7 @@
       if (name == null) {
         continue; // No HW support in this codec; try the next one.
       }
-      Logging.v(TAG, "Found candidate decoder " + name);
+      Log.v(TAG, "Found candidate decoder " + name);
 
       // Check if this is supported decoder.
       boolean supportedCodec = false;
@@ -148,13 +146,13 @@
       CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
       for (int colorFormat : capabilities.colorFormats) {
-        Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+        Log.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
       }
       for (int supportedColorFormat : supportedColorList) {
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
             // Found supported HW decoder.
-            Logging.d(TAG, "Found target decoder " + name +
+            Log.d(TAG, "Found target decoder " + name +
                ". Color: 0x" + Integer.toHexString(codecColorFormat));
             return new DecoderProperties(name, codecColorFormat);
           }
@@ -204,11 +202,11 @@
     if (properties == null) {
       throw new RuntimeException("Cannot find HW decoder for " + type);
     }
-    Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
+    Log.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height +
        ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
Use Surface: " + useSurface); if (sharedContext != null) { - Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext); + Log.d(TAG, "Decoder shared EGL Context: " + sharedContext); } mediaCodecThread = Thread.currentThread(); try { @@ -227,7 +225,7 @@ public class MediaCodecVideoDecoder { // Create output surface textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); - Logging.d(TAG, "Video decoder TextureID = " + textureID); + Log.d(TAG, "Video decoder TextureID = " + textureID); surfaceTexture = new SurfaceTexture(textureID); surface = new Surface(surfaceTexture); decodeSurface = surface; @@ -237,7 +235,7 @@ public class MediaCodecVideoDecoder { if (!useSurface) { format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); } - Logging.d(TAG, " Format: " + format); + Log.d(TAG, " Format: " + format); mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName); if (mediaCodec == null) { @@ -248,30 +246,30 @@ public class MediaCodecVideoDecoder { colorFormat = properties.colorFormat; outputBuffers = mediaCodec.getOutputBuffers(); inputBuffers = mediaCodec.getInputBuffers(); - Logging.d(TAG, "Input buffers: " + inputBuffers.length + + Log.d(TAG, "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length); return true; } catch (IllegalStateException e) { - Logging.e(TAG, "initDecode failed", e); + Log.e(TAG, "initDecode failed", e); return false; } } private void release() { - Logging.d(TAG, "Java releaseDecoder"); + Log.d(TAG, "Java releaseDecoder"); checkOnMediaCodecThread(); try { mediaCodec.stop(); mediaCodec.release(); } catch (IllegalStateException e) { - Logging.e(TAG, "release failed", e); + Log.e(TAG, "release failed", e); } mediaCodec = null; mediaCodecThread = null; if (useSurface) { surface.release(); if (textureID != 0) { - Logging.d(TAG, "Delete video decoder TextureID " + textureID); + Log.d(TAG, "Delete video decoder TextureID " + textureID); GLES20.glDeleteTextures(1, new int[] {textureID}, 0); textureID = 0; } @@ -287,7 +285,7 @@ public class MediaCodecVideoDecoder { try { return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT); } catch (IllegalStateException e) { - Logging.e(TAG, "dequeueIntputBuffer failed", e); + Log.e(TAG, "dequeueIntputBuffer failed", e); return -2; } } @@ -302,7 +300,7 @@ public class MediaCodecVideoDecoder { return true; } catch (IllegalStateException e) { - Logging.e(TAG, "decode failed", e); + Log.e(TAG, "decode failed", e); return false; } } @@ -334,15 +332,15 @@ public class MediaCodecVideoDecoder { result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { outputBuffers = mediaCodec.getOutputBuffers(); - Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); + Log.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { MediaFormat format = mediaCodec.getOutputFormat(); - Logging.d(TAG, "Decoder format changed: " + format.toString()); + Log.d(TAG, "Decoder format changed: " + format.toString()); width = format.getInteger(MediaFormat.KEY_WIDTH); height = format.getInteger(MediaFormat.KEY_HEIGHT); if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); - Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); + Log.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); // Check if new color space is supported. 
           boolean validColorFormat = false;
           for (int supportedColorFormat : supportedColorList) {
@@ -352,7 +350,7 @@
             }
           }
           if (!validColorFormat) {
-            Logging.e(TAG, "Non supported color format");
+            Log.e(TAG, "Non supported color format");
             return new DecoderOutputBufferInfo(-1, 0, 0, -1);
           }
         }
@@ -362,7 +360,7 @@
         if (format.containsKey("slice-height")) {
           sliceHeight = format.getInteger("slice-height");
         }
-        Logging.d(TAG, "Frame stride and slice height: "
+        Log.d(TAG, "Frame stride and slice height: "
            + stride + " x " + sliceHeight);
         stride = Math.max(width, stride);
         sliceHeight = Math.max(height, sliceHeight);
@@ -375,7 +373,7 @@
       }
       return null;
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "dequeueOutputBuffer failed", e);
+      Log.e(TAG, "dequeueOutputBuffer failed", e);
       return new DecoderOutputBufferInfo(-1, 0, 0, -1);
     }
   }
@@ -391,7 +389,7 @@
       mediaCodec.releaseOutputBuffer(index, render);
       return true;
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "releaseOutputBuffer failed", e);
+      Log.e(TAG, "releaseOutputBuffer failed", e);
       return false;
     }
   }
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index dfff8f2691..d85dd3a5dd 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -35,8 +35,7 @@ import android.media.MediaCodecList;
 import android.media.MediaFormat;
 import android.os.Build;
 import android.os.Bundle;
-
-import org.webrtc.Logging;
+import android.util.Log;
 
 import java.nio.ByteBuffer;
 import java.util.Arrays;
@@ -126,7 +125,7 @@ public class MediaCodecVideoEncoder {
     if (mime.equals(H264_MIME_TYPE)) {
       List exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
       if (exceptionModels.contains(Build.MODEL)) {
-        Logging.w(TAG, "Model: " + Build.MODEL +
+        Log.w(TAG, "Model: " + Build.MODEL +
            " has black listed H.264 encoder.");
         return null;
       }
@@ -147,7 +146,7 @@ public class MediaCodecVideoEncoder {
       if (name == null) {
         continue; // No HW support in this codec; try the next one.
       }
-      Logging.v(TAG, "Found candidate encoder " + name);
+      Log.v(TAG, "Found candidate encoder " + name);
 
       // Check if this is supported HW encoder.
       boolean supportedCodec = false;
@@ -163,7 +162,7 @@
       CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
       for (int colorFormat : capabilities.colorFormats) {
-        Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+        Log.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
       }
 
       // Check if codec supports either yuv420 or nv12.
@@ -171,7 +170,7 @@
       for (int codecColorFormat : capabilities.colorFormats) {
         if (codecColorFormat == supportedColorFormat) {
           // Found supported HW encoder.
-          Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
+          Log.d(TAG, "Found target encoder for mime " + mime + " : " + name +
             ". Color: 0x" + Integer.toHexString(codecColorFormat));
           return new EncoderProperties(name, codecColorFormat);
         }
@@ -210,7 +209,7 @@
 
   // Return the array of input buffers, or null on failure.
   private ByteBuffer[] initEncode(
       VideoCodecType type, int width, int height, int kbps, int fps) {
-    Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
+    Log.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
        ". @ " + kbps + " kbps. Fps: " + fps +
        ". Color: 0x" + Integer.toHexString(colorFormat));
     if (mediaCodecThread != null) {
@@ -240,7 +239,7 @@
       format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
       format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
-      Logging.d(TAG, " Format: " + format);
+      Log.d(TAG, " Format: " + format);
       mediaCodec = createByCodecName(properties.codecName);
       if (mediaCodec == null) {
         return null;
       }
@@ -251,11 +250,11 @@
       colorFormat = properties.colorFormat;
       outputBuffers = mediaCodec.getOutputBuffers();
       ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
-      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
+      Log.d(TAG, "Input buffers: " + inputBuffers.length +
          ". Output buffers: " + outputBuffers.length);
       return inputBuffers;
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "initEncode failed", e);
+      Log.e(TAG, "initEncode failed", e);
       return null;
     }
   }
@@ -270,7 +269,7 @@
       // indicate this in queueInputBuffer() below and guarantee _this_ frame
       // be encoded as a key frame, but sadly that flag is ignored. Instead,
       // we request a key frame "soon".
-      Logging.d(TAG, "Sync frame request");
+      Log.d(TAG, "Sync frame request");
       Bundle b = new Bundle();
       b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
       mediaCodec.setParameters(b);
@@ -280,19 +279,19 @@
       return true;
     }
     catch (IllegalStateException e) {
-      Logging.e(TAG, "encode failed", e);
+      Log.e(TAG, "encode failed", e);
       return false;
     }
   }
 
   private void release() {
-    Logging.d(TAG, "Java releaseEncoder");
+    Log.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
     try {
       mediaCodec.stop();
       mediaCodec.release();
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "release failed", e);
+      Log.e(TAG, "release failed", e);
     }
     mediaCodec = null;
     mediaCodecThread = null;
@@ -302,14 +301,14 @@
     // frameRate argument is ignored - HW encoder is supposed to use
     // video frame timestamps for bit allocation.
     checkOnMediaCodecThread();
-    Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + frameRateIgnored);
+    Log.v(TAG, "setRates: " + kbps + " kbps. Fps: " + frameRateIgnored);
     try {
       Bundle params = new Bundle();
       params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, 1000 * kbps);
       mediaCodec.setParameters(params);
       return true;
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "setRates failed", e);
+      Log.e(TAG, "setRates failed", e);
       return false;
     }
   }
@@ -321,7 +320,7 @@
     try {
       return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "dequeueIntputBuffer failed", e);
+      Log.e(TAG, "dequeueInputBuffer failed", e);
       return -2;
     }
   }
@@ -355,7 +354,7 @@
       boolean isConfigFrame =
          (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
       if (isConfigFrame) {
-        Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
+        Log.d(TAG, "Config frame generated. Offset: " + info.offset +
Size: " + info.size); configData = ByteBuffer.allocateDirect(info.size); outputBuffers[result].position(info.offset); @@ -378,10 +377,10 @@ public class MediaCodecVideoEncoder { boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0; if (isKeyFrame) { - Logging.d(TAG, "Sync frame generated"); + Log.d(TAG, "Sync frame generated"); } if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) { - Logging.d(TAG, "Appending config frame of size " + configData.capacity() + + Log.d(TAG, "Appending config frame of size " + configData.capacity() + " to output buffer with offset " + info.offset + ", size " + info.size); // For H.264 key frame append SPS and PPS NALs at the start @@ -407,7 +406,7 @@ public class MediaCodecVideoEncoder { } throw new RuntimeException("dequeueOutputBuffer: " + result); } catch (IllegalStateException e) { - Logging.e(TAG, "dequeueOutputBuffer failed", e); + Log.e(TAG, "dequeueOutputBuffer failed", e); return new OutputBufferInfo(-1, null, false, -1); } } @@ -420,7 +419,7 @@ public class MediaCodecVideoEncoder { mediaCodec.releaseOutputBuffer(index, false); return true; } catch (IllegalStateException e) { - Logging.e(TAG, "releaseOutputBuffer failed", e); + Log.e(TAG, "releaseOutputBuffer failed", e); return false; } }