tools_webrtc dir converted to py3 + top level PRESUBMIT script

Bug: webrtc:13607
Change-Id: Ib018e43ea977cc24dd71048e68e3343741f7f31b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/249083
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Jeremy Leconte <jleconte@google.com>
Commit-Queue: Christoffer Jansson <jansson@google.com>
Cr-Commit-Position: refs/heads/main@{#35953}
commit 4e8a773b4b (parent b5cba85c2f)
Christoffer Jansson, 2022-02-08 09:01:12 +01:00, committed by WebRTC LUCI CQ
50 changed files with 4570 additions and 4673 deletions


@@ -33,10 +33,16 @@ wheel: <
# Used by tools_webrtc/perf/webrtc_dashboard_upload.py.
wheel: <
-name: "infra/python/wheels/httplib2-py2_py3"
-version: "version:0.10.3"
+name: "infra/python/wheels/httplib2-py3"
+version: "version:0.19.1"
>
+wheel: <
+name: "infra/python/wheels/pyparsing-py2_py3"
+version: "version:2.4.7"
+>
# Used by:
# build/toolchain/win
wheel: <


@@ -1,3 +1,5 @@
+#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -13,6 +15,9 @@ import sys
from collections import defaultdict
from contextlib import contextmanager
+# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
+USE_PYTHON3 = True
# Files and directories that are *skipped* by cpplint in the presubmit script.
CPPLINT_EXCEPTIONS = [
'api/video_codecs/video_decoder.h',

@@ -31,6 +36,9 @@ CPPLINT_EXCEPTIONS = [
'modules/video_capture',
'p2p/base/pseudo_tcp.cc',
'p2p/base/pseudo_tcp.h',
+'PRESUBMIT.py',
+'presubmit_test_mocks.py',
+'presubmit_test.py',
'rtc_base',
'sdk/android/src/jni',
'sdk/objc',

@@ -137,8 +145,8 @@ def VerifyNativeApiHeadersListIsValid(input_api, output_api):
"""Ensures the list of native API header directories is up to date."""
non_existing_paths = []
native_api_full_paths = [
-input_api.os_path.join(input_api.PresubmitLocalPath(),
-*path.split('/')) for path in API_DIRS
+input_api.os_path.join(input_api.PresubmitLocalPath(), *path.split('/'))
+for path in API_DIRS
]
for path in native_api_full_paths:
if not os.path.isdir(path):

@@ -200,8 +208,8 @@ def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter):
files = []
pattern = input_api.re.compile(r'^#include\s*<iostream>',
input_api.re.MULTILINE)
-file_filter = lambda x: (input_api.FilterSourceFile(x) and
-source_file_filter(x))
+file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
+x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h'):
continue

@@ -209,13 +217,12 @@ def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter):
if pattern.search(contents):
files.append(f)
-if len(files):
+if len(files) > 0:
return [
output_api.PresubmitError(
'Do not #include <iostream> in header files, since it inserts '
'static initialization into every file including the header. '
-'Instead, #include <ostream>. See http://crbug.com/94794',
-files)
+'Instead, #include <ostream>. See http://crbug.com/94794', files)
]
return []

@@ -224,8 +231,8 @@ def CheckNoPragmaOnce(input_api, output_api, source_file_filter):
"""Make sure that banned functions are not used."""
files = []
pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
-file_filter = lambda x: (input_api.FilterSourceFile(x) and
-source_file_filter(x))
+file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
+x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h'):
continue

@@ -238,11 +245,11 @@ def CheckNoPragmaOnce(input_api, output_api, source_file_filter):
output_api.PresubmitError(
'Do not use #pragma once in header files.\n'
'See http://www.chromium.org/developers/coding-style'
-'#TOC-File-headers',
-files)
+'#TOC-File-headers', files)
]
return []
def CheckNoFRIEND_TEST(# pylint: disable=invalid-name
input_api,
output_api,

@@ -308,8 +315,7 @@ def CheckApprovedFilesLintClean(input_api, output_api,
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
# Note that moved/renamed files also count as added.
-if f.Action() == 'A' or not IsLintDisabled(disabled_paths,
-f.LocalPath()):
+if f.Action() == 'A' or not IsLintDisabled(disabled_paths, f.LocalPath()):
files.append(f.AbsoluteLocalPath())
for file_name in files:

@@ -337,8 +343,7 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api):
for source_block_match in source_pattern.finditer(contents):
# Find all source list entries starting with ../ in the source block
# (exclude overrides entries).
-for file_list_match in file_pattern.finditer(
-source_block_match.group(1)):
+for file_list_match in file_pattern.finditer(source_block_match.group(1)):
source_file = file_list_match.group(1)
if 'overrides/' not in source_file:
violating_source_entries.append(source_file)

@@ -370,6 +375,7 @@ def CheckAbseilDependencies(input_api, gn_files, output_api):
'should be moved to the "absl_deps" parameter.')
errors = []
+# pylint: disable=too-many-nested-blocks
for gn_file in gn_files:
gn_file_content = input_api.ReadFile(gn_file)
for target_match in TARGET_RE.finditer(gn_file_content):

@@ -382,8 +388,7 @@ def CheckAbseilDependencies(input_api, gn_files, output_api):
for dep in deps:
if re.search(absl_re, dep):
errors.append(
-output_api.PresubmitError(
-error_msg %
+output_api.PresubmitError(error_msg %
(target_name, gn_file.LocalPath())))
break # no need to warn more than once per target
return errors

@@ -398,7 +403,7 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
def _MoreThanOneSourceUsed(*sources_lists):
sources_used = 0
for source_list in sources_lists:
-if len(source_list):
+if len(source_list) > 0:
sources_used += 1
return sources_used > 1
@@ -432,8 +437,7 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
c_files = []
cc_files = []
objc_files = []
-for file_match in FILE_PATH_RE.finditer(
-sources_match.group(1)):
+for file_match in FILE_PATH_RE.finditer(sources_match.group(1)):
file_path = file_match.group('file_path')
extension = file_match.group('extension')
if extension == '.c':

@@ -444,12 +448,9 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
objc_files.append(file_path + extension)
list_of_sources.append((c_files, cc_files, objc_files))
for c_files_list, cc_files_list, objc_files_list in list_of_sources:
-if _MoreThanOneSourceUsed(c_files_list, cc_files_list,
-objc_files_list):
-all_sources = sorted(c_files_list + cc_files_list +
-objc_files_list)
-errors[gn_file.LocalPath()].append(
-(target_name, all_sources))
+if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list):
+all_sources = sorted(c_files_list + cc_files_list + objc_files_list)
+errors[gn_file.LocalPath()].append((target_name, all_sources))
if errors:
return [
output_api.PresubmitError(

@@ -459,7 +460,7 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
'Mixed sources: \n'
'%s\n'
'Violating GN files:\n%s\n' %
-(json.dumps(errors, indent=2), '\n'.join(errors.keys())))
+(json.dumps(errors, indent=2), '\n'.join(list(errors.keys()))))
]
return []

@@ -467,18 +468,16 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api):
cwd = input_api.PresubmitLocalPath()
with _AddToPath(
-input_api.os_path.join(cwd, 'tools_webrtc',
-'presubmit_checks_lib')):
+input_api.os_path.join(cwd, 'tools_webrtc', 'presubmit_checks_lib')):
from check_package_boundaries import CheckPackageBoundaries
-build_files = [
-os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files
-]
+build_files = [os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files]
errors = CheckPackageBoundaries(cwd, build_files)[:5]
if errors:
return [
output_api.PresubmitError(
'There are package boundary violations in the following GN '
-'files:', long_text='\n\n'.join(str(err) for err in errors))
+'files:',
+long_text='\n\n'.join(str(err) for err in errors))
]
return []

@@ -492,7 +491,7 @@ def CheckNoWarningSuppressionFlagsAreAdded(gn_files,
input_api,
output_api,
error_formatter=_ReportFileAndLine):
-"""Ensure warning suppression flags are not added wihtout a reason."""
+"""Ensure warning suppression flags are not added without a reason."""
msg = ('Usage of //build/config/clang:extra_warnings is discouraged '
'in WebRTC.\n'
'If you are not adding this code (e.g. you are just moving '

@@ -502,8 +501,8 @@ def CheckNoWarningSuppressionFlagsAreAdded(gn_files,
'\n'
'Affected files:\n')
errors = [] # 2-element tuples with (file, line number)
-clang_warn_re = input_api.re.compile(
-r'//build/config/clang:extra_warnings')
+clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings')
+# pylint: disable-next=fixme
no_presubmit_re = input_api.re.compile(
r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
for f in gn_files:

@@ -541,8 +540,7 @@ def CheckNoStreamUsageIsAdded(input_api,
source_file_filter,
error_formatter=_ReportFileAndLine):
"""Make sure that no more dependencies on stringstream are added."""
-error_msg = (
-'Usage of <sstream>, <istream> and <ostream> in WebRTC is '
+error_msg = ('Usage of <sstream>, <istream> and <ostream> in WebRTC is '
'deprecated.\n'
'This includes the following types:\n'
'std::istringstream, std::ostringstream, std::wistringstream, '

@@ -562,19 +560,18 @@ def CheckNoStreamUsageIsAdded(input_api,
'Affected files:\n')
errors = [] # 2-element tuples with (file, line number)
include_re = input_api.re.compile(r'#include <(i|o|s)stream>')
-usage_re = input_api.re.compile(
-r'std::(w|i|o|io|wi|wo|wio)(string)*stream')
+usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream')
no_presubmit_re = input_api.re.compile(
r'// no-presubmit-check TODO\(webrtc:8982\)')
-file_filter = lambda x: (input_api.FilterSourceFile(x) and
-source_file_filter(x))
+file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
+x))
def _IsException(file_path):
is_test = any(
-file_path.endswith(x) for x in
-['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc'])
-return (file_path.startswith('examples')
-or file_path.startswith('test') or is_test)
+file_path.endswith(x)
+for x in ['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc'])
+return (file_path.startswith('examples') or file_path.startswith('test')
+or is_test)
for f in input_api.AffectedSourceFiles(file_filter):
# Usage of stringstream is allowed under examples/ and in tests.
@@ -608,21 +605,20 @@ def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api):
if not surpressed:
result.append(
output_api.PresubmitError(
-error_msg %
-(affected_file.LocalPath(), line_number)))
+error_msg % (affected_file.LocalPath(), line_number)))
return result
def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
result = []
-error_msg = (
-'check_includes overrides are not allowed since it can cause '
+error_msg = ('check_includes overrides are not allowed since it can cause '
'incorrect dependencies to form. It effectively means that your '
'module can include any .h file without depending on its '
'corresponding target. There are some exceptional cases when '
'this is allowed: if so, get approval from a .gn owner in the '
'root OWNERS file.\n'
'Used in: %s (line %d).')
+# pylint: disable-next=fixme
no_presubmit_re = input_api.re.compile(
r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
for affected_file in gn_files:

@@ -630,8 +626,8 @@ def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
if ('check_includes' in affected_line
and not no_presubmit_re.search(affected_line)):
result.append(
-output_api.PresubmitError(
-error_msg % (affected_file.LocalPath(), line_number)))
+output_api.PresubmitError(error_msg %
+(affected_file.LocalPath(), line_number)))
return result

@@ -652,13 +648,10 @@ def CheckGnChanges(input_api, output_api):
result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
result.extend(
CheckNoPackageBoundaryViolations(input_api, gn_files, output_api))
-result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api,
-output_api))
-result.extend(
-CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api))
-result.extend(
-CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api,
-output_api))
+result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
+result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api))
+result.extend(
+CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api))
return result

@@ -668,8 +661,8 @@ def CheckGnGen(input_api, output_api):
errors.
"""
with _AddToPath(
-input_api.os_path.join(input_api.PresubmitLocalPath(),
-'tools_webrtc', 'presubmit_checks_lib')):
+input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc',
+'presubmit_checks_lib')):
from build_helpers import RunGnCheck
errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5]
if errors:

@@ -694,8 +687,7 @@ def CheckUnwantedDependencies(input_api, output_api, source_file_filter):
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
src_path = FindSrcDirPath(input_api.PresubmitLocalPath())
-checkdeps_path = input_api.os_path.join(src_path, 'buildtools',
-'checkdeps')
+checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps')
if not os.path.exists(checkdeps_path):
return [
output_api.PresubmitError(

@@ -752,8 +744,7 @@ def CheckCommitMessageBugEntry(input_api, output_api):
"""Check that bug entries are well-formed in commit message."""
bogus_bug_msg = (
'Bogus Bug entry: %s. Please specify the issue tracker prefix and the '
-'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.'
-)
+'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.')
results = []
for bug in input_api.change.BugsFromDescription():
bug = bug.strip()

@@ -766,8 +757,7 @@ def CheckCommitMessageBugEntry(input_api, output_api):
prefix_guess = 'chromium'
else:
prefix_guess = 'webrtc'
-results.append(
-'Bug entry requires issue tracker prefix, e.g. %s:%s' %
+results.append('Bug entry requires issue tracker prefix, e.g. %s:%s' %
(prefix_guess, bug))
except ValueError:
results.append(bogus_bug_msg % bug)

@@ -788,15 +778,13 @@ def CheckChangeHasBugField(input_api, output_api):
"""
if input_api.change.BugsFromDescription():
return []
-else:
return [
output_api.PresubmitError(
'The "Bug: [bug number]" footer is mandatory. Please create a '
'bug and reference it using either of:\n'
' * https://bugs.webrtc.org - reference it using Bug: '
'webrtc:XXXX\n'
-' * https://crbug.com - reference it using Bug: chromium:XXXXXX'
-)
+' * https://crbug.com - reference it using Bug: chromium:XXXXXX')
]

@@ -804,8 +792,7 @@ def CheckJSONParseErrors(input_api, output_api, source_file_filter):
"""Check that JSON files do not contain syntax errors."""
def FilterFile(affected_file):
-return (input_api.os_path.splitext(
-affected_file.LocalPath())[1] == '.json'
+return (input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json'
and source_file_filter(affected_file))
def GetJSONParseError(input_api, filename):

@@ -823,8 +810,7 @@ def CheckJSONParseErrors(input_api, output_api, source_file_filter):
affected_file.AbsoluteLocalPath())
if parse_error:
results.append(
-output_api.PresubmitError(
-'%s could not be parsed: %s' %
+output_api.PresubmitError('%s could not be parsed: %s' %
(affected_file.LocalPath(), parse_error)))
return results

@@ -849,7 +835,8 @@ def RunPythonTests(input_api, output_api):
input_api,
output_api,
directory,
-files_to_check=[r'.+_test\.py$']))
+files_to_check=[r'.+_test\.py$'],
+run_on_python2=False))
return input_api.RunTests(tests, parallel=True)
@@ -859,8 +846,8 @@ def CheckUsageOfGoogleProtobufNamespace(input_api, output_api,
files = []
pattern = input_api.re.compile(r'google::protobuf')
proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h')
-file_filter = lambda x: (input_api.FilterSourceFile(x) and
-source_file_filter(x))
+file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
+x))
for f in input_api.AffectedSourceFiles(file_filter):
if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']:
continue

@@ -872,8 +859,8 @@ def CheckUsageOfGoogleProtobufNamespace(input_api, output_api,
return [
output_api.PresubmitError(
'Please avoid to use namespace `google::protobuf` directly.\n'
-'Add a using directive in `%s` and include that header instead.'
-% proto_utils_path, files)
+'Add a using directive in `%s` and include that header instead.' %
+proto_utils_path, files)
]
return []

@@ -882,7 +869,7 @@ def _LicenseHeader(input_api):
"""Returns the license header regexp."""
# Accept any year number from 2003 to the current year
current_year = int(input_api.time.strftime('%Y'))
-allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
+allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
license_header = (
r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '

@@ -921,8 +908,11 @@ def CommonChecks(input_api, output_api):
# all python files. This is a temporary solution.
python_file_filter = lambda f: (f.LocalPath().endswith('.py') and
source_file_filter(f))
-python_changed_files = [f.LocalPath() for f in input_api.AffectedFiles(
-include_deletes=False, file_filter=python_file_filter)]
+python_changed_files = [
+f.LocalPath()
+for f in input_api.AffectedFiles(include_deletes=False,
+file_filter=python_file_filter)
+]
results.extend(
input_api.canned_checks.RunPylint(

@@ -939,13 +929,14 @@ def CommonChecks(input_api, output_api):
r'^testing[\\\/].*\.py$',
r'^third_party[\\\/].*\.py$',
r'^tools[\\\/].*\.py$',
-# TODO(phoglund): should arguably be checked.
+# TODO(bugs.webrtc.org/13605): should arguably be checked.
r'^tools_webrtc[\\\/]mb[\\\/].*\.py$',
r'^xcodebuild.*[\\\/].*\.py$',
),
-pylintrc='pylintrc'))
+pylintrc='pylintrc',
+version='2.7'))
-# TODO(nisse): talk/ is no more, so make below checks simpler?
+# TODO(bugs.webrtc.org/13606): talk/ is no more, so make below checks simpler?
# WebRTC can't use the presubmit_canned_checks.PanProjectChecks function
# since we need to have different license checks
# in talk/ and webrtc/directories.

@@ -1032,8 +1023,7 @@ def CommonChecks(input_api, output_api):
CheckNewlineAtTheEndOfProtoFiles(
input_api, output_api, source_file_filter=non_third_party_sources))
results.extend(
-CheckNoStreamUsageIsAdded(input_api, output_api,
-non_third_party_sources))
+CheckNoStreamUsageIsAdded(input_api, output_api, non_third_party_sources))
results.extend(
CheckNoTestCaseUsageIsAdded(input_api, output_api,
non_third_party_sources))

@@ -1044,8 +1034,7 @@ def CommonChecks(input_api, output_api):
results.extend(
CheckAssertUsage(input_api, output_api, non_third_party_sources))
results.extend(
-CheckBannedAbslMakeUnique(input_api, output_api,
-non_third_party_sources))
+CheckBannedAbslMakeUnique(input_api, output_api, non_third_party_sources))
results.extend(
CheckObjcApiSymbols(input_api, output_api, non_third_party_sources))
return results

@@ -1075,8 +1064,7 @@ def CheckApiDepsFileIsUpToDate(input_api, output_api):
path_tokens = [t for t in f.LocalPath().split(os.sep) if t]
if len(path_tokens) > 1:
if (path_tokens[0] not in dirs_to_skip and os.path.isdir(
-os.path.join(input_api.PresubmitLocalPath(),
-path_tokens[0]))):
+os.path.join(input_api.PresubmitLocalPath(), path_tokens[0]))):
dirs_to_check.add(path_tokens[0])
missing_include_rules = set()

@@ -1119,7 +1107,7 @@ def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter):
files.append(f)
break
-if len(files):
+if files:
return [
output_api.PresubmitError(
'Please use std::make_unique instead of absl::make_unique.\n'

@@ -1135,8 +1123,8 @@ def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
source_file_filter(f))
files = []
-file_filter = lambda x: (input_api.FilterSourceFile(x) and
-source_file_filter(x))
+file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
+x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath():
continue

@@ -1148,11 +1136,11 @@ def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
if 'RTC_OBJC_TYPE' not in export_block:
files.append(f.LocalPath())
-if len(files):
+if len(files) > 0:
return [
output_api.PresubmitError(
-'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() '
-+ 'macro.\n\n' + 'For example:\n' +
+'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' +
+'macro.\n\n' + 'For example:\n' +
'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' +
'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' +
'Please fix the following files:', files)

@@ -1173,7 +1161,7 @@ def CheckAssertUsage(input_api, output_api, source_file_filter):
files.append(f.LocalPath())
break
-if len(files):
+if len(files) > 0:
return [
output_api.PresubmitError(
'Usage of assert() has been detected in the following files, '

@@ -1199,7 +1187,7 @@ def CheckAbslMemoryInclude(input_api, output_api, source_file_filter):
files.append(f)
break
-if len(files):
+if len(files) > 0:
return [
output_api.PresubmitError(
'Please include "absl/memory/memory.h" header for '
@@ -1213,8 +1201,8 @@ def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(CommonChecks(input_api, output_api))
results.extend(CheckGnGen(input_api, output_api))
-results.extend(
-input_api.canned_checks.CheckGNFormatted(input_api, output_api))
+results.extend(input_api.canned_checks.CheckGNFormatted(
+input_api, output_api))
return results

@@ -1226,8 +1214,7 @@ def CheckChangeOnCommit(input_api, output_api):
results.extend(
input_api.canned_checks.CheckChangeWasUploaded(input_api, output_api))
results.extend(
-input_api.canned_checks.CheckChangeHasDescription(
-input_api, output_api))
+input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
results.extend(CheckChangeHasBugField(input_api, output_api))
results.extend(CheckCommitMessageBugEntry(input_api, output_api))
results.extend(

@@ -1248,8 +1235,8 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
os.path.join('tools_webrtc', 'ios', 'SDK'),
]
with _AddToPath(
-input_api.os_path.join(input_api.PresubmitLocalPath(),
-'tools_webrtc', 'presubmit_checks_lib')):
+input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc',
+'presubmit_checks_lib')):
from check_orphan_headers import GetBuildGnPathFromFilePath
from check_orphan_headers import IsHeaderInBuildGn

@@ -1259,14 +1246,13 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
if f.LocalPath().endswith('.h'):
file_path = os.path.abspath(f.LocalPath())
root_dir = os.getcwd()
-gn_file_path = GetBuildGnPathFromFilePath(file_path,
-os.path.exists, root_dir)
+gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists,
+root_dir)
in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path)
if not in_build_gn:
results.append(
output_api.PresubmitError(
-error_msg.format(f.LocalPath(),
-os.path.relpath(gn_file_path))))
+error_msg.format(f.LocalPath(), os.path.relpath(gn_file_path))))
return results

@@ -1282,8 +1268,7 @@ def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api,
with open(file_path) as f:
lines = f.readlines()
if len(lines) > 0 and not lines[-1].endswith('\n'):
-results.append(
-output_api.PresubmitError(error_msg.format(file_path)))
+results.append(output_api.PresubmitError(error_msg.format(file_path)))
return results

@@ -1297,7 +1282,7 @@ def _ExtractAddRulesFromParsedDeps(parsed_deps):
rule[1:] for rule in parsed_deps.get('include_rules', [])
if rule.startswith('+') or rule.startswith('!')
])
-for _, rules in parsed_deps.get('specific_include_rules', {}).iteritems():
+for _, rules in parsed_deps.get('specific_include_rules', {}).items():
add_rules.update([
rule[1:] for rule in rules
if rule.startswith('+') or rule.startswith('!')

@@ -1309,7 +1294,7 @@ def _ParseDeps(contents):
"""Simple helper for parsing DEPS files."""
# Stubs for handling special syntax in the root DEPS file.
-class VarImpl(object):
+class VarImpl:
def __init__(self, local_scope):
self._local_scope = local_scope

@@ -1317,14 +1302,15 @@ def _ParseDeps(contents):
"""Implements the Var syntax."""
try:
return self._local_scope['vars'][var_name]
-except KeyError:
-raise Exception('Var is not defined: %s' % var_name)
+except KeyError as var_not_defined:
+raise Exception('Var is not defined: %s' %
+var_name) from var_not_defined
local_scope = {}
global_scope = {
'Var': VarImpl(local_scope).Lookup,
}
-exec contents in global_scope, local_scope
+exec(contents, global_scope, local_scope)
return local_scope

@@ -1371,8 +1357,7 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
filename = input_api.os_path.basename(f.LocalPath())
if filename == 'DEPS':
virtual_depended_on_files.update(
-_CalculateAddedDeps(input_api.os_path,
-'\n'.join(f.OldContents()),
+_CalculateAddedDeps(input_api.os_path, '\n'.join(f.OldContents()),
'\n'.join(f.NewContents())))
if not virtual_depended_on_files:

@@ -1383,15 +1368,13 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
return [
output_api.PresubmitNotifyResult(
'--tbr was specified, skipping OWNERS check for DEPS '
-'additions'
-)
+'additions')
]
if input_api.dry_run:
return [
output_api.PresubmitNotifyResult(
'This is a dry run, skipping OWNERS check for DEPS '
-'additions'
-)
+'additions')
]
if not input_api.change.issue:
return [

@@ -1405,9 +1388,7 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
owner_email, reviewers = (
input_api.canned_checks.GetCodereviewOwnerAndReviewers(
-input_api,
-None,
-approval_needed=input_api.is_committing))
+input_api, None, approval_needed=input_api.is_committing))
owner_email = owner_email or input_api.change.author_email

@@ -1415,7 +1396,8 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
virtual_depended_on_files, reviewers.union([owner_email]), [])
missing_files = [
f for f in virtual_depended_on_files
-if approval_status[f] != input_api.owners_client.APPROVED]
+if approval_status[f] != input_api.owners_client.APPROVED
+]
# We strip the /DEPS part that was added by
# _FilesToCheckForIncomingDeps to fake a path to a file in a

@@ -1424,7 +1406,6 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
start_deps = path.rfind('/DEPS')
if start_deps != -1:
return path[:start_deps]
-else:
return path
unapproved_dependencies = [

@@ -1433,8 +1414,7 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
if unapproved_dependencies:
output_list = [
-output(
-'You need LGTM from owners of depends-on paths in DEPS that '
+output('You need LGTM from owners of depends-on paths in DEPS that '
' were modified in this CL:\n %s' %
'\n '.join(sorted(unapproved_dependencies)))
]
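
Note: the PRESUBMIT.py hunks above repeat a small set of mechanical Python 3 migrations: xrange becomes range, the exec statement becomes the exec() function, dict.iteritems() becomes .items(), and re-raised exceptions are chained with `from`. A compact illustrative sketch of those idioms follows; it is not code taken from the repository.

def parse(contents):
  scope = {}
  exec(contents, scope, scope)  # Python 2 spelling: exec contents in scope, scope
  return scope

def allowed_years(current_year=2022):
  return [str(y) for y in reversed(range(2003, current_year + 1))]  # Python 2: xrange(...)

def add_rules(parsed_deps):
  for _, rules in parsed_deps.get('specific_include_rules', {}).items():  # Python 2: .iteritems()
    yield rules

def lookup(local_scope, var_name):
  try:
    return local_scope['vars'][var_name]
  except KeyError as var_not_defined:
    # Chaining keeps the original KeyError attached to the traceback.
    raise Exception('Var is not defined: %s' % var_name) from var_not_defined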


@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3
# Copyright 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -8,6 +8,7 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
+from __future__ import absolute_import
import os
import shutil
import tempfile

@@ -88,8 +89,8 @@ class CheckNewlineAtTheEndOfProtoFilesTest(unittest.TestCase):
self.input_api, self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual(
-'File %s must end with exactly one newline.' %
-self.proto_file_path, str(errors[0]))
+'File %s must end with exactly one newline.' % self.proto_file_path,
+str(errors[0]))
def testNoErrorIfProtoFileEndsWithNewline(self):
self._GenerateProtoWithNewlineAtTheEnd()

@@ -237,8 +238,7 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
self.assertTrue('bar.c' in str(errors[0]))
def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources):
-assert len(
-sources) == 3, 'This function accepts a list of 3 source files'
+assert len(sources) == 3, 'This function accepts a list of 3 source files'
self._GenerateBuildFile(
textwrap.dedent("""
rtc_static_library("bar_foo") {

@@ -275,24 +275,16 @@ class CheckAssertUsageTest(unittest.TestCase):
def setUp(self):
self.input_api = MockInputApi()
self.output_api = MockOutputApi()
-self._content_with_assert = [
-'void Foo() {',
-' assert(true);',
-'}'
-]
-self._content_without_assert = [
-'void Foo() {',
-' RTC_CHECK(true);',
-'}'
-]
+self._content_with_assert = ['void Foo() {', ' assert(true);', '}']
+self._content_without_assert = ['void Foo() {', ' RTC_CHECK(true);', '}']
def testDetectsAssertInCcFile(self):
self.input_api.files = [
MockFile('with_assert.cc', self._content_with_assert),
MockFile('without_assert.cc', self._content_without_assert),
]
-errors = PRESUBMIT.CheckAssertUsage(
-self.input_api, self.output_api, lambda x: True)
+errors = PRESUBMIT.CheckAssertUsage(self.input_api,
+self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.cc', errors[0].items[0])

@@ -301,8 +293,8 @@ class CheckAssertUsageTest(unittest.TestCase):
MockFile('with_assert.h', self._content_with_assert),
MockFile('without_assert.h', self._content_without_assert),
]
-errors = PRESUBMIT.CheckAssertUsage(
-self.input_api, self.output_api, lambda x: True)
+errors = PRESUBMIT.CheckAssertUsage(self.input_api,
+self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.h', errors[0].items[0])

@@ -311,8 +303,8 @@ class CheckAssertUsageTest(unittest.TestCase):
MockFile('with_assert.m', self._content_with_assert),
MockFile('without_assert.m', self._content_without_assert),
]
-errors = PRESUBMIT.CheckAssertUsage(
-self.input_api, self.output_api, lambda x: True)
+errors = PRESUBMIT.CheckAssertUsage(self.input_api,
+self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.m', errors[0].items[0])

@@ -321,8 +313,8 @@ class CheckAssertUsageTest(unittest.TestCase):
MockFile('with_assert.mm', self._content_with_assert),
MockFile('without_assert.mm', self._content_without_assert),
]
-errors = PRESUBMIT.CheckAssertUsage(
-self.input_api, self.output_api, lambda x: True)
+errors = PRESUBMIT.CheckAssertUsage(self.input_api,
+self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.mm', errors[0].items[0])

@@ -330,8 +322,8 @@ class CheckAssertUsageTest(unittest.TestCase):
self.input_api.files = [
MockFile('with_assert.cpp', self._content_with_assert),
]
-errors = PRESUBMIT.CheckAssertUsage(
-self.input_api, self.output_api, lambda x: True)
+errors = PRESUBMIT.CheckAssertUsage(self.input_api,
+self.output_api, lambda x: True)
self.assertEqual(0, len(errors))


@@ -1,3 +1,5 @@
+#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -9,11 +11,12 @@
# This file is inspired to [1].
# [1] - https://cs.chromium.org/chromium/src/PRESUBMIT_test_mocks.py
+from __future__ import absolute_import
import os.path
import re
-class MockInputApi(object):
+class MockInputApi:
"""Mock class for the InputApi class.
This class can be used for unittests for presubmit by initializing the files

@@ -38,34 +41,31 @@ class MockInputApi(object):
yield f
@classmethod
-def FilterSourceFile(cls,
-affected_file,
-files_to_check=(),
-files_to_skip=()):
+def FilterSourceFile(cls, affected_file, files_to_check=(), files_to_skip=()):
# pylint: disable=unused-argument
return True
def PresubmitLocalPath(self):
return self.presubmit_local_path
-def ReadFile(self, affected_file, mode='rU'):
+def ReadFile(self, affected_file, mode='r'):
filename = affected_file.AbsoluteLocalPath()
for f in self.files:
if f.LocalPath() == filename:
with open(filename, mode) as f:
return f.read()
# Otherwise, file is not in our mock API.
-raise IOError, "No such file or directory: '%s'" % filename
+raise IOError("No such file or directory: '%s'" % filename)
-class MockOutputApi(object):
+class MockOutputApi:
"""Mock class for the OutputApi class.
An instance of this class can be passed to presubmit unittests for outputing
various types of results.
"""
-class PresubmitResult(object):
+class PresubmitResult:
def __init__(self, message, items=None, long_text=''):
self.message = message
self.items = items

@@ -76,12 +76,11 @@ class MockOutputApi(object):
class PresubmitError(PresubmitResult):
def __init__(self, message, items=None, long_text=''):
-MockOutputApi.PresubmitResult.__init__(self, message, items,
-long_text)
+MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
self.type = 'error'
-class MockChange(object):
+class MockChange:
"""Mock class for Change class.
This class can be used in presubmit unittests to mock the query of the

@@ -103,7 +102,7 @@ class MockChange(object):
return self.tags.get(attr)
-class MockFile(object):
+class MockFile:
"""Mock class for the File class.
This class can be used to form the mock list of changed files in

@@ -119,8 +118,7 @@ class MockFile(object):
new_contents = ["Data"]
self._local_path = local_path
self._new_contents = new_contents
-self._changed_contents = [(i + 1, l)
-for i, l in enumerate(new_contents)]
+self._changed_contents = [(i + 1, l) for i, l in enumerate(new_contents)]
self._action = action
self._old_contents = old_contents


@@ -28,6 +28,7 @@ disable=
exec-used,
fixme,
import-error,
+import-outside-toplevel,
missing-docstring,
no-init,
no-member,
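
Note: import-outside-toplevel is disabled because the presubmit code deliberately extends sys.path and imports helpers inside function bodies (see CheckNoPackageBoundaryViolations and CheckOrphanHeaders above). A self-contained sketch of that pattern follows; the _AddToPath body is an assumption that mirrors what the helper appears to do, and check_package_boundaries must be importable for the call to work.

import contextlib
import os
import sys

@contextlib.contextmanager
def _AddToPath(*paths):
  # Assumed behaviour: temporarily extend sys.path for a local import.
  sys.path.extend(paths)
  try:
    yield
  finally:
    for path in paths:
      sys.path.remove(path)

def CheckWithLocalImport(cwd, build_files):
  with _AddToPath(os.path.join(cwd, 'tools_webrtc', 'presubmit_checks_lib')):
    # This in-function import is what pylint flags as import-outside-toplevel.
    from check_package_boundaries import CheckPackageBoundaries
  return CheckPackageBoundaries(cwd, build_files)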


@@ -1,3 +1,5 @@
+#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -6,12 +8,15 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
+# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
+USE_PYTHON3 = True
def _LicenseHeader(input_api):
"""Returns the license header regexp."""
# Accept any year number from 2003 to the current year
current_year = int(input_api.time.strftime('%Y'))
-allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
+allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
license_header = (
r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
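
Note: a worked example of the years_re pattern built in _LicenseHeader above, illustrative only, with the current year fixed to 2022 instead of read from input_api and the header pattern truncated to its first line:

import re

years_re = '(' + '|'.join(str(y) for y in reversed(range(2003, 2023))) + ')'
# years_re is now '(2022|2021|...|2004|2003)'.
license_header = (
    r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' % {'year': years_re})
print(bool(re.match(license_header, ' * Copyright (c) 2012 The WebRTC project authors. ')))  # True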


@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -60,8 +60,7 @@ def _ParseArgs():
default='libwebrtc.aar',
type=os.path.abspath,
help='Output file of the script.')
-parser.add_argument(
-'--arch',
+parser.add_argument('--arch',
default=DEFAULT_ARCHS,
nargs='*',
help='Architectures to build. Defaults to %(default)s.')

@@ -126,9 +125,8 @@ def _EncodeForGN(value):
"""Encodes value as a GN literal."""
if isinstance(value, str):
return '"' + value + '"'
-elif isinstance(value, bool):
+if isinstance(value, bool):
return repr(value).lower()
-else:
return repr(value)

@@ -141,13 +139,12 @@ def _GetTargetCpu(arch):
"""Returns target_cpu for the GN build with the given architecture."""
if arch in ['armeabi', 'armeabi-v7a']:
return 'arm'
-elif arch == 'arm64-v8a':
+if arch == 'arm64-v8a':
return 'arm64'
-elif arch == 'x86':
+if arch == 'x86':
return 'x86'
-elif arch == 'x86_64':
+if arch == 'x86_64':
return 'x64'
-else:
raise Exception('Unknown arch: ' + arch)

@@ -155,11 +152,10 @@ def _GetArmVersion(arch):
"""Returns arm_version for the GN build with the given architecture."""
if arch == 'armeabi':
return 6
-elif arch == 'armeabi-v7a':
+if arch == 'armeabi-v7a':
return 7
-elif arch in ['arm64-v8a', 'x86', 'x86_64']:
+if arch in ['arm64-v8a', 'x86', 'x86_64']:
return None
-else:
raise Exception('Unknown arch: ' + arch)

@@ -180,8 +176,7 @@ def Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches,
if arm_version:
gn_args['arm_version'] = arm_version
gn_args_str = '--args=' + ' '.join(
-[k + '=' + _EncodeForGN(v)
-for k, v in gn_args.items()] + extra_gn_args)
+[k + '=' + _EncodeForGN(v) for k, v in gn_args.items()] + extra_gn_args)
gn_args_list = ['gen', output_directory, gn_args_str]
gn_args_list.extend(extra_gn_switches)
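
Note: the elif/else cleanups above follow the no-else-after-return style and do not change behaviour. As a usage illustration of _EncodeForGN and the --args string it feeds, with the function logic copied from the hunk above and the example argument values assumed:

def _EncodeForGN(value):
  if isinstance(value, str):
    return '"' + value + '"'
  if isinstance(value, bool):
    return repr(value).lower()
  return repr(value)

gn_args = {'target_os': 'android', 'is_debug': False, 'arm_version': 7}
print('--args=' + ' '.join(k + '=' + _EncodeForGN(v) for k, v in gn_args.items()))
# --args=target_os="android" is_debug=false arm_version=7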


@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -7,8 +7,7 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
-"""Script for building and testing WebRTC AAR.
-"""
+"""Script for building and testing WebRTC AAR."""
import argparse
import logging

@@ -80,8 +79,7 @@ def _TestAAR(build_dir):
logging.info('Testing library.')
# Uninstall any existing version of AppRTCMobile.
-logging.info(
-'Uninstalling previous AppRTCMobile versions. It is okay for '
+logging.info('Uninstalling previous AppRTCMobile versions. It is okay for '
'these commands to fail if AppRTCMobile is not installed.')
subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc'])
subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test'])

@@ -92,9 +90,9 @@ def _TestAAR(build_dir):
subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR)
# Then run the tests.
subprocess.check_call([
-GRADLEW_BIN,
-'connectedDebugAndroidTest',
-'-PaarDir=' + os.path.abspath(build_dir)],
+GRADLEW_BIN, 'connectedDebugAndroidTest',
+'-PaarDir=' + os.path.abspath(build_dir)
+],
cwd=AAR_PROJECT_DIR)
except subprocess.CalledProcessError:
logging.exception('Test failure.')

@@ -106,8 +104,8 @@ def _TestAAR(build_dir):
def BuildAndTestAar(use_goma, skip_tests, build_dir):
version = '1.0.' + _GetCommitPos()
commit = _GetCommitHash()
-logging.info(
-'Building and Testing AAR version %s with hash %s', version, commit)
+logging.info('Building and Testing AAR version %s with hash %s', version,
+commit)
# If build directory is not specified, create a temporary directory.
use_tmp_dir = not build_dir


@@ -1,4 +1,5 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -12,9 +13,9 @@ import re
import sys
-def replace_double_quote(line):
-re_rtc_import = re.compile(
-r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)', re.DOTALL)
+def _ReplaceDoubleQuote(line):
+re_rtc_import = re.compile(r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)',
+re.DOTALL)
match = re_rtc_import.match(line)
if not match:
return line

@@ -23,10 +24,10 @@ def replace_double_quote(line):
match.group(4), match.group(5))
-def process(input_file, output_file):
+def Process(input_file, output_file):
with open(input_file, 'rb') as fb, open(output_file, 'wb') as fw:
for line in fb.read().decode('UTF-8').splitlines():
-fw.write(replace_double_quote(line).encode('UTF-8'))
+fw.write(_ReplaceDoubleQuote(line).encode('UTF-8'))
fw.write(b"\n")

@@ -35,12 +36,10 @@ def main():
description=
"Copy headers of framework and replace double-quoted includes to" +
" angle-bracketed respectively.")
-parser.add_argument('--input',
-help='Input header files to copy.',
-type=str)
+parser.add_argument('--input', help='Input header files to copy.', type=str)
parser.add_argument('--output', help='Output file.', type=str)
parsed_args = parser.parse_args()
-return process(parsed_args.input, parsed_args.output)
+return Process(parsed_args.input, parsed_args.output)
if __name__ == '__main__':

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -8,26 +9,25 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import unittest import unittest
from copy_framework_header import replace_double_quote from copy_framework_header import _ReplaceDoubleQuote
class TestCopyFramework(unittest.TestCase): class TestCopyFramework(unittest.TestCase):
def testReplaceDoubleQuote(self): def testReplaceDoubleQuote(self):
self.assertEqual(replace_double_quote("""#import "RTCMacros.h\""""), self.assertEqual(_ReplaceDoubleQuote("""#import "RTCMacros.h\""""),
"""#import <WebRTC/RTCMacros.h>""") """#import <WebRTC/RTCMacros.h>""")
self.assertEqual(replace_double_quote("""#import "RTCMacros.h\"\n"""), self.assertEqual(_ReplaceDoubleQuote("""#import "RTCMacros.h\"\n"""),
"""#import <WebRTC/RTCMacros.h>\n""") """#import <WebRTC/RTCMacros.h>\n""")
self.assertEqual( self.assertEqual(
replace_double_quote("""#import "UIDevice+RTCDevice.h\"\n"""), _ReplaceDoubleQuote("""#import "UIDevice+RTCDevice.h\"\n"""),
"""#import <WebRTC/UIDevice+RTCDevice.h>\n""") """#import <WebRTC/UIDevice+RTCDevice.h>\n""")
self.assertEqual( self.assertEqual(
replace_double_quote("#import \"components/video_codec/" + _ReplaceDoubleQuote("#import \"components/video_codec/" +
"RTCVideoDecoderFactoryH264.h\"\n"), "RTCVideoDecoderFactoryH264.h\"\n"),
"""#import <WebRTC/RTCVideoDecoderFactoryH264.h>\n""") """#import <WebRTC/RTCVideoDecoderFactoryH264.h>\n""")
self.assertEqual( self.assertEqual(
replace_double_quote( _ReplaceDoubleQuote(
"""@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""" """@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n"""),
),
"""@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""") """@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""")

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -8,7 +9,6 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
"""Script to automatically roll dependencies in the WebRTC DEPS file.""" """Script to automatically roll dependencies in the WebRTC DEPS file."""
from __future__ import absolute_import
import argparse import argparse
import base64 import base64
@ -18,10 +18,7 @@ import os
import re import re
import subprocess import subprocess
import sys import sys
import urllib
import six.moves.urllib.request
import six.moves.urllib.error
import six.moves.urllib.parse
def FindSrcDirPath(): def FindSrcDirPath():
@ -153,7 +150,7 @@ def _RunCommand(command,
logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir) logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir)
env = os.environ.copy() env = os.environ.copy()
if extra_env: if extra_env:
assert all(isinstance(value, str) for value in extra_env.values()) assert all(isinstance(value, str) for value in list(extra_env.values()))
logging.debug('extra env: %s', extra_env) logging.debug('extra env: %s', extra_env)
env.update(extra_env) env.update(extra_env)
p = subprocess.Popen(command, p = subprocess.Popen(command,
@ -169,8 +166,7 @@ def _RunCommand(command,
if not ignore_exit_code and p.returncode != 0: if not ignore_exit_code and p.returncode != 0:
logging.error('Command failed: %s\n' logging.error('Command failed: %s\n'
'stdout:\n%s\n' 'stdout:\n%s\n'
'stderr:\n%s\n', ' '.join(command), std_output, 'stderr:\n%s\n', ' '.join(command), std_output, err_output)
err_output)
sys.exit(p.returncode) sys.exit(p.returncode)
return std_output, err_output return std_output, err_output
@ -229,7 +225,7 @@ def ReadUrlContent(url):
Returns: Returns:
A list of lines. A list of lines.
""" """
conn = six.moves.urllib.request.urlopen(url) conn = urllib.request.urlopen(url)
try: try:
return conn.readlines() return conn.readlines()
except IOError as e: except IOError as e:
@ -253,13 +249,12 @@ def GetMatchingDepsEntries(depsentry_dict, dir_path):
A list of DepsEntry objects. A list of DepsEntry objects.
""" """
result = [] result = []
for path, depsentry in depsentry_dict.items(): for path, depsentry in list(depsentry_dict.items()):
if path == dir_path: if path == dir_path:
result.append(depsentry) result.append(depsentry)
else: else:
parts = path.split('/') parts = path.split('/')
if all(part == parts[i] if all(part == parts[i] for i, part in enumerate(dir_path.split('/'))):
for i, part in enumerate(dir_path.split('/'))):
result.append(depsentry) result.append(depsentry)
return result return result
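
The component-wise prefix test above is dense; a minimal runnable sketch of the same comparison, with made-up paths rather than real DEPS entries:

# Editor's sketch with made-up paths; mirrors the prefix test in
# GetMatchingDepsEntries above.
entry_path = 'src/third_party/boringssl/src'
dir_path = 'src/third_party'
parts = entry_path.split('/')
# True because every component of dir_path equals the corresponding leading
# component of entry_path, i.e. the entry lives under dir_path.
print(all(part == parts[i] for i, part in enumerate(dir_path.split('/'))))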
@ -269,7 +264,7 @@ def BuildDepsentryDict(deps_dict):
result = {} result = {}
def AddDepsEntries(deps_subdict): def AddDepsEntries(deps_subdict):
for path, dep in deps_subdict.items(): for path, dep in list(deps_subdict.items()):
if path in result: if path in result:
continue continue
if not isinstance(dep, dict): if not isinstance(dep, dict):
@ -305,8 +300,8 @@ def _FindChangedCipdPackages(path, old_pkgs, new_pkgs):
for new_pkg in new_pkgs: for new_pkg in new_pkgs:
old_version = old_pkg['version'] old_version = old_pkg['version']
new_version = new_pkg['version'] new_version = new_pkg['version']
if (old_pkg['package'] == new_pkg['package'] and if (old_pkg['package'] == new_pkg['package']
old_version != new_version): and old_version != new_version):
logging.debug('Roll dependency %s to %s', path, new_version) logging.debug('Roll dependency %s to %s', path, new_version)
yield ChangedCipdPackage(path, old_pkg['package'], old_version, yield ChangedCipdPackage(path, old_pkg['package'], old_version,
new_version) new_version)
@ -379,9 +374,8 @@ def FindRemovedDeps(webrtc_deps, new_cr_deps):
A list of paths of unexpectedly disappearing dependencies. A list of paths of unexpectedly disappearing dependencies.
""" """
all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps) all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps)
generated_android_deps = sorted([ generated_android_deps = sorted(
path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH) [path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH)])
])
# Webrtc-only dependencies are handled in CalculateChangedDeps. # Webrtc-only dependencies are handled in CalculateChangedDeps.
other_deps = sorted([ other_deps = sorted([
path for path in all_removed_deps path for path in all_removed_deps
@ -408,7 +402,7 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps):
result = [] result = []
webrtc_entries = BuildDepsentryDict(webrtc_deps) webrtc_entries = BuildDepsentryDict(webrtc_deps)
new_cr_entries = BuildDepsentryDict(new_cr_deps) new_cr_entries = BuildDepsentryDict(new_cr_deps)
for path, webrtc_deps_entry in webrtc_entries.items(): for path, webrtc_deps_entry in list(webrtc_entries.items()):
if path in DONT_AUTOROLL_THESE: if path in DONT_AUTOROLL_THESE:
continue continue
cr_deps_entry = new_cr_entries.get(path) cr_deps_entry = new_cr_entries.get(path)
@ -424,8 +418,8 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps):
# Use the revision from Chromium's DEPS file. # Use the revision from Chromium's DEPS file.
new_rev = cr_deps_entry.revision new_rev = cr_deps_entry.revision
assert webrtc_deps_entry.url == cr_deps_entry.url, ( assert webrtc_deps_entry.url == cr_deps_entry.url, (
'WebRTC DEPS entry %s has a different URL %s than Chromium %s.' 'WebRTC DEPS entry %s has a different URL %s than Chromium %s.' %
% (path, webrtc_deps_entry.url, cr_deps_entry.url)) (path, webrtc_deps_entry.url, cr_deps_entry.url))
else: else:
if isinstance(webrtc_deps_entry, DepsEntry): if isinstance(webrtc_deps_entry, DepsEntry):
# Use the HEAD of the deps repo. # Use the HEAD of the deps repo.
@ -441,8 +435,8 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps):
if webrtc_deps_entry.revision != new_rev: if webrtc_deps_entry.revision != new_rev:
logging.debug('Roll dependency %s to %s', path, new_rev) logging.debug('Roll dependency %s to %s', path, new_rev)
result.append( result.append(
ChangedDep(path, webrtc_deps_entry.url, ChangedDep(path, webrtc_deps_entry.url, webrtc_deps_entry.revision,
webrtc_deps_entry.revision, new_rev)) new_rev))
return sorted(result) return sorted(result)
@ -462,8 +456,7 @@ def CalculateChangedClang(new_cr_rev):
new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH, new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH,
new_cr_rev).splitlines() new_cr_rev).splitlines()
new_rev = GetClangRev(new_clang_update_py) new_rev = GetClangRev(new_clang_update_py)
return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, new_rev)
new_rev)
def GenerateCommitMessage( def GenerateCommitMessage(
@ -481,8 +474,7 @@ def GenerateCommitMessage(
git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos) git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos)
commit_msg = [ commit_msg = [
'Roll chromium_revision %s (%s)\n' % 'Roll chromium_revision %s (%s)\n' % (rev_interval, git_number_interval),
(rev_interval, git_number_interval),
'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval), 'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval),
'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval) 'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval)
] ]
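
For orientation, a sketch of the commit-message header this list produces, using placeholder revisions and placeholder URL templates rather than the constants defined in roll_deps.py:

# Editor's sketch; revision values and URL templates are placeholders, not the
# constants defined in roll_deps.py.
CHROMIUM_LOG_TEMPLATE = 'https://chromium.example/+log/%s'
CHROMIUM_COMMIT_TEMPLATE = 'https://chromium.example/+/%s'
rev_interval = 'aaaaaaaaaa..bbbbbbbbbb'
git_number_interval = '100100:100200'
commit_msg = [
    'Roll chromium_revision %s (%s)\n' % (rev_interval, git_number_interval),
    'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval),
    'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval)
]
print('\n'.join(commit_msg))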
@ -499,8 +491,7 @@ def GenerateCommitMessage(
commit_msg.append('* %s: %s..%s' % commit_msg.append('* %s: %s..%s' %
(c.path, c.current_version, c.new_version)) (c.path, c.current_version, c.new_version))
else: else:
commit_msg.append( commit_msg.append('* %s: %s/+log/%s..%s' %
'* %s: %s/+log/%s..%s' %
(c.path, c.url, c.current_rev[0:10], c.new_rev[0:10])) (c.path, c.url, c.current_rev[0:10], c.new_rev[0:10]))
if added_deps_paths: if added_deps_paths:
@ -543,8 +534,7 @@ def UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content):
# Add and remove dependencies. For now: only generated android deps. # Add and remove dependencies. For now: only generated android deps.
# Since gclient cannot add or remove deps, we rely on the fact that # Since gclient cannot add or remove deps, we rely on the fact that
# these android deps are located in one place we can copy/paste. # these android deps are located in one place we can copy/paste.
deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END, deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END, re.DOTALL)
re.DOTALL)
new_deps = deps_re.search(new_cr_content) new_deps = deps_re.search(new_cr_content)
old_deps = deps_re.search(deps_content) old_deps = deps_re.search(deps_content)
if not new_deps or not old_deps: if not new_deps or not old_deps:
@ -707,8 +697,7 @@ def main():
help=('Ignore if the current branch is not main or if there ' help=('Ignore if the current branch is not main or if there '
'are uncommitted changes (default: %(default)s).')) 'are uncommitted changes (default: %(default)s).'))
grp = p.add_mutually_exclusive_group() grp = p.add_mutually_exclusive_group()
grp.add_argument( grp.add_argument('--skip-cq',
'--skip-cq',
action='store_true', action='store_true',
default=False, default=False,
help='Skip sending the CL to the CQ (default: %(default)s)') help='Skip sending the CL to the CQ (default: %(default)s)')

View File

@ -1,4 +1,5 @@
#!/usr/bin/env vpython #!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -7,7 +8,6 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
from __future__ import absolute_import
import glob import glob
import os import os
@ -15,16 +15,11 @@ import shutil
import sys import sys
import tempfile import tempfile
import unittest import unittest
import mock
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir) PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir)
sys.path.append(PARENT_DIR) sys.path.append(PARENT_DIR)
# Workaround for the presubmit, plan only to run in py3 now.
# TODO(webrtc:13418) Remove when py2 presubmit is gone.
if sys.version_info >= (3, 3):
from unittest import mock
else:
import mock
import roll_deps import roll_deps
from roll_deps import CalculateChangedDeps, FindAddedDeps, \ from roll_deps import CalculateChangedDeps, FindAddedDeps, \
@ -57,7 +52,7 @@ class TestError(Exception):
pass pass
class FakeCmd(object): class FakeCmd:
def __init__(self): def __init__(self):
self.expectations = [] self.expectations = []
@ -73,14 +68,13 @@ class FakeCmd(object):
for item in ignores: for item in ignores:
kwargs.pop(item, None) kwargs.pop(item, None)
if args != exp_args or kwargs != exp_kwargs: if args != exp_args or kwargs != exp_kwargs:
message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs)
exp_kwargs)
message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs) message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs)
raise TestError(message) raise TestError(message)
return exp_returns return exp_returns
class NullCmd(object): class NullCmd:
"""No-op mock when calls mustn't be checked. """ """No-op mock when calls mustn't be checked. """
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
@ -122,8 +116,7 @@ class TestRollChromiumRevision(unittest.TestCase):
new_cr_contents) new_cr_contents)
with open(self._webrtc_depsfile) as deps_file: with open(self._webrtc_depsfile) as deps_file:
deps_contents = deps_file.read() deps_contents = deps_file.read()
self.assertTrue( self.assertTrue(new_rev in deps_contents,
new_rev in deps_contents,
'Failed to find %s in\n%s' % (new_rev, deps_contents)) 'Failed to find %s in\n%s' % (new_rev, deps_contents))
def _UpdateDepsSetup(self): def _UpdateDepsSetup(self):
@ -136,9 +129,8 @@ class TestRollChromiumRevision(unittest.TestCase):
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
with mock.patch('roll_deps._RunCommand', NullCmd()): with mock.patch('roll_deps._RunCommand', NullCmd()):
UpdateDepsFile(self._webrtc_depsfile_android, UpdateDepsFile(self._webrtc_depsfile_android, NO_CHROMIUM_REVISION_UPDATE,
NO_CHROMIUM_REVISION_UPDATE, changed_deps, changed_deps, new_cr_contents)
new_cr_contents)
with open(self._webrtc_depsfile_android) as deps_file: with open(self._webrtc_depsfile_android) as deps_file:
updated_contents = deps_file.read() updated_contents = deps_file.read()
@ -174,8 +166,7 @@ class TestRollChromiumRevision(unittest.TestCase):
vars_dict = local_scope['vars'] vars_dict = local_scope['vars']
def AssertVar(variable_name): def AssertVar(variable_name):
self.assertEqual(vars_dict[variable_name], self.assertEqual(vars_dict[variable_name], TEST_DATA_VARS[variable_name])
TEST_DATA_VARS[variable_name])
AssertVar('chromium_git') AssertVar('chromium_git')
AssertVar('chromium_revision') AssertVar('chromium_revision')
@ -200,8 +191,7 @@ class TestRollChromiumRevision(unittest.TestCase):
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile)
with mock.patch('roll_deps._RunCommand', self.fake): with mock.patch('roll_deps._RunCommand', self.fake):
_SetupGitLsRemoteCall( _SetupGitLsRemoteCall(
self.fake, self.fake, 'https://chromium.googlesource.com/chromium/src/build',
'https://chromium.googlesource.com/chromium/src/build',
BUILD_NEW_REV) BUILD_NEW_REV)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
@ -214,8 +204,7 @@ class TestRollChromiumRevision(unittest.TestCase):
self.assertEqual(changed_deps[1].package, 'gn/gn/linux-amd64') self.assertEqual(changed_deps[1].package, 'gn/gn/linux-amd64')
self.assertEqual(changed_deps[1].current_version, self.assertEqual(changed_deps[1].current_version,
'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263') 'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263')
self.assertEqual(changed_deps[1].new_version, self.assertEqual(changed_deps[1].new_version, 'git_revision:new-revision')
'git_revision:new-revision')
self.assertEqual(changed_deps[2].path, 'src/third_party/depot_tools') self.assertEqual(changed_deps[2].path, 'src/third_party/depot_tools')
self.assertEqual(changed_deps[2].current_rev, DEPOTTOOLS_OLD_REV) self.assertEqual(changed_deps[2].current_rev, DEPOTTOOLS_OLD_REV)
@ -239,11 +228,10 @@ class TestRollChromiumRevision(unittest.TestCase):
def testFindAddedDeps(self): def testFindAddedDeps(self):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
added_android_paths, other_paths = FindAddedDeps( added_android_paths, other_paths = FindAddedDeps(webrtc_deps, new_cr_deps)
webrtc_deps, new_cr_deps) self.assertEqual(
self.assertEqual(added_android_paths, [ added_android_paths,
'src/third_party/android_deps/libs/android_arch_lifecycle_common' ['src/third_party/android_deps/libs/android_arch_lifecycle_common'])
])
self.assertEqual(other_paths, []) self.assertEqual(other_paths, [])
def testFindRemovedDeps(self): def testFindRemovedDeps(self):
@ -251,9 +239,9 @@ class TestRollChromiumRevision(unittest.TestCase):
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
removed_android_paths, other_paths = FindRemovedDeps( removed_android_paths, other_paths = FindRemovedDeps(
webrtc_deps, new_cr_deps) webrtc_deps, new_cr_deps)
self.assertEqual(removed_android_paths, [ self.assertEqual(
'src/third_party/android_deps/libs/android_arch_lifecycle_runtime' removed_android_paths,
]) ['src/third_party/android_deps/libs/android_arch_lifecycle_runtime'])
self.assertEqual(other_paths, []) self.assertEqual(other_paths, [])
def testMissingDepsIsDetected(self): def testMissingDepsIsDetected(self):
@ -264,8 +252,7 @@ class TestRollChromiumRevision(unittest.TestCase):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
_, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps) _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps)
self.assertEqual( self.assertEqual(other_paths,
other_paths,
['src/buildtools/linux64', 'src/third_party/depot_tools']) ['src/buildtools/linux64', 'src/third_party/depot_tools'])
def testExpectedDepsIsNotReportedMissing(self): def testExpectedDepsIsNotReportedMissing(self):
@ -289,9 +276,8 @@ class TestRollChromiumRevision(unittest.TestCase):
new_commit_pos = 'f00d' new_commit_pos = 'f00d'
commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE, commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE,
current_commit_pos, current_commit_pos, new_commit_pos,
new_commit_pos, changed_deps, changed_deps, added_paths, removed_paths)
added_paths, removed_paths)
return [l.strip() for l in commit_msg.split('\n')] return [l.strip() for l in commit_msg.split('\n')]

View File

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -19,7 +21,7 @@ if __name__ == '__main__':
args = sys.argv args = sys.argv
if len(args) != 2: if len(args) != 2:
print('Usage: binary_version_test.py <FILE_NAME>') print('Usage: binary_version_test.py <FILE_NAME>')
exit(1) sys.exit(1)
filename = sys.argv[1] filename = sys.argv[1]
output = subprocess.check_output(['strings', filename]) output = subprocess.check_output(['strings', filename])
strings_in_binary = output.decode('utf-8').splitlines() strings_in_binary = output.decode('utf-8').splitlines()
@ -27,8 +29,8 @@ if __name__ == '__main__':
if WEBRTC_VERSION_RE.match(symbol): if WEBRTC_VERSION_RE.match(symbol):
with open('webrtc_binary_version_check', 'w') as f: with open('webrtc_binary_version_check', 'w') as f:
f.write(symbol) f.write(symbol)
exit(0) sys.exit(0)
print('WebRTC source timestamp not found in "%s"' % filename) print('WebRTC source timestamp not found in "%s"' % filename)
print('Check why "kSourceTimestamp" from call/version.cc is not linked ' print('Check why "kSourceTimestamp" from call/version.cc is not linked '
'(or why it has been optimized away by the compiler/linker)') '(or why it has been optimized away by the compiler/linker)')
exit(1) sys.exit(1)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -20,9 +21,8 @@ import shutil
import subprocess import subprocess
import sys import sys
import tempfile import tempfile
#pylint: disable=relative-import from presubmit_checks_lib.build_helpers import (GetClangTidyPath,
from presubmit_checks_lib.build_helpers import GetClangTidyPath, \ GetCompilationCommand)
GetCompilationCommand
# We enable all checkers by default for investigation purpose. # We enable all checkers by default for investigation purpose.
# This includes clang-analyzer-* checks. # This includes clang-analyzer-* checks.
@ -55,7 +55,7 @@ def Process(filepath, args):
# Replace clang++ by clang-tidy # Replace clang++ by clang-tidy
command[0:1] = [GetClangTidyPath(), CHECKER_OPTION, rel_path command[0:1] = [GetClangTidyPath(), CHECKER_OPTION, rel_path
] + args + ['--'] # Separator for clang flags. ] + args + ['--'] # Separator for clang flags.
print "Running: %s" % ' '.join(command) print("Running: %s" % ' '.join(command))
# Run from build dir so that relative paths are correct. # Run from build dir so that relative paths are correct.
p = subprocess.Popen(command, p = subprocess.Popen(command,
cwd=out_dir, cwd=out_dir,

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -36,10 +37,9 @@ def main():
return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary) return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary)
modules_unittests = 'out/coverage/modules_unittests' modules_unittests = 'out/coverage/modules_unittests'
cmd[cmd.index('-c \'%s\'' % cmd[cmd.index('-c \'%s\'' % modules_unittests)] = WithXvfb(modules_unittests)
modules_unittests)] = WithXvfb(modules_unittests)
print ' '.join(cmd) print(' '.join(cmd))
return 0 return 0

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -46,7 +47,6 @@ if os.path.exists(binary_path):
========== ENDING OF PATCH ========== ========== ENDING OF PATCH ==========
""" """
import sys import sys
DIRECTORY = 'out/coverage' DIRECTORY = 'out/coverage'
@ -102,64 +102,64 @@ def GenerateIOSSimulatorCommand():
[FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] + [FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] +
[FormatIossimTest(t, is_xctest=False) for t in TESTS]) [FormatIossimTest(t, is_xctest=False) for t in TESTS])
print 'To get code coverage using iOS sim just run following commands:' print('To get code coverage using iOS sim just run following commands:')
print '' print('')
print ' '.join(gn_cmd) print(' '.join(gn_cmd))
print '' print('')
print ' '.join(coverage_cmd) print(' '.join(coverage_cmd))
return 0 return 0
def GenerateIOSDeviceCommand(): def GenerateIOSDeviceCommand():
gn_args_string = ' '.join(GetGNArgs(is_simulator=False)) gn_args_string = ' '.join(GetGNArgs(is_simulator=False))
coverage_report_cmd = ( coverage_report_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
[sys.executable, 'tools/code_coverage/coverage.py'] +
['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] + ['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] +
['-o out/report'] + ['-p %s/merged.profdata' % DIRECTORY] + ['-o out/report'] +
['-p %s/merged.profdata' % DIRECTORY] +
['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\'']) ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''])
print 'Computing code coverage for real iOS device is a little bit tedious.' print('Computing code coverage for real iOS device is a little bit tedious.')
print '' print('')
print 'You will need:' print('You will need:')
print '' print('')
print '1. Generate xcode project and open it with Xcode 10+:' print('1. Generate xcode project and open it with Xcode 10+:')
print ' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string) print(' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string))
print ' open %s/all.xcworkspace' % DIRECTORY print(' open %s/all.xcworkspace' % DIRECTORY)
print '' print('')
print '2. Execute these Run targets manually with Xcode Run button and ' print('2. Execute these Run targets manually with Xcode Run button and ')
print 'manually save generated coverage.profraw file to %s:' % DIRECTORY print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
print '\n'.join('- %s' % t for t in TESTS) print('\n'.join('- %s' % t for t in TESTS))
print '' print('')
print '3. Execute these Test targets manually with Xcode Test button and ' print('3. Execute these Test targets manually with Xcode Test button and ')
print 'manually save generated coverage.profraw file to %s:' % DIRECTORY print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
print '\n'.join('- %s' % t for t in XC_TESTS) print('\n'.join('- %s' % t for t in XC_TESTS))
print '' print('')
print '4. Merge *.profraw files to *.profdata using llvm-profdata tool:' print('4. Merge *.profraw files to *.profdata using llvm-profdata tool:')
print(' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' + print((' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' +
'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' + 'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' +
'-o %s/merged.profdata ' % DIRECTORY + '-o %s/merged.profdata ' % DIRECTORY +
'-sparse=true %s/*.profraw' % DIRECTORY) '-sparse=true %s/*.profraw' % DIRECTORY))
print '' print('')
print '5. Generate coverage report:' print('5. Generate coverage report:')
print ' ' + ' '.join(coverage_report_cmd) print(' ' + ' '.join(coverage_report_cmd))
return 0 return 0
def Main(): def main():
if len(sys.argv) < 2: if len(sys.argv) < 2:
print 'Please specify type of coverage:' print('Please specify type of coverage:')
print ' %s simulator' % sys.argv[0] print(' %s simulator' % sys.argv[0])
print ' %s device' % sys.argv[0] print(' %s device' % sys.argv[0])
elif sys.argv[1] == 'simulator': elif sys.argv[1] == 'simulator':
GenerateIOSSimulatorCommand() GenerateIOSSimulatorCommand()
elif sys.argv[1] == 'device': elif sys.argv[1] == 'device':
GenerateIOSDeviceCommand() GenerateIOSDeviceCommand()
else: else:
print 'Unsupported type of coverage' print('Unsupported type of coverage')
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(Main()) sys.exit(main())

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# #
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
# #
@ -8,37 +8,37 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import psutil
import sys import sys
import psutil
import numpy import numpy
from matplotlib import pyplot from matplotlib import pyplot
class CpuSnapshot(object): class CpuSnapshot:
def __init__(self, label): def __init__(self, label):
self.label = label self.label = label
self.samples = [] self.samples = []
def Capture(self, sample_count): def Capture(self, sample_count):
print('Capturing %d CPU samples for %s...' % print(('Capturing %d CPU samples for %s...' %
((sample_count - len(self.samples)), self.label)) ((sample_count - len(self.samples)), self.label)))
while len(self.samples) < sample_count: while len(self.samples) < sample_count:
self.samples.append(psutil.cpu_percent(1.0, False)) self.samples.append(psutil.cpu_percent(1.0, False))
def Text(self): def Text(self):
return ('%s: avg=%s, median=%s, min=%s, max=%s' % return (
(self.label, numpy.average(self.samples), '%s: avg=%s, median=%s, min=%s, max=%s' %
numpy.median(self.samples), numpy.min( (self.label, numpy.average(self.samples), numpy.median(
self.samples), numpy.max(self.samples))) self.samples), numpy.min(self.samples), numpy.max(self.samples)))
def Max(self): def Max(self):
return numpy.max(self.samples) return numpy.max(self.samples)
def GrabCpuSamples(sample_count): def GrabCpuSamples(sample_count):
print 'Label for snapshot (enter to quit): ' print('Label for snapshot (enter to quit): ')
label = raw_input().strip() label = input().strip()
if len(label) == 0: if len(label) == 0:
return None return None
@ -49,12 +49,12 @@ def GrabCpuSamples(sample_count):
def main(): def main():
print 'How many seconds to capture per snapshot (enter for 60)?' print('How many seconds to capture per snapshot (enter for 60)?')
sample_count = raw_input().strip() sample_count = input().strip()
if len(sample_count) > 0 and int(sample_count) > 0: if len(sample_count) > 0 and int(sample_count) > 0:
sample_count = int(sample_count) sample_count = int(sample_count)
else: else:
print 'Defaulting to 60 samples.' print('Defaulting to 60 samples.')
sample_count = 60 sample_count = 60
snapshots = [] snapshots = []
@ -65,7 +65,7 @@ def main():
snapshots.append(snapshot) snapshots.append(snapshot)
if len(snapshots) == 0: if len(snapshots) == 0:
print 'no samples captured' print('no samples captured')
return -1 return -1
pyplot.title('CPU usage') pyplot.title('CPU usage')

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -43,7 +44,7 @@ def main(directories):
'--recursive', '--recursive',
path, path,
] ]
print 'Downloading precompiled tools...' print('Downloading precompiled tools...')
# Perform download similar to how gclient hooks execute. # Perform download similar to how gclient hooks execute.
try: try:
@ -51,7 +52,7 @@ def main(directories):
cwd=SRC_DIR, cwd=SRC_DIR,
always_show_header=True) always_show_header=True)
except (gclient_utils.Error, subprocess2.CalledProcessError) as e: except (gclient_utils.Error, subprocess2.CalledProcessError) as e:
print 'Error: %s' % str(e) print('Error: %s' % str(e))
return 2 return 2
return 0 return 0

View File

@ -1,4 +1,5 @@
#!/usr/bin/env vpython3 #!/usr/bin/env vpython3
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -26,10 +27,10 @@ If any command line arguments are passed to the script, it is executed as a
command in a subprocess. command in a subprocess.
""" """
# psutil is not installed on non-Linux machines by default.
import psutil # pylint: disable=F0401
import subprocess import subprocess
import sys import sys
# psutil is not installed on non-Linux machines by default.
import psutil # pylint: disable=F0401
WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam') WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam')
WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app') WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app')
@ -81,7 +82,7 @@ def StartWebCam():
def _ForcePythonInterpreter(cmd): def _ForcePythonInterpreter(cmd):
"""Returns the fixed command line to call the right python executable.""" """Returns the fixed command line to call the right python executable."""
out = cmd[:] out = cmd[:]
if out[0] == 'python': if out[0] == 'vpython3':
out[0] = sys.executable out[0] = sys.executable
elif out[0].endswith('.py'): elif out[0].endswith('.py'):
out.insert(0, sys.executable) out.insert(0, sys.executable)
@ -95,7 +96,6 @@ def Main(argv):
if argv: if argv:
return subprocess.call(_ForcePythonInterpreter(argv)) return subprocess.call(_ForcePythonInterpreter(argv))
else:
return 0 return 0
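
A standalone sketch of the launcher rewrite above (not part of the diff), showing what the updated check does to two hypothetical command lines:

# Editor's sketch; 'webcam.py' and '--start' are hypothetical arguments.
import sys


def _force_python_interpreter(cmd):
  out = cmd[:]
  if out[0] == 'vpython3':
    # Replace the launcher name with the interpreter running this script.
    out[0] = sys.executable
  elif out[0].endswith('.py'):
    # Bare scripts get an explicit interpreter prepended.
    out.insert(0, sys.executable)
  return out


# Both calls yield sys.executable followed by the script and its flag.
print(_force_python_interpreter(['vpython3', 'webcam.py', '--start']))
print(_force_python_interpreter(['webcam.py', '--start']))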

View File

@ -1,4 +1,4 @@
#!/usr/bin/env/python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
@ -24,7 +24,7 @@ following executable in your out folder:
You will be able to compile the same executable targeting your host machine You will be able to compile the same executable targeting your host machine
by running: by running:
$ python tools_webrtc/executable_host_build.py --executable_name random_exec $ vpython3 tools_webrtc/executable_host_build.py --executable_name random_exec
The generated executable will have the same name as the input executable with The generated executable will have the same name as the input executable with
suffix '_host'. suffix '_host'.
@ -95,7 +95,6 @@ if __name__ == '__main__':
EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host' EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host'
with HostBuildDir() as build_dir: with HostBuildDir() as build_dir:
_RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir]) _RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir])
_RunCommand( _RunCommand([DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
[DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD), shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD),
EXECUTABLE_FINAL_NAME) EXECUTABLE_FINAL_NAME)
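
The host build boils down to `gn gen` plus `ninja` in a scratch directory followed by a copy. A minimal sketch, assuming `gn` and `ninja` are on PATH, the working directory is a WebRTC checkout, and `random_exec` is a buildable target:

# Editor's sketch; the PATH, checkout and 'random_exec' target are assumptions.
import os
import shutil
import subprocess
import tempfile

build_dir = tempfile.mkdtemp(prefix='host_build_')
subprocess.check_call(['gn', 'gen', build_dir])
subprocess.check_call(['ninja', '-C', build_dir, 'random_exec'])
shutil.copy(os.path.join(build_dir, 'random_exec'), 'random_exec_host')
shutil.rmtree(build_dir, ignore_errors=True)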

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
# #
@ -36,6 +36,7 @@ def main():
def _ForcePythonInterpreter(cmd): def _ForcePythonInterpreter(cmd):
"""Returns the fixed command line to call the right python executable.""" """Returns the fixed command line to call the right python executable."""
out = cmd[:] out = cmd[:]
if len(out) > 0:
if out[0] == 'python': if out[0] == 'python':
out[0] = sys.executable out[0] = sys.executable
elif out[0].endswith('.py'): elif out[0].endswith('.py'):

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,9 +12,6 @@ This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines'). (or a list of 'landmines').
""" """
from __future__ import absolute_import
from __future__ import print_function
import os import os
import sys import sys
@ -39,8 +37,7 @@ def print_landmines(): # pylint: disable=invalid-name
if host_os() == 'win': if host_os() == 'win':
print('Clobber to resolve some issues with corrupt .pdb files on bots.') print('Clobber to resolve some issues with corrupt .pdb files on bots.')
print('Clobber due to corrupt .pdb files (after #14623)') print('Clobber due to corrupt .pdb files (after #14623)')
print( print('Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
print('Clobber due to Win Clang Debug linking errors in ' print('Clobber due to Win Clang Debug linking errors in '
'https://codereview.webrtc.org/2786603002') 'https://codereview.webrtc.org/2786603002')
print('Clobber due to Win Debug linking errors in ' print('Clobber due to Win Debug linking errors in '

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
@ -14,9 +14,9 @@ It will run `mb gen` in a temporary directory and it is really useful to
check for different configurations. check for different configurations.
Usage: Usage:
$ python tools_webrtc/gn_check_autofix.py -m some_master -b some_bot $ vpython3 tools_webrtc/gn_check_autofix.py -m some_master -b some_bot
or or
$ python tools_webrtc/gn_check_autofix.py -c some_mb_config $ vpython3 tools_webrtc/gn_check_autofix.py -c some_mb_config
""" """
import os import os
@ -38,7 +38,7 @@ TARGET_RE = re.compile(
r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$') r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$')
class TemporaryDirectory(object): class TemporaryDirectory:
def __init__(self): def __init__(self):
self._closed = False self._closed = False
self._name = None self._name = None
@ -54,7 +54,7 @@ class TemporaryDirectory(object):
def Run(cmd): def Run(cmd):
print 'Running:', ' '.join(cmd) print('Running:', ' '.join(cmd))
sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return sub.communicate() return sub.communicate()
@ -75,14 +75,13 @@ def FixErrors(filename, missing_deps, deleted_sources):
match = re.match(indentation_level + '}$', line) match = re.match(indentation_level + '}$', line)
if match: if match:
line = ('deps = [\n' + ''.join(' "' + dep + '",\n' line = ('deps = [\n' + ''.join(' "' + dep + '",\n'
for dep in missing_deps[target]) for dep in missing_deps[target]) +
+ ']\n') + line ']\n') + line
indentation_level = None indentation_level = None
elif line.strip().startswith('deps'): elif line.strip().startswith('deps'):
is_empty_deps = line.strip() == 'deps = []' is_empty_deps = line.strip() == 'deps = []'
line = 'deps = [\n' if is_empty_deps else line line = 'deps = [\n' if is_empty_deps else line
line += ''.join(' "' + dep + '",\n' line += ''.join(' "' + dep + '",\n' for dep in missing_deps[target])
for dep in missing_deps[target])
line += ']\n' if is_empty_deps else '' line += ']\n' if is_empty_deps else ''
indentation_level = None indentation_level = None
@ -156,10 +155,10 @@ def main():
] + sys.argv[1:]) ] + sys.argv[1:])
mb_output = Run(mb_gen_command) mb_output = Run(mb_gen_command)
errors = mb_output[0].split('ERROR')[1:] errors = mb_output[0].decode('utf-8').split('ERROR')[1:]
if mb_output[1]: if mb_output[1]:
print mb_output[1] print(mb_output[1])
return 1 return 1
for error in errors: for error in errors:
@ -168,7 +167,7 @@ def main():
if target_msg not in error: if target_msg not in error:
target_msg = 'It is not in any dependency of' target_msg = 'It is not in any dependency of'
if target_msg not in error: if target_msg not in error:
print '\n'.join(error) print('\n'.join(error))
continue continue
index = error.index(target_msg) + 1 index = error.index(target_msg) + 1
path, target = error[index].strip().split(':') path, target = error[index].strip().split(':')
@ -182,14 +181,13 @@ def main():
path = os.path.join(path[2:], 'BUILD.gn') path = os.path.join(path[2:], 'BUILD.gn')
errors_by_file[path][target].add(dep) errors_by_file[path][target].add(dep)
elif error[index + 1] == 'has a source file:': elif error[index + 1] == 'has a source file:':
deleted_file = '"' + os.path.basename( deleted_file = '"' + os.path.basename(error[index + 2].strip()) + '",'
error[index + 2].strip()) + '",'
deleted_sources.add(deleted_file) deleted_sources.add(deleted_file)
else: else:
print '\n'.join(error) print('\n'.join(error))
continue continue
for path, missing_deps in errors_by_file.items(): for path, missing_deps in list(errors_by_file.items()):
FixErrors(path, missing_deps, deleted_sources) FixErrors(path, missing_deps, deleted_sources)
return 0 return 0

View File

@ -53,7 +53,7 @@ For example:
Will be converted into: Will be converted into:
python gtest-parallel \ vpython3 gtest-parallel \
--shard_index 0 \ --shard_index 0 \
--shard_count 1 \ --shard_count 1 \
--output_dir=SOME_OUTPUT_DIR \ --output_dir=SOME_OUTPUT_DIR \
@ -82,8 +82,8 @@ Args = collections.namedtuple(
['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir']) ['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir'])
def _CatFiles(file_list, output_file): def _CatFiles(file_list, output_file_destination):
with open(output_file, 'w') as output_file: with open(output_file_destination, 'w') as output_file:
for filename in file_list: for filename in file_list:
with open(filename) as input_file: with open(filename) as input_file:
output_file.write(input_file.read()) output_file.write(input_file.read())
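
The rename above only stops the output_file parameter from being reused as the file handle; behaviour is unchanged. A tiny runnable illustration with hypothetical shard log names:

# Editor's sketch; the shard log names are hypothetical.
def cat_files(file_list, output_file_destination):
  with open(output_file_destination, 'w') as output_file:
    for filename in file_list:
      with open(filename) as input_file:
        output_file.write(input_file.read())


for name, text in [('shard_0.log', 'first shard\n'), ('shard_1.log', 'second shard\n')]:
  with open(name, 'w') as f:
    f.write(text)
cat_files(['shard_0.log', 'shard_1.log'], 'combined.log')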
@ -100,7 +100,7 @@ def _ParseWorkersOption(workers):
return max(result, 1) # Sanitize when using e.g. '0.5x'. return max(result, 1) # Sanitize when using e.g. '0.5x'.
class ReconstructibleArgumentGroup(object): class ReconstructibleArgumentGroup:
"""An argument group that can be converted back into a command line. """An argument group that can be converted back into a command line.
This acts like ArgumentParser.add_argument_group, but names of arguments added This acts like ArgumentParser.add_argument_group, but names of arguments added
@ -154,7 +154,7 @@ def ParseArgs(argv=None):
parser.add_argument('--store-test-artifacts', action='store_true') parser.add_argument('--store-test-artifacts', action='store_true')
# No-sandbox is a Chromium-specific flag, ignore it. # No-sandbox is a Chromium-specific flag, ignore it.
# TODO(oprypin): Remove (bugs.webrtc.org/8115) # TODO(bugs.webrtc.org/8115): Remove workaround when fixed.
parser.add_argument('--no-sandbox', parser.add_argument('--no-sandbox',
action='store_true', action='store_true',
help=argparse.SUPPRESS) help=argparse.SUPPRESS)
@ -171,7 +171,7 @@ def ParseArgs(argv=None):
} }
args_to_pass = [] args_to_pass = []
for arg in unrecognized_args: for arg in unrecognized_args:
if any(arg.startswith(k) for k in webrtc_flags_to_change.keys()): if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())):
arg_split = arg.split('=') arg_split = arg.split('=')
args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' + args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' +
arg_split[1]) arg_split[1])

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
@ -34,8 +34,7 @@ class GtestParallelWrapperHelpersTest(unittest.TestCase):
def testGetTwiceWorkers(self): def testGetTwiceWorkers(self):
expected = 2 * multiprocessing.cpu_count() expected = 2 * multiprocessing.cpu_count()
# pylint: disable=protected-access # pylint: disable=protected-access
self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), expected)
expected)
def testGetHalfWorkers(self): def testGetHalfWorkers(self):
expected = max(multiprocessing.cpu_count() // 2, 1) expected = max(multiprocessing.cpu_count() // 2, 1)
@ -56,21 +55,19 @@ class GtestParallelWrapperTest(unittest.TestCase):
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testMixing(self): def testMixing(self):
result = gtest_parallel_wrapper.ParseArgs([ result = gtest_parallel_wrapper.ParseArgs(
'--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124' ['--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124'])
])
expected = self._Expected( expected = self._Expected(
['--timeout=124', 'exec', '--', '--param1', '--param2']) ['--timeout=124', 'exec', '--', '--param1', '--param2'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testMixingPositional(self): def testMixingPositional(self):
result = gtest_parallel_wrapper.ParseArgs([ result = gtest_parallel_wrapper.ParseArgs([
'--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--foo2',
'--foo2', 'bar2' 'bar2'
])
expected = self._Expected([
'--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'
]) ])
expected = self._Expected(
['--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testDoubleDash1(self): def testDoubleDash1(self):
@ -83,8 +80,7 @@ class GtestParallelWrapperTest(unittest.TestCase):
def testDoubleDash2(self): def testDoubleDash2(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['--timeout=123', '--', 'exec', '--timeout=124']) ['--timeout=123', '--', 'exec', '--timeout=124'])
expected = self._Expected( expected = self._Expected(['--timeout=123', 'exec', '--', '--timeout=124'])
['--timeout=123', 'exec', '--', '--timeout=124'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testArtifacts(self): def testArtifacts(self):
@ -135,16 +131,16 @@ class GtestParallelWrapperTest(unittest.TestCase):
result = gtest_parallel_wrapper.ParseArgs([ result = gtest_parallel_wrapper.ParseArgs([
'some_test', '--some_flag=some_value', '--another_flag', 'some_test', '--some_flag=some_value', '--another_flag',
'--output_dir=' + output_dir, '--store-test-artifacts', '--output_dir=' + output_dir, '--store-test-artifacts',
'--isolated-script-test-perf-output=SOME_OTHER_DIR', '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar',
'--foo=bar', '--baz' '--baz'
]) ])
expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts') expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
expected = self._Expected([ expected = self._Expected([
'--output_dir=' + output_dir, 'some_test', '--', '--output_dir=' + output_dir, 'some_test', '--',
'--test_artifacts_dir=' + expected_artifacts_dir, '--test_artifacts_dir=' + expected_artifacts_dir,
'--some_flag=some_value', '--another_flag', '--some_flag=some_value', '--another_flag',
'--isolated_script_test_perf_output=SOME_OTHER_DIR', '--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar',
'--foo=bar', '--baz' '--baz'
]) ])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
@ -161,8 +157,7 @@ class GtestParallelWrapperTest(unittest.TestCase):
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testUseHalfTheCpuCores(self): def testUseHalfTheCpuCores(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(['--workers', '0.5x', 'exec'])
['--workers', '0.5x', 'exec'])
workers = max(multiprocessing.cpu_count() // 2, 1) workers = max(multiprocessing.cpu_count() // 2, 1)
expected = self._Expected(['--workers=%s' % workers, 'exec']) expected = self._Expected(['--workers=%s' % workers, 'exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -57,8 +57,7 @@ def _ParseArgs():
choices=['debug', 'release'], choices=['debug', 'release'],
help='The build config. Can be "debug" or "release". ' help='The build config. Can be "debug" or "release". '
'Defaults to "release".') 'Defaults to "release".')
parser.add_argument( parser.add_argument('--arch',
'--arch',
nargs='+', nargs='+',
default=DEFAULT_ARCHS, default=DEFAULT_ARCHS,
choices=ENABLED_ARCHS, choices=ENABLED_ARCHS,
@ -69,8 +68,7 @@ def _ParseArgs():
action='store_true', action='store_true',
default=False, default=False,
help='Removes the previously generated build output, if any.') help='Removes the previously generated build output, if any.')
parser.add_argument( parser.add_argument('-p',
'-p',
'--purify', '--purify',
action='store_true', action='store_true',
default=False, default=False,
@ -158,8 +156,10 @@ def BuildWebRTC(output_dir, target_environment, target_arch, flavor,
gn_target_name, ios_deployment_target, libvpx_build_vp9, gn_target_name, ios_deployment_target, libvpx_build_vp9,
use_bitcode, use_goma, extra_gn_args): use_bitcode, use_goma, extra_gn_args):
gn_args = [ gn_args = [
'target_os="ios"', 'ios_enable_code_signing=false', 'target_os="ios"',
'is_component_build=false', 'rtc_include_tests=false', 'ios_enable_code_signing=false',
'is_component_build=false',
'rtc_include_tests=false',
] ]
# Add flavor option. # Add flavor option.
@ -179,8 +179,7 @@ def BuildWebRTC(output_dir, target_environment, target_arch, flavor,
gn_args.append('rtc_libvpx_build_vp9=' + gn_args.append('rtc_libvpx_build_vp9=' +
('true' if libvpx_build_vp9 else 'false')) ('true' if libvpx_build_vp9 else 'false'))
gn_args.append('enable_ios_bitcode=' + gn_args.append('enable_ios_bitcode=' + ('true' if use_bitcode else 'false'))
('true' if use_bitcode else 'false'))
gn_args.append('use_goma=' + ('true' if use_goma else 'false')) gn_args.append('use_goma=' + ('true' if use_goma else 'false'))
gn_args.append('rtc_enable_objc_symbol_export=true') gn_args.append('rtc_enable_objc_symbol_export=true')
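
A sketch of how such a list typically ends up as a single --args string for `gn gen`; the values and out directory are placeholders, and the space-joined form is the general gn convention, not a quote from build_ios_libs.py:

# Editor's sketch; argument values and the out directory are placeholders.
gn_args = [
    'target_os="ios"',
    'ios_enable_code_signing=false',
    'is_component_build=false',
    'rtc_include_tests=false',
    'is_debug=false',
    'rtc_libvpx_build_vp9=false',
    'enable_ios_bitcode=false',
    'use_goma=false',
]
print("gn gen out/ios_libs --args='%s'" % ' '.join(gn_args))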
@ -224,7 +223,7 @@ def main():
gn_args = args.extra_gn_args gn_args = args.extra_gn_args
if args.purify: if args.purify:
_CleanTemporary(args.output_dir, architectures.keys()) _CleanTemporary(args.output_dir, list(architectures.keys()))
return 0 return 0
gn_target_name = 'framework_objc' gn_target_name = 'framework_objc'
@ -235,7 +234,7 @@ def main():
# Build all architectures. # Build all architectures.
framework_paths = [] framework_paths = []
all_lib_paths = [] all_lib_paths = []
for (environment, archs) in architectures.items(): for (environment, archs) in list(architectures.items()):
framework_path = os.path.join(args.output_dir, environment) framework_path = os.path.join(args.output_dir, environment)
framework_paths.append(framework_path) framework_paths.append(framework_path)
lib_paths = [] lib_paths = []
@ -250,11 +249,9 @@ def main():
# Combine the slices. # Combine the slices.
dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC') dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC')
# Dylibs will be combined, all other files are the same across archs. # Dylibs will be combined, all other files are the same across archs.
shutil.rmtree( shutil.rmtree(os.path.join(framework_path, SDK_FRAMEWORK_NAME),
os.path.join(framework_path, SDK_FRAMEWORK_NAME),
ignore_errors=True) ignore_errors=True)
shutil.copytree( shutil.copytree(os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
os.path.join(framework_path, SDK_FRAMEWORK_NAME), os.path.join(framework_path, SDK_FRAMEWORK_NAME),
symlinks=True) symlinks=True)
logging.info('Merging framework slices for %s.', environment) logging.info('Merging framework slices for %s.', environment)
@ -273,24 +270,20 @@ def main():
# Merge the dSYM slices. # Merge the dSYM slices.
lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME) lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME)
if os.path.isdir(lib_dsym_dir_path): if os.path.isdir(lib_dsym_dir_path):
shutil.rmtree( shutil.rmtree(os.path.join(framework_path, SDK_DSYM_NAME),
os.path.join(framework_path, SDK_DSYM_NAME),
ignore_errors=True) ignore_errors=True)
shutil.copytree( shutil.copytree(lib_dsym_dir_path,
lib_dsym_dir_path, os.path.join(framework_path, SDK_DSYM_NAME)) os.path.join(framework_path, SDK_DSYM_NAME))
logging.info('Merging dSYM slices.') logging.info('Merging dSYM slices.')
dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', 'DWARF',
'DWARF', 'WebRTC') 'WebRTC')
lib_dsym_paths = [ lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths]
os.path.join(path, dsym_path) for path in lib_paths
]
out_dsym_path = os.path.join(framework_path, dsym_path) out_dsym_path = os.path.join(framework_path, dsym_path)
try: try:
os.remove(out_dsym_path) os.remove(out_dsym_path)
except OSError: except OSError:
pass pass
cmd = ['lipo' cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
_RunCommand(cmd) _RunCommand(cmd)
# Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)... # Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)...

View File

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license

View File

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license

View File

@ -1,4 +1,4 @@
#!/usr/bin/python #!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved. # Copyright 2016 The WebRTC project authors. All Rights Reserved.
# #
@ -10,11 +10,12 @@
"""Script for merging generated iOS libraries.""" """Script for merging generated iOS libraries."""
import sys import sys
import argparse import argparse
import os import os
import re import re
import subprocess import subprocess
from six.moves import range
# Valid arch subdir names. # Valid arch subdir names.
VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs'] VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs']
@ -49,12 +50,12 @@ def MergeLibs(lib_base_dir):
libs[filename] = entry libs[filename] = entry
orphaned_libs = {} orphaned_libs = {}
valid_libs = {} valid_libs = {}
for library, paths in libs.items(): for library, paths in list(libs.items()):
if len(paths) < len(archs): if len(paths) < len(archs):
orphaned_libs[library] = paths orphaned_libs[library] = paths
else: else:
valid_libs[library] = paths valid_libs[library] = paths
for library, paths in orphaned_libs.items(): for library, paths in list(orphaned_libs.items()):
components = library[:-2].split('_')[:-1] components = library[:-2].split('_')[:-1]
found = False found = False
# Find directly matching parent libs by stripping suffix. # Find directly matching parent libs by stripping suffix.
@ -68,7 +69,7 @@ def MergeLibs(lib_base_dir):
# Find next best match by finding parent libs with the same prefix. # Find next best match by finding parent libs with the same prefix.
if not found: if not found:
base_prefix = library[:-2].split('_')[0] base_prefix = library[:-2].split('_')[0]
for valid_lib, valid_paths in valid_libs.items(): for valid_lib, valid_paths in list(valid_libs.items()):
if valid_lib[:len(base_prefix)] == base_prefix: if valid_lib[:len(base_prefix)] == base_prefix:
valid_paths.extend(paths) valid_paths.extend(paths)
found = True found = True
@ -89,18 +90,16 @@ def MergeLibs(lib_base_dir):
# Merge libraries using libtool. # Merge libraries using libtool.
libtool_returncode = 0 libtool_returncode = 0
for library, paths in valid_libs.items(): for library, paths in list(valid_libs.items()):
cmd_list = [ cmd_list = [
'libtool', '-static', '-v', '-o', 'libtool', '-static', '-v', '-o',
os.path.join(output_dir_path, library) os.path.join(output_dir_path, library)
] + paths ] + paths
libtoolout = subprocess.Popen(cmd_list, libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
stderr=subprocess.PIPE,
env=env)
_, err = libtoolout.communicate() _, err = libtoolout.communicate()
for line in err.splitlines(): for line in err.splitlines():
if not libtool_re.match(line): if not libtool_re.match(line):
print >> sys.stderr, line print(line, file=sys.stderr)
# Unconditionally touch the output .a file on the command line if present # Unconditionally touch the output .a file on the command line if present
# and the command succeeded. A bit hacky. # and the command succeeded. A bit hacky.
libtool_returncode = libtoolout.returncode libtool_returncode = libtoolout.returncode
@ -112,7 +111,7 @@ def MergeLibs(lib_base_dir):
return libtool_returncode return libtool_returncode
def Main(): def main():
parser_description = 'Merge WebRTC libraries.' parser_description = 'Merge WebRTC libraries.'
parser = argparse.ArgumentParser(description=parser_description) parser = argparse.ArgumentParser(description=parser_description)
parser.add_argument('lib_base_dir', parser.add_argument('lib_base_dir',
@ -124,4 +123,4 @@ def Main():
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(Main()) sys.exit(main())
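
Note: two Python 3 idioms recur in this file. `print` is now a function, so `print >> sys.stderr, line` becomes `print(line, file=sys.stderr)`, and `dict.items()` is wrapped in `list()` to take a snapshot, because mutating a dict while iterating its Python 3 view raises RuntimeError. A small self-contained sketch with made-up library names:

```
import sys

# Made-up data standing in for the libs dict built by MergeLibs.
libs = {'libfoo_arm64.a': ['arm64_libs/libfoo_arm64.a'],
        'libfoo_x64.a': ['x64_libs/libfoo_x64.a']}

# list(...) snapshots the items, so the dict may safely be modified in the loop.
for library, paths in list(libs.items()):
    if len(paths) < 2:
        libs.pop(library)
    print(library, file=sys.stderr)  # print is a function in Python 3
```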

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved. # Copyright 2016 The WebRTC project authors. All Rights Reserved.
# #
@ -13,7 +13,8 @@ Licenses are taken from dependent libraries which are determined by
GN desc command `gn desc` on all targets specified via `--target` argument. GN desc command `gn desc` on all targets specified via `--target` argument.
One can see all dependencies by invoking this command: One can see all dependencies by invoking this command:
$ gn.py desc --all --format=json <out_directory> <target> | python -m json.tool $ gn.py desc --all --format=json <out_directory> <target> | \
vpython3 -m json.tool
(see "deps" subarray) (see "deps" subarray)
Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary. Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
@ -21,18 +22,13 @@ Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
""" """
import sys import sys
import argparse import argparse
import json import json
import logging import logging
import os import os
import re import re
import subprocess import subprocess
try: from html import escape
# python 3.2+
from html import escape
except ImportError:
from cgi import escape
# Third_party library to licences mapping. Keys are names of the libraries # Third_party library to licences mapping. Keys are names of the libraries
# (right after the `third_party/` prefix) # (right after the `third_party/` prefix)
@ -124,7 +120,7 @@ THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'
THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$' THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$'
class LicenseBuilder(object): class LicenseBuilder:
def __init__(self, def __init__(self,
buildfile_dirs, buildfile_dirs,
targets, targets,
@ -194,7 +190,7 @@ class LicenseBuilder(object):
def _GetThirdPartyLibraries(self, buildfile_dir, target): def _GetThirdPartyLibraries(self, buildfile_dir, target):
output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target)) output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
libraries = set() libraries = set()
for described_target in output.values(): for described_target in list(output.values()):
third_party_libs = (self._ParseLibrary(dep) third_party_libs = (self._ParseLibrary(dep)
for dep in described_target['deps']) for dep in described_target['deps'])
libraries |= set(lib for lib in third_party_libs if lib) libraries |= set(lib for lib in third_party_libs if lib)
@ -206,12 +202,10 @@ class LicenseBuilder(object):
third_party_libs = set() third_party_libs = set()
for buildfile in self.buildfile_dirs: for buildfile in self.buildfile_dirs:
for target in self.targets: for target in self.targets:
third_party_libs |= self._GetThirdPartyLibraries( third_party_libs |= self._GetThirdPartyLibraries(buildfile, target)
buildfile, target)
assert len(third_party_libs) > 0 assert len(third_party_libs) > 0
missing_licenses = third_party_libs - set( missing_licenses = third_party_libs - set(self.common_licenses_dict.keys())
self.common_licenses_dict.keys())
if missing_licenses: if missing_licenses:
error_msg = 'Missing licenses for following third_party targets: %s' % \ error_msg = 'Missing licenses for following third_party targets: %s' % \
', '.join(sorted(missing_licenses)) ', '.join(sorted(missing_licenses))
@ -225,12 +219,10 @@ class LicenseBuilder(object):
logging.info('List of licenses: %s', ', '.join(license_libs)) logging.info('List of licenses: %s', ', '.join(license_libs))
# Generate markdown. # Generate markdown.
output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+')
'w+')
for license_lib in license_libs: for license_lib in license_libs:
if len(self.common_licenses_dict[license_lib]) == 0: if len(self.common_licenses_dict[license_lib]) == 0:
logging.info( logging.info('Skipping compile time or internal dependency: %s',
'Skipping compile time or internal dependency: %s',
license_lib) license_lib)
continue # Compile time dependency continue # Compile time dependency
@ -258,8 +250,7 @@ def main():
action='append', action='append',
default=[], default=[],
help='Name of the GN target to generate a license for') help='Name of the GN target to generate a license for')
parser.add_argument('output_dir', parser.add_argument('output_dir', help='Directory to output LICENSE.md to.')
help='Directory to output LICENSE.md to.')
parser.add_argument('buildfile_dirs', parser.add_argument('buildfile_dirs',
nargs='+', nargs='+',
help='Directories containing gn generated ninja files') help='Directories containing gn generated ninja files')
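
Note: the `try: from html import escape / except ImportError: from cgi import escape` fallback collapses to a plain import on Python 3. One behavioral detail worth remembering: `html.escape` escapes quotes by default, while the old `cgi.escape` did not unless asked. A quick illustration:

```
from html import escape

print(escape('<LICENSE & "notes">'))               # &lt;LICENSE &amp; &quot;notes&quot;&gt;
print(escape('<LICENSE & "notes">', quote=False))  # closer to the old cgi.escape default
```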

View File

@ -1,5 +1,6 @@
#!/usr/bin/env vpython #!/usr/bin/env vpython3
# pylint: disable=relative-import,protected-access,unused-argument
# pylint: disable=protected-access,unused-argument
# Copyright 2017 The WebRTC project authors. All Rights Reserved. # Copyright 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -10,12 +11,7 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import unittest import unittest
try: from mock import patch
# python 3.3+
from unittest.mock import patch
except ImportError:
# From site-package
from mock import patch
from generate_licenses import LicenseBuilder from generate_licenses import LicenseBuilder
@ -41,11 +37,11 @@ class TestLicenseBuilder(unittest.TestCase):
LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'), LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'),
'libname1') 'libname1')
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName( LicenseBuilder._ParseLibraryName('//a/b/third_party/libname2:c(d)'),
'//a/b/third_party/libname2:c(d)'), 'libname2') 'libname2')
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName( LicenseBuilder._ParseLibraryName('//a/b/third_party/libname3/c:d(e)'),
'//a/b/third_party/libname3/c:d(e)'), 'libname3') 'libname3')
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None) LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None)
@ -60,8 +56,7 @@ class TestLicenseBuilder(unittest.TestCase):
} }
builder = LicenseBuilder([], [], lib_dict, {}) builder = LicenseBuilder([], [], lib_dict, {})
self.assertEqual( self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname:bar_java'), builder._ParseLibrary('//a/b/third_party/libname:bar_java'), 'libname')
'libname')
def testParseLibraryRegExMatch(self): def testParseLibraryRegExMatch(self):
lib_regex_dict = { lib_regex_dict = {
@ -87,15 +82,13 @@ class TestLicenseBuilder(unittest.TestCase):
} }
builder = LicenseBuilder([], [], {}, lib_regex_dict) builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual( self.assertEqual(
builder._ParseLibrary( builder._ParseLibrary('//a/b/third_party/libname/fooHAHA:bar_java'),
'//a/b/third_party/libname/fooHAHA:bar_java'),
'libname/foo.*bar.*') 'libname/foo.*bar.*')
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGetThirdPartyLibrariesWithoutRegex(self): def testGetThirdPartyLibrariesWithoutRegex(self):
builder = LicenseBuilder([], [], {}, {}) builder = LicenseBuilder([], [], {}, {})
self.assertEqual( self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
builder._GetThirdPartyLibraries('out/arm', 'target1'),
set(['libname1', 'libname2', 'libname3'])) set(['libname1', 'libname2', 'libname3']))
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
@ -104,8 +97,7 @@ class TestLicenseBuilder(unittest.TestCase):
'libname2:c.*': ['path/to/LICENSE'], 'libname2:c.*': ['path/to/LICENSE'],
} }
builder = LicenseBuilder([], [], {}, lib_regex_dict) builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual( self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
builder._GetThirdPartyLibraries('out/arm', 'target1'),
set(['libname1', 'libname2:c.*', 'libname3'])) set(['libname1', 'libname2:c.*', 'libname3']))
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
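
Note: the tests keep using `patch` as a decorator to swap `LicenseBuilder._RunGN` for a fake; only the import loses its Python 2 fallback. A self-contained sketch of the same pattern using the stdlib `unittest.mock` (class and method names here are illustrative, not the real ones):

```
import unittest
from unittest.mock import patch


class Builder:
    @staticmethod
    def _RunGN(out_dir, target):
        raise RuntimeError('would shell out to gn in the real script')


class ExampleTest(unittest.TestCase):
    def testFakeReplacesStaticMethod(self):
        # patch.object swaps the method only for the duration of the with-block.
        with patch.object(Builder, '_RunGN', return_value='{}'):
            self.assertEqual(Builder._RunGN('out/arm', 'target1'), '{}')


if __name__ == '__main__':
    unittest.main()
```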

View File

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -7,6 +9,10 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True
def _CommonChecks(input_api, output_api): def _CommonChecks(input_api, output_api):
results = [] results = []
@ -27,15 +33,16 @@ def _CommonChecks(input_api, output_api):
results.extend(input_api.RunTests(pylint_checks)) results.extend(input_api.RunTests(pylint_checks))
# Run the MB unittests. # Run the MB unittests.
results.extend(input_api.canned_checks.RunUnitTestsInDirectory( results.extend(
input_api, input_api.canned_checks.RunUnitTestsInDirectory(input_api,
output_api, output_api,
'.', '.',
[ r'^.+_unittest\.py$'], [r'^.+_unittest\.py$'],
skip_shebang_check=True)) skip_shebang_check=False,
run_on_python2=False))
# Validate the format of the mb_config.pyl file. # Validate the format of the mb_config.pyl file.
cmd = [input_api.python_executable, 'mb.py', 'validate'] cmd = [input_api.python3_executable, 'mb.py', 'validate']
kwargs = {'cwd': input_api.PresubmitLocalPath()} kwargs = {'cwd': input_api.PresubmitLocalPath()}
results.extend(input_api.RunTests([ results.extend(input_api.RunTests([
input_api.Command(name='mb_validate', input_api.Command(name='mb_validate',

View File

@ -3,4 +3,4 @@ setlocal
:: This is required with cygwin only. :: This is required with cygwin only.
PATH=%~dp0;%PATH% PATH=%~dp0;%PATH%
set PYTHONDONTWRITEBYTECODE=1 set PYTHONDONTWRITEBYTECODE=1
call python "%~dp0mb.py" %* call vpython3 "%~dp0mb.py" %*

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -13,8 +14,6 @@ MB is a wrapper script for GN that can be used to generate build files
for sets of canned configurations and analyze them. for sets of canned configurations and analyze them.
""" """
from __future__ import print_function
import argparse import argparse
import ast import ast
import errno import errno
@ -28,10 +27,7 @@ import sys
import subprocess import subprocess
import tempfile import tempfile
import traceback import traceback
try: from urllib.request import urlopen
from urllib2 import urlopen # for Python2
except ImportError:
from urllib.request import urlopen # for Python3
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR)) SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
@ -280,7 +276,7 @@ class MetaBuildWrapper(object):
def CmdExport(self): def CmdExport(self):
self.ReadConfigFile() self.ReadConfigFile()
obj = {} obj = {}
for builder_group, builders in self.builder_groups.items(): for builder_group, builders in list(self.builder_groups.items()):
obj[builder_group] = {} obj[builder_group] = {}
for builder in builders: for builder in builders:
config = self.builder_groups[builder_group][builder] config = self.builder_groups[builder_group][builder]
@ -290,7 +286,7 @@ class MetaBuildWrapper(object):
if isinstance(config, dict): if isinstance(config, dict):
args = { args = {
k: self.FlattenConfig(v)['gn_args'] k: self.FlattenConfig(v)['gn_args']
for k, v in config.items() for k, v in list(config.items())
} }
elif config.startswith('//'): elif config.startswith('//'):
args = config args = config
@ -476,15 +472,15 @@ class MetaBuildWrapper(object):
# Build a list of all of the configs referenced by builders. # Build a list of all of the configs referenced by builders.
all_configs = {} all_configs = {}
for builder_group in self.builder_groups: for builder_group in self.builder_groups:
for config in self.builder_groups[builder_group].values(): for config in list(self.builder_groups[builder_group].values()):
if isinstance(config, dict): if isinstance(config, dict):
for c in config.values(): for c in list(config.values()):
all_configs[c] = builder_group all_configs[c] = builder_group
else: else:
all_configs[config] = builder_group all_configs[config] = builder_group
# Check that every referenced args file or config actually exists. # Check that every referenced args file or config actually exists.
for config, loc in all_configs.items(): for config, loc in list(all_configs.items()):
if config.startswith('//'): if config.startswith('//'):
if not self.Exists(self.ToAbsPath(config)): if not self.Exists(self.ToAbsPath(config)):
errs.append('Unknown args file "%s" referenced from "%s".' % errs.append('Unknown args file "%s" referenced from "%s".' %
@ -500,7 +496,7 @@ class MetaBuildWrapper(object):
# Figure out the whole list of mixins, and check that every mixin # Figure out the whole list of mixins, and check that every mixin
# listed by a config or another mixin actually exists. # listed by a config or another mixin actually exists.
referenced_mixins = set() referenced_mixins = set()
for config, mixins in self.configs.items(): for config, mixins in list(self.configs.items()):
for mixin in mixins: for mixin in mixins:
if not mixin in self.mixins: if not mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by config "%s".' % errs.append('Unknown mixin "%s" referenced by config "%s".' %
@ -1172,7 +1168,7 @@ class MetaBuildWrapper(object):
self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var]))) self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
if cmd[0] == self.executable: if cmd[0] == self.executable:
cmd = ['python'] + cmd[1:] cmd = ['vpython3'] + cmd[1:]
self.Print(*[shell_quoter(arg) for arg in cmd]) self.Print(*[shell_quoter(arg) for arg in cmd])
def PrintJSON(self, obj): def PrintJSON(self, obj):
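
Note: with Python 2 gone, the `urllib2`/`urllib.request` fallback in mb.py reduces to the single Python 3 import. A minimal sketch of the call (the URL is illustrative only; responses are bytes in Python 3):

```
from urllib.request import urlopen

with urlopen('https://chromium.googlesource.com/?format=TEXT') as response:
    payload = response.read()      # bytes; call .decode() if text is needed
print('%d bytes fetched' % len(payload))
```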

View File

@ -1,4 +1,5 @@
#!/usr/bin/python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,10 +12,7 @@
import ast import ast
import json import json
try: from io import StringIO # for Python3
from StringIO import StringIO # for Python2
except ImportError:
from io import StringIO # for Python3
import os import os
import re import re
import sys import sys
@ -35,14 +33,14 @@ class FakeMBW(mb.MetaBuildWrapper):
self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\' self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\'
'gn_isolate_map.pyl') 'gn_isolate_map.pyl')
self.platform = 'win32' self.platform = 'win32'
self.executable = 'c:\\python\\python.exe' self.executable = 'c:\\python\\vpython3.exe'
self.sep = '\\' self.sep = '\\'
self.cwd = 'c:\\fake_src\\out\\Default' self.cwd = 'c:\\fake_src\\out\\Default'
else: else:
self.src_dir = '/fake_src' self.src_dir = '/fake_src'
self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl' self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl'
self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl' self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl'
self.executable = '/usr/bin/python' self.executable = '/usr/bin/vpython3'
self.platform = 'linux2' self.platform = 'linux2'
self.sep = '/' self.sep = '/'
self.cwd = '/fake_src/out/Default' self.cwd = '/fake_src/out/Default'
@ -197,7 +195,7 @@ class UnitTest(unittest.TestCase):
mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'), mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'),
'is_debug = false\n') 'is_debug = false\n')
if files: if files:
for path, contents in files.items(): for path, contents in list(files.items()):
mbw.files[path] = contents mbw.files[path] = contents
return mbw return mbw
@ -846,8 +844,8 @@ class UnitTest(unittest.TestCase):
'/fake_src/out/Default/base_unittests.archive.json': '/fake_src/out/Default/base_unittests.archive.json':
("{\"base_unittests\":\"fake_hash\"}"), ("{\"base_unittests\":\"fake_hash\"}"),
'/fake_src/third_party/depot_tools/cipd_manifest.txt': '/fake_src/third_party/depot_tools/cipd_manifest.txt':
("# vpython\n" ("# vpython3\n"
"/some/vpython/pkg git_revision:deadbeef\n"), "/some/vpython3/pkg git_revision:deadbeef\n"),
} }
task_json = json.dumps({'tasks': [{'task_id': '00000'}]}) task_json = json.dumps({'tasks': [{'task_id': '00000'}]})
collect_json = json.dumps({'00000': {'results': {}}}) collect_json = json.dumps({'00000': {'results': {}}})
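
Note: the unit test drops the `StringIO.StringIO` fallback; in Python 3 the text buffer lives in `io` and accepts only `str`. A tiny illustration:

```
from io import StringIO

buf = StringIO()
buf.write('fake mb output\n')      # str only; bytes would need io.BytesIO
print(buf.getvalue(), end='')
```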

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -9,7 +10,7 @@
"""Configuration class for network emulation.""" """Configuration class for network emulation."""
class ConnectionConfig(object): class ConnectionConfig:
"""Configuration containing the characteristics of a network connection.""" """Configuration containing the characteristics of a network connection."""
def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms, def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms,
@ -31,6 +32,5 @@ class ConnectionConfig(object):
""" """
left_aligned_name = self.name.ljust(24, ' ') left_aligned_name = self.name.ljust(24, ' ')
return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % ( return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % (
self.num, left_aligned_name, self.receive_bw_kbps, self.num, left_aligned_name, self.receive_bw_kbps, self.send_bw_kbps,
self.send_bw_kbps, self.queue_slots, self.delay_ms, self.queue_slots, self.delay_ms, self.packet_loss_percent)
self.packet_loss_percent)

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -74,8 +75,7 @@ def _ParseArgs():
default=_DEFAULT_PRESET_ID, default=_DEFAULT_PRESET_ID,
help=('ConnectionConfig configuration, specified by ID. ' help=('ConnectionConfig configuration, specified by ID. '
'Default: %default')) 'Default: %default'))
parser.add_option( parser.add_option('-r',
'-r',
'--receive-bw', '--receive-bw',
type='int', type='int',
default=_DEFAULT_PRESET.receive_bw_kbps, default=_DEFAULT_PRESET.receive_bw_kbps,
@ -95,19 +95,16 @@ def _ParseArgs():
type='float', type='float',
default=_DEFAULT_PRESET.packet_loss_percent, default=_DEFAULT_PRESET.packet_loss_percent,
help=('Packet loss in %. Default: %default')) help=('Packet loss in %. Default: %default'))
parser.add_option( parser.add_option('-q',
'-q',
'--queue', '--queue',
type='int', type='int',
default=_DEFAULT_PRESET.queue_slots, default=_DEFAULT_PRESET.queue_slots,
help=('Queue size as number of slots. Default: %default')) help=('Queue size as number of slots. Default: %default'))
parser.add_option( parser.add_option('--port-range',
'--port-range',
default='%s,%s' % _DEFAULT_PORT_RANGE, default='%s,%s' % _DEFAULT_PORT_RANGE,
help=('Range of ports for constrained network. Specify as ' help=('Range of ports for constrained network. Specify as '
'two comma separated integers. Default: %default')) 'two comma separated integers. Default: %default'))
parser.add_option( parser.add_option('--target-ip',
'--target-ip',
default=None, default=None,
help=('The interface IP address to apply the rules for. ' help=('The interface IP address to apply the rules for. '
'Default: the external facing interface IP address.')) 'Default: the external facing interface IP address.'))
@ -121,7 +118,7 @@ def _ParseArgs():
options = parser.parse_args()[0] options = parser.parse_args()[0]
# Find preset by ID, if specified. # Find preset by ID, if specified.
if options.preset and not _PRESETS_DICT.has_key(options.preset): if options.preset and options.preset not in _PRESETS_DICT:
parser.error('Invalid preset: %s' % options.preset) parser.error('Invalid preset: %s' % options.preset)
# Simple validation of the IP address, if supplied. # Simple validation of the IP address, if supplied.
@ -129,8 +126,7 @@ def _ParseArgs():
try: try:
socket.inet_aton(options.target_ip) socket.inet_aton(options.target_ip)
except socket.error: except socket.error:
parser.error('Invalid IP address specified: %s' % parser.error('Invalid IP address specified: %s' % options.target_ip)
options.target_ip)
# Convert port range into the desired tuple format. # Convert port range into the desired tuple format.
try: try:
@ -138,8 +134,7 @@ def _ParseArgs():
options.port_range = tuple( options.port_range = tuple(
int(port) for port in options.port_range.split(',')) int(port) for port in options.port_range.split(','))
if len(options.port_range) != 2: if len(options.port_range) != 2:
parser.error( parser.error('Invalid port range specified, please specify two '
'Invalid port range specified, please specify two '
'integers separated by a comma.') 'integers separated by a comma.')
except ValueError: except ValueError:
parser.error('Invalid port range specified.') parser.error('Invalid port range specified.')
@ -195,13 +190,12 @@ def main():
' Delay : %s ms\n' ' Delay : %s ms\n'
' Packet loss : %s %%\n' ' Packet loss : %s %%\n'
' Queue slots : %s', connection_config.receive_bw_kbps, ' Queue slots : %s', connection_config.receive_bw_kbps,
connection_config.receive_bw_kbps / 8, connection_config.receive_bw_kbps / 8, connection_config.send_bw_kbps,
connection_config.send_bw_kbps, connection_config.send_bw_kbps / 8, connection_config.send_bw_kbps / 8, connection_config.delay_ms,
connection_config.delay_ms, connection_config.packet_loss_percent, connection_config.packet_loss_percent, connection_config.queue_slots)
connection_config.queue_slots)
logging.info('Affected traffic: IP traffic on ports %s-%s', logging.info('Affected traffic: IP traffic on ports %s-%s',
options.port_range[0], options.port_range[1]) options.port_range[0], options.port_range[1])
raw_input('Press Enter to abort Network Emulation...') input('Press Enter to abort Network Emulation...')
logging.info('Flushing all Dummynet rules...') logging.info('Flushing all Dummynet rules...')
network_emulator.Cleanup() network_emulator.Cleanup()
logging.info('Completed Network Emulation.') logging.info('Completed Network Emulation.')
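
Note: two Python 2 removals show up in this file: `dict.has_key()` is gone (membership tests use `in`/`not in`) and `raw_input()` was renamed to `input()`. A short sketch with made-up preset values:

```
_PRESETS_DICT = {1: 'DSL', 2: 'Cable', 3: 'Wifi'}   # illustrative values only

preset = 2
if preset not in _PRESETS_DICT:                     # was: not _PRESETS_DICT.has_key(preset)
    raise SystemExit('Invalid preset: %s' % preset)

input('Press Enter to abort Network Emulation...')  # was: raw_input(...)
```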

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -40,7 +41,7 @@ class NetworkEmulatorError(BaseException):
self.error = error self.error = error
class NetworkEmulator(object): class NetworkEmulator:
"""A network emulator that can constrain the network using Dummynet.""" """A network emulator that can constrain the network using Dummynet."""
def __init__(self, connection_config, port_range): def __init__(self, connection_config, port_range):
@ -70,19 +71,17 @@ class NetworkEmulator(object):
self._connection_config.queue_slots) self._connection_config.queue_slots)
logging.debug('Created receive pipe: %s', receive_pipe_id) logging.debug('Created receive pipe: %s', receive_pipe_id)
send_pipe_id = self._CreateDummynetPipe( send_pipe_id = self._CreateDummynetPipe(
self._connection_config.send_bw_kbps, self._connection_config.send_bw_kbps, self._connection_config.delay_ms,
self._connection_config.delay_ms,
self._connection_config.packet_loss_percent, self._connection_config.packet_loss_percent,
self._connection_config.queue_slots) self._connection_config.queue_slots)
logging.debug('Created send pipe: %s', send_pipe_id) logging.debug('Created send pipe: %s', send_pipe_id)
# Adding the rules will start the emulation. # Adding the rules will start the emulation.
incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any', incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any',
target_ip, target_ip, self._port_range)
self._port_range)
logging.debug('Created incoming rule: %s', incoming_rule_id) logging.debug('Created incoming rule: %s', incoming_rule_id)
outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, 'any',
'any', self._port_range) self._port_range)
logging.debug('Created outgoing rule: %s', outgoing_rule_id) logging.debug('Created outgoing rule: %s', outgoing_rule_id)
@staticmethod @staticmethod
@ -95,18 +94,15 @@ class NetworkEmulator(object):
""" """
try: try:
if os.getuid() != 0: if os.getuid() != 0:
raise NetworkEmulatorError( raise NetworkEmulatorError('You must run this script with sudo.')
'You must run this script with sudo.') except AttributeError as permission_error:
except AttributeError:
# AttributeError will be raised on Windows. # AttributeError will be raised on Windows.
if ctypes.windll.shell32.IsUserAnAdmin() == 0: if ctypes.windll.shell32.IsUserAnAdmin() == 0:
raise NetworkEmulatorError( raise NetworkEmulatorError('You must run this script with administrator'
'You must run this script with administrator' ' privileges.') from permission_error
' privileges.')
def _CreateDummynetRule(self, pipe_id, from_address, to_address, def _CreateDummynetRule(self, pipe_id, from_address, to_address, port_range):
port_range):
"""Creates a network emulation rule and returns its ID. """Creates a network emulation rule and returns its ID.
Args: Args:
@ -123,19 +119,17 @@ class NetworkEmulator(object):
""" """
self._rule_counter += 100 self._rule_counter += 100
add_part = [ add_part = [
'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', 'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', from_address,
from_address, 'to', to_address 'to', to_address
] ]
_RunIpfwCommand(add_part + _RunIpfwCommand(add_part + ['src-port', '%s-%s' % port_range],
['src-port', '%s-%s' % port_range],
'Failed to add Dummynet src-port rule.') 'Failed to add Dummynet src-port rule.')
_RunIpfwCommand(add_part + _RunIpfwCommand(add_part + ['dst-port', '%s-%s' % port_range],
['dst-port', '%s-%s' % port_range],
'Failed to add Dummynet dst-port rule.') 'Failed to add Dummynet dst-port rule.')
return self._rule_counter return self._rule_counter
def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, packet_loss_percent,
packet_loss_percent, queue_slots): queue_slots):
"""Creates a Dummynet pipe and return its ID. """Creates a Dummynet pipe and return its ID.
Args: Args:
@ -155,8 +149,7 @@ class NetworkEmulator(object):
] ]
error_message = 'Failed to create Dummynet pipe. ' error_message = 'Failed to create Dummynet pipe. '
if sys.platform.startswith('linux'): if sys.platform.startswith('linux'):
error_message += ( error_message += ('Make sure you have loaded the ipfw_mod.ko module to '
'Make sure you have loaded the ipfw_mod.ko module to '
'your kernel (sudo insmod /path/to/ipfw_mod.ko).') 'your kernel (sudo insmod /path/to/ipfw_mod.ko).')
_RunIpfwCommand(cmd, error_message) _RunIpfwCommand(cmd, error_message)
return self._pipe_counter return self._pipe_counter
@ -197,6 +190,6 @@ def _RunIpfwCommand(command, fail_msg=None):
stderr=subprocess.PIPE) stderr=subprocess.PIPE)
output, error = process.communicate() output, error = process.communicate()
if process.returncode != 0: if process.returncode != 0:
raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, output,
output, error) error)
return output.strip() return output.strip()
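
Note: the `raise ... from permission_error` form added above is Python 3 exception chaining; the original `AttributeError` is preserved as `__cause__` instead of being swallowed. A condensed sketch of the same permission check (exception base and Windows branch simplified for the example):

```
import os


class NetworkEmulatorError(Exception):
    pass


def check_permissions():
    try:
        if os.getuid() != 0:
            raise NetworkEmulatorError('You must run this script with sudo.')
    except AttributeError as permission_error:
        # os.getuid() does not exist on Windows; chain so the cause stays visible.
        raise NetworkEmulatorError(
            'You must run this script with administrator privileges.'
        ) from permission_error
```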

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -8,12 +9,13 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import datetime import datetime
import httplib2
import json import json
import subprocess import subprocess
import time import time
import zlib import zlib
import httplib2
from tracing.value import histogram from tracing.value import histogram
from tracing.value import histogram_set from tracing.value import histogram_set
from tracing.value.diagnostics import generic_set from tracing.value.diagnostics import generic_set
@ -26,7 +28,6 @@ def _GenerateOauthToken():
if p.wait() == 0: if p.wait() == 0:
output = p.stdout.read() output = p.stdout.read()
return output.strip() return output.strip()
else:
raise RuntimeError( raise RuntimeError(
'Error generating authentication token.\nStdout: %s\nStderr:%s' % 'Error generating authentication token.\nStdout: %s\nStderr:%s' %
(p.stdout.read(), p.stderr.read())) (p.stdout.read(), p.stderr.read()))
@ -54,7 +55,7 @@ def _SendHistogramSet(url, histograms):
else: else:
data = zlib.compress(serialized) data = zlib.compress(serialized)
print 'Sending %d bytes to %s.' % (len(data), url + '/add_histograms') print('Sending %d bytes to %s.' % (len(data), url + '/add_histograms'))
http = httplib2.Http() http = httplib2.Http()
response, content = http.request(url + '/add_histograms', response, content = http.request(url + '/add_histograms',
@ -97,12 +98,13 @@ def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
next_poll_time = datetime.datetime.now() + wait_polling_period next_poll_time = datetime.datetime.now() + wait_polling_period
response, content = http.request(url + '/uploads/' + upload_token, response, content = http.request(url + '/uploads/' + upload_token,
method='GET', headers=headers) method='GET',
headers=headers)
print 'Upload state polled. Response: %r.' % content print('Upload state polled. Response: %r.' % content)
if not oauth_refreshed and response.status == 403: if not oauth_refreshed and response.status == 403:
print 'Oauth token refreshed. Continue polling.' print('Oauth token refreshed. Continue polling.')
headers = _CreateHeaders(_GenerateOauthToken()) headers = _CreateHeaders(_GenerateOauthToken())
oauth_refreshed = True oauth_refreshed = True
continue continue
@ -143,28 +145,27 @@ def _CheckFullUploadInfo(url, upload_token,
response, content = http.request(url + '/uploads/' + upload_token + response, content = http.request(url + '/uploads/' + upload_token +
'?additional_info=measurements', '?additional_info=measurements',
method='GET', headers=headers) method='GET',
headers=headers)
if response.status != 200: if response.status != 200:
print 'Failed to reach the dashboard to get full upload info.' print('Failed to reach the dashboard to get full upload info.')
return False return False
resp_json = json.loads(content) resp_json = json.loads(content)
print 'Full upload info: %s.' % json.dumps(resp_json, indent=4) print('Full upload info: %s.' % json.dumps(resp_json, indent=4))
if 'measurements' in resp_json: if 'measurements' in resp_json:
measurements_cnt = len(resp_json['measurements']) measurements_cnt = len(resp_json['measurements'])
not_completed_state_cnt = len([ not_completed_state_cnt = len(
m for m in resp_json['measurements'] [m for m in resp_json['measurements'] if m['state'] != 'COMPLETED'])
if m['state'] != 'COMPLETED'
])
if (measurements_cnt >= min_measurements_amount and if (measurements_cnt >= min_measurements_amount
(not_completed_state_cnt / (measurements_cnt * 1.0) <= and (not_completed_state_cnt /
max_failed_measurements_percent)): (measurements_cnt * 1.0) <= max_failed_measurements_percent)):
print('Not all measurements were confirmed to upload. ' print(('Not all measurements were confirmed to upload. '
'Measurements count: %d, failed to upload or timed out: %d' % 'Measurements count: %d, failed to upload or timed out: %d' %
(measurements_cnt, not_completed_state_cnt)) (measurements_cnt, not_completed_state_cnt)))
return True return True
return False return False
@ -207,9 +208,9 @@ def _AddBuildInfo(histograms, options):
reserved_infos.BUILD_URLS: options.build_page_url, reserved_infos.BUILD_URLS: options.build_page_url,
} }
for k, v in common_diagnostics.items(): for k, v in list(common_diagnostics.items()):
histograms.AddSharedDiagnosticToAllHistograms( histograms.AddSharedDiagnosticToAllHistograms(k.name,
k.name, generic_set.GenericSet([v])) generic_set.GenericSet([v]))
def _DumpOutput(histograms, output_file): def _DumpOutput(histograms, output_file):
@ -227,36 +228,35 @@ def UploadToDashboard(options):
response, content = _SendHistogramSet(options.dashboard_url, histograms) response, content = _SendHistogramSet(options.dashboard_url, histograms)
if response.status != 200: if response.status != 200:
print('Upload failed with %d: %s\n\n%s' % (response.status, print(('Upload failed with %d: %s\n\n%s' %
response.reason, content)) (response.status, response.reason, content)))
return 1 return 1
upload_token = json.loads(content).get('token') upload_token = json.loads(content).get('token')
if not options.wait_for_upload or not upload_token: if not options.wait_for_upload or not upload_token:
print('Received 200 from dashboard. ', print(('Received 200 from dashboard. ',
'Not waiting for the upload status confirmation.') 'Not waiting for the upload status confirmation.'))
return 0 return 0
response, resp_json = _WaitForUploadConfirmation( response, resp_json = _WaitForUploadConfirmation(
options.dashboard_url, options.dashboard_url, upload_token,
upload_token,
datetime.timedelta(seconds=options.wait_timeout_sec), datetime.timedelta(seconds=options.wait_timeout_sec),
datetime.timedelta(seconds=options.wait_polling_period_sec)) datetime.timedelta(seconds=options.wait_polling_period_sec))
if ((resp_json and resp_json['state'] == 'COMPLETED') or if ((resp_json and resp_json['state'] == 'COMPLETED')
_CheckFullUploadInfo(options.dashboard_url, upload_token)): or _CheckFullUploadInfo(options.dashboard_url, upload_token)):
print 'Upload completed.' print('Upload completed.')
return 0 return 0
if response.status != 200: if response.status != 200:
print('Upload status poll failed with %d: %s' % (response.status, print(('Upload status poll failed with %d: %s' %
response.reason)) (response.status, response.reason)))
return 1 return 1
if resp_json['state'] == 'FAILED': if resp_json['state'] == 'FAILED':
print 'Upload failed.' print('Upload failed.')
return 1 return 1
print('Upload wasn\'t completed in a given time: %d seconds.' % print(('Upload wasn\'t completed in a given time: %d seconds.' %
options.wait_timeout_sec) options.wait_timeout_sec))
return 1 return 1
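
Note: one silent behavioral shift in this file is that `/` is true division in Python 3, so the `* 1.0` in the failed-measurements ratio above is no longer required (it is harmless and was kept). For example:

```
measurements_cnt = 10
not_completed_state_cnt = 3

ratio = not_completed_state_cnt / measurements_cnt              # 0.3 in Python 3
old_style = not_completed_state_cnt / (measurements_cnt * 1.0)  # same value
assert ratio == old_style
```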

View File

@ -1,4 +1,5 @@
#!/usr/bin/env vpython #!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -29,42 +30,58 @@ import google.protobuf # pylint: disable=unused-import
def _CreateParser(): def _CreateParser():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--perf-dashboard-machine-group', required=True, parser.add_argument('--perf-dashboard-machine-group',
required=True,
help='The "master" the bots are grouped under. This ' help='The "master" the bots are grouped under. This '
'string is the group in the the perf dashboard path ' 'string is the group in the the perf dashboard path '
'group/bot/perf_id/metric/subtest.') 'group/bot/perf_id/metric/subtest.')
parser.add_argument('--bot', required=True, parser.add_argument('--bot',
required=True,
help='The bot running the test (e.g. ' help='The bot running the test (e.g. '
'webrtc-win-large-tests).') 'webrtc-win-large-tests).')
parser.add_argument('--test-suite', required=True, parser.add_argument('--test-suite',
required=True,
help='The key for the test in the dashboard (i.e. what ' help='The key for the test in the dashboard (i.e. what '
'you select in the top-level test suite selector in ' 'you select in the top-level test suite selector in '
'the dashboard') 'the dashboard')
parser.add_argument('--webrtc-git-hash', required=True, parser.add_argument('--webrtc-git-hash',
required=True,
help='webrtc.googlesource.com commit hash.') help='webrtc.googlesource.com commit hash.')
parser.add_argument('--commit-position', type=int, required=True, parser.add_argument('--commit-position',
type=int,
required=True,
help='Commit pos corresponding to the git hash.') help='Commit pos corresponding to the git hash.')
parser.add_argument('--build-page-url', required=True, parser.add_argument('--build-page-url',
required=True,
help='URL to the build page for this build.') help='URL to the build page for this build.')
parser.add_argument('--dashboard-url', required=True, parser.add_argument('--dashboard-url',
required=True,
help='Which dashboard to use.') help='Which dashboard to use.')
parser.add_argument('--input-results-file', type=argparse.FileType(), parser.add_argument('--input-results-file',
type=argparse.FileType(),
required=True, required=True,
help='A HistogramSet proto file with output from ' help='A HistogramSet proto file with output from '
'WebRTC tests.') 'WebRTC tests.')
parser.add_argument('--output-json-file', type=argparse.FileType('w'), parser.add_argument('--output-json-file',
type=argparse.FileType('w'),
help='Where to write the output (for debugging).') help='Where to write the output (for debugging).')
parser.add_argument('--outdir', required=True, parser.add_argument('--outdir',
required=True,
help='Path to the local out/ dir (usually out/Default)') help='Path to the local out/ dir (usually out/Default)')
parser.add_argument('--wait-for-upload', action='store_true', parser.add_argument('--wait-for-upload',
action='store_true',
help='If specified, script will wait untill Chrome ' help='If specified, script will wait untill Chrome '
'perf dashboard confirms that the data was succesfully ' 'perf dashboard confirms that the data was succesfully '
'proccessed and uploaded') 'proccessed and uploaded')
parser.add_argument('--wait-timeout-sec', type=int, default=1200, parser.add_argument('--wait-timeout-sec',
type=int,
default=1200,
help='Used only if wait-for-upload is True. Maximum ' help='Used only if wait-for-upload is True. Maximum '
'amount of time in seconds that the script will wait ' 'amount of time in seconds that the script will wait '
'for the confirmation.') 'for the confirmation.')
parser.add_argument('--wait-polling-period-sec', type=int, default=120, parser.add_argument('--wait-polling-period-sec',
type=int,
default=120,
help='Used only if wait-for-upload is True. Status ' help='Used only if wait-for-upload is True. Status '
'will be requested from the Dashboard every ' 'will be requested from the Dashboard every '
'wait-polling-period-sec seconds.') 'wait-polling-period-sec seconds.')
@ -82,8 +99,8 @@ def _ConfigurePythonPath(options):
# It would be better if there was an equivalent to py_binary in GN, but # It would be better if there was an equivalent to py_binary in GN, but
# there's not. # there's not.
script_dir = os.path.dirname(os.path.realpath(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
checkout_root = os.path.abspath( checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir,
os.path.join(script_dir, os.pardir, os.pardir)) os.pardir))
sys.path.insert( sys.path.insert(
0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing')) 0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing'))
@ -100,8 +117,7 @@ def _ConfigurePythonPath(options):
# Fail early in case the proto hasn't been built. # Fail early in case the proto hasn't been built.
from tracing.proto import histogram_proto from tracing.proto import histogram_proto
if not histogram_proto.HAS_PROTO: if not histogram_proto.HAS_PROTO:
raise ImportError( raise ImportError('Could not find histogram_pb2. You need to build the '
'Could not find histogram_pb2. You need to build the '
'webrtc_dashboard_upload target before invoking this ' 'webrtc_dashboard_upload target before invoking this '
'script. Expected to find ' 'script. Expected to find '
'histogram_pb2.py in %s.' % histogram_proto_path) 'histogram_pb2.py in %s.' % histogram_proto_path)
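
Note: the reflowed `argparse` definitions keep the same semantics; `type=argparse.FileType(...)` still opens the named file and `type=int` coerces the timeout. A minimal sketch using only flag names that appear in the hunk:

```
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--wait-timeout-sec',
                    type=int,
                    default=1200,
                    help='Maximum time in seconds to wait for the upload.')
parser.add_argument('--output-json-file',
                    type=argparse.FileType('w'),
                    help='Where to write the output (for debugging).')

options = parser.parse_args(['--wait-timeout-sec', '600'])
print(options.wait_timeout_sec)   # 600
```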

View File

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -61,7 +63,7 @@ def RunGnCheck(root_dir=None):
error = RunGnCommand(['gen', '--check', out_dir], root_dir) error = RunGnCommand(['gen', '--check', out_dir], root_dir)
finally: finally:
shutil.rmtree(out_dir, ignore_errors=True) shutil.rmtree(out_dir, ignore_errors=True)
return GN_ERROR_RE.findall(error) if error else [] return GN_ERROR_RE.findall(error.decode('utf-8')) if error else []
def RunNinjaCommand(args, root_dir=None): def RunNinjaCommand(args, root_dir=None):
@ -105,8 +107,8 @@ def GetCompilationCommand(filepath, gn_args, work_dir):
""" """
gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir]) gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir])
if gn_errors: if gn_errors:
raise (RuntimeError('FYI, cannot complete check due to gn error:\n%s\n' raise RuntimeError('FYI, cannot complete check due to gn error:\n%s\n'
'Please open a bug.' % gn_errors)) 'Please open a bug.' % gn_errors)
# Needed for single file compilation. # Needed for single file compilation.
commands = GetCompilationDb(work_dir) commands = GetCompilationDb(work_dir)
@ -117,9 +119,9 @@ def GetCompilationCommand(filepath, gn_args, work_dir):
# Gather defines, include path and flags (such as -std=c++11). # Gather defines, include path and flags (such as -std=c++11).
try: try:
compilation_entry = commands[rel_path] compilation_entry = commands[rel_path]
except KeyError: except KeyError as not_found:
raise ValueError('%s: Not found in compilation database.\n' raise ValueError('%s: Not found in compilation database.\n'
'Please check the path.' % filepath) 'Please check the path.' % filepath) from not_found
command = compilation_entry['command'].split() command = compilation_entry['command'].split()
# Remove troublesome flags. May trigger an error otherwise. # Remove troublesome flags. May trigger an error otherwise.
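
Note: the `.decode('utf-8')` added in RunGnCheck is needed because subprocess pipes yield bytes under Python 3, while `GN_ERROR_RE` is a text pattern. A reduced sketch (regex simplified to a stand-in):

```
import re
import subprocess

GN_ERROR_RE = re.compile(r'ERROR .*')   # simplified stand-in for the real pattern

proc = subprocess.run(['echo', 'ERROR at //example/BUILD.gn'],
                      stdout=subprocess.PIPE)
text = proc.stdout.decode('utf-8')      # bytes -> str before matching
print(GN_ERROR_RE.findall(text))
```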

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -12,7 +12,6 @@ import re
import os import os
import unittest import unittest
#pylint: disable=relative-import
import build_helpers import build_helpers
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
@ -26,7 +25,7 @@ class GnCheckTest(unittest.TestCase):
expected_error = re.compile('ERROR Dependency cycle') expected_error = re.compile('ERROR Dependency cycle')
gn_output = build_helpers.RunGnCheck(test_dir) gn_output = build_helpers.RunGnCheck(test_dir)
self.assertEqual(1, len(gn_output)) self.assertEqual(1, len(gn_output))
self.assertRegexpMatches(gn_output[0], expected_error) self.assertRegex(gn_output[0], expected_error)
if __name__ == '__main__': if __name__ == '__main__':
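
Note: `assertRegexpMatches` is a deprecated Python 2-era alias; `assertRegex` is the same assertion under its Python 3 name. A self-contained version of the check above:

```
import unittest


class GnCheckRegexTest(unittest.TestCase):
    def testErrorLineMatches(self):
        self.assertRegex('ERROR Dependency cycle in //a -> //b',
                         'ERROR Dependency cycle')


if __name__ == '__main__':
    unittest.main()
```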

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -9,7 +10,6 @@
import os import os
import re import re
import string
# TARGET_RE matches a GN target, and extracts the target name and the contents. # TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile( TARGET_RE = re.compile(
@ -66,9 +66,7 @@ def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path):
candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn') candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn')
if file_exists_check(candidate_build_gn_path): if file_exists_check(candidate_build_gn_path):
return candidate_build_gn_path return candidate_build_gn_path
else: candidate_dir = os.path.abspath(os.path.join(candidate_dir, os.pardir))
candidate_dir = os.path.abspath(
os.path.join(candidate_dir, os.pardir))
raise NoBuildGnFoundError( raise NoBuildGnFoundError(
'No BUILD.gn file found for file: `{}`'.format(file_path)) 'No BUILD.gn file found for file: `{}`'.format(file_path))
@ -87,8 +85,8 @@ def IsHeaderInBuildGn(header_path, build_gn_path):
""" """
target_abs_path = os.path.dirname(build_gn_path) target_abs_path = os.path.dirname(build_gn_path)
build_gn_content = _ReadFile(build_gn_path) build_gn_content = _ReadFile(build_gn_path)
headers_in_build_gn = GetHeadersInBuildGnFileSources( headers_in_build_gn = GetHeadersInBuildGnFileSources(build_gn_content,
build_gn_content, target_abs_path) target_abs_path)
return header_path in headers_in_build_gn return header_path in headers_in_build_gn
@ -112,7 +110,7 @@ def GetHeadersInBuildGnFileSources(file_content, target_abs_path):
for source_file_match in SOURCE_FILE_RE.finditer(sources): for source_file_match in SOURCE_FILE_RE.finditer(sources):
source_file = source_file_match.group('source_file') source_file = source_file_match.group('source_file')
if source_file.endswith('.h'): if source_file.endswith('.h'):
source_file_tokens = string.split(source_file, '/') source_file_tokens = source_file.split('/')
headers_in_sources.add( headers_in_sources.add(
os.path.join(target_abs_path, *source_file_tokens)) os.path.join(target_abs_path, *source_file_tokens))
return headers_in_sources return headers_in_sources
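
Note: the `string.split(source_file, '/')` call had to go because the function form was removed from the `string` module in Python 3; only the `str` method remains:

```
import os

source_file = 'video_coding/include/video_codec_interface.h'  # illustrative path
source_file_tokens = source_file.split('/')                   # was string.split(...)
print(os.path.join('/abs/target/dir', *source_file_tokens))
```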

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,14 +12,11 @@ import os
import sys import sys
import unittest import unittest
#pylint: disable=relative-import
import check_orphan_headers import check_orphan_headers
def _GetRootBasedOnPlatform(): def _GetRootBasedOnPlatform():
if sys.platform.startswith('win'): if sys.platform.startswith('win'):
return 'C:\\' return 'C:\\'
else:
return '/' return '/'
@ -31,42 +29,40 @@ class GetBuildGnPathFromFilePathTest(unittest.TestCase):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base', expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base',
'BUILD.gn') 'BUILD.gn')
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', 'base',
'base', 'BUILD.gn') 'BUILD.gn')
src_dir_path = _GetPath('home', 'projects', 'webrtc') src_dir_path = _GetPath('home', 'projects', 'webrtc')
self.assertEqual( self.assertEqual(
expected_build_path, expected_build_path,
check_orphan_headers.GetBuildGnPathFromFilePath( check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
file_path, file_exists, src_dir_path)) src_dir_path))
def testGetBuildPathFromParentDirectory(self): def testGetBuildPathFromParentDirectory(self):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
expected_build_path = _GetPath('home', 'projects', 'webrtc', expected_build_path = _GetPath('home', 'projects', 'webrtc', 'BUILD.gn')
'BUILD.gn')
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
'BUILD.gn') 'BUILD.gn')
src_dir_path = _GetPath('home', 'projects', 'webrtc') src_dir_path = _GetPath('home', 'projects', 'webrtc')
self.assertEqual( self.assertEqual(
expected_build_path, expected_build_path,
check_orphan_headers.GetBuildGnPathFromFilePath( check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
file_path, file_exists, src_dir_path)) src_dir_path))
def testExceptionIfNoBuildGnFilesAreFound(self): def testExceptionIfNoBuildGnFilesAreFound(self):
with self.assertRaises(check_orphan_headers.NoBuildGnFoundError): with self.assertRaises(check_orphan_headers.NoBuildGnFoundError):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
file_exists = lambda p: False file_exists = lambda p: False
src_dir_path = _GetPath('home', 'projects', 'webrtc') src_dir_path = _GetPath('home', 'projects', 'webrtc')
check_orphan_headers.GetBuildGnPathFromFilePath( check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
file_path, file_exists, src_dir_path) src_dir_path)
def testExceptionIfFilePathIsNotAnHeader(self): def testExceptionIfFilePathIsNotAnHeader(self):
with self.assertRaises(check_orphan_headers.WrongFileTypeError): with self.assertRaises(check_orphan_headers.WrongFileTypeError):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.cc')
'foo.cc')
file_exists = lambda p: False file_exists = lambda p: False
src_dir_path = _GetPath('home', 'projects', 'webrtc') src_dir_path = _GetPath('home', 'projects', 'webrtc')
check_orphan_headers.GetBuildGnPathFromFilePath( check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
file_path, file_exists, src_dir_path) src_dir_path)
class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase): class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase):

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -74,9 +74,8 @@ def _CheckBuildFile(build_file_path, packages):
subpackage = subpackages_match.group('subpackage') subpackage = subpackages_match.group('subpackage')
source_file = subpackages_match.group('source_file') source_file = subpackages_match.group('source_file')
if subpackage: if subpackage:
yield PackageBoundaryViolation(build_file_path, yield PackageBoundaryViolation(build_file_path, target_name,
target_name, source_file, source_file, subpackage)
subpackage)
def CheckPackageBoundaries(root_dir, build_files=None): def CheckPackageBoundaries(root_dir, build_files=None):
@ -88,9 +87,7 @@ def CheckPackageBoundaries(root_dir, build_files=None):
for build_file_path in build_files: for build_file_path in build_files:
assert build_file_path.startswith(root_dir) assert build_file_path.startswith(root_dir)
else: else:
build_files = [ build_files = [os.path.join(package, 'BUILD.gn') for package in packages]
os.path.join(package, 'BUILD.gn') for package in packages
]
messages = [] messages = []
for build_file_path in build_files: for build_file_path in build_files:
@ -126,8 +123,8 @@ def main(argv):
for i, message in enumerate(messages): for i, message in enumerate(messages):
if i > 0: if i > 0:
print print()
print message print(message)
return bool(messages) return bool(messages)

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -12,8 +12,7 @@ import ast
import os import os
import unittest import unittest
#pylint: disable=relative-import import check_package_boundaries
from check_package_boundaries import CheckPackageBoundaries
MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.' MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.'
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
@ -32,12 +31,11 @@ class UnitTest(unittest.TestCase):
build_files = None build_files = None
messages = [] messages = []
for violation in CheckPackageBoundaries(test_dir, build_files): for violation in check_package_boundaries.CheckPackageBoundaries(
build_file_path = os.path.relpath(violation.build_file_path, test_dir, build_files):
test_dir) build_file_path = os.path.relpath(violation.build_file_path, test_dir)
build_file_path = build_file_path.replace(os.path.sep, '/') build_file_path = build_file_path.replace(os.path.sep, '/')
messages.append( messages.append(violation._replace(build_file_path=build_file_path))
violation._replace(build_file_path=build_file_path))
expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl')) expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl'))
self.assertListEqual(sorted(expected_messages), sorted(messages)) self.assertListEqual(sorted(expected_messages), sorted(messages))
@ -46,12 +44,11 @@ class UnitTest(unittest.TestCase):
self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors')) self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors'))
def testMultipleErrorsSingleTarget(self): def testMultipleErrorsSingleTarget(self):
self._RunTest( self._RunTest(os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
def testMultipleErrorsMultipleTargets(self): def testMultipleErrorsMultipleTargets(self):
self._RunTest( self._RunTest(os.path.join(TESTDATA_DIR,
os.path.join(TESTDATA_DIR, 'multiple_errors_multiple_targets')) 'multiple_errors_multiple_targets'))
def testCommonPrefix(self): def testCommonPrefix(self):
self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix')) self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix'))
@ -67,7 +64,7 @@ class UnitTest(unittest.TestCase):
def testRelativeFilename(self): def testRelativeFilename(self):
test_dir = os.path.join(TESTDATA_DIR, 'all_build_files') test_dir = os.path.join(TESTDATA_DIR, 'all_build_files')
with self.assertRaises(AssertionError): with self.assertRaises(AssertionError):
CheckPackageBoundaries(test_dir, ["BUILD.gn"]) check_package_boundaries.CheckPackageBoundaries(test_dir, ["BUILD.gn"])
if __name__ == '__main__': if __name__ == '__main__':
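With the relative import replaced by a plain module import, call sites qualify the function through the module name. A small illustrative usage; the root path is a placeholder, and `build_files=None` lets the checker discover `BUILD.gn` files itself, as in the test harness above:

```python
import check_package_boundaries

# Placeholder checkout root; not a path taken from this CL.
violations = list(
    check_package_boundaries.CheckPackageBoundaries('/path/to/webrtc/src', None))
for violation in violations:
    print(violation)
```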
View File
@ -9,7 +9,7 @@ by WebRTC follow these instructions:
2. Launch the script: 2. Launch the script:
``` ```
$ python tools_webrtc/sslroots/generate_sslroots.py roots.pem $ vpython3 tools_webrtc/sslroots/generate_sslroots.py roots.pem
``` ```
3. Step 2 should have generated an ssl_roots.h file right next to roots.pem. 3. Step 2 should have generated an ssl_roots.h file right next to roots.pem.
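For completeness, the same step can be driven from Python; this is only an illustrative wrapper around the README command above, not part of the CL, and it assumes `roots.pem` sits in the current working directory:

```python
import subprocess

# Run the generator exactly as the README describes.
subprocess.check_call(
    ['vpython3', 'tools_webrtc/sslroots/generate_sslroots.py', 'roots.pem'])
```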
View File
@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# -*- coding:utf-8 -*- # -*- coding:utf-8 -*-
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
@ -17,7 +19,7 @@ Arguments:
generated file size. generated file size.
""" """
import commands import subprocess
from optparse import OptionParser from optparse import OptionParser
import os import os
import re import re
@ -42,10 +44,7 @@ def main():
"""The main entrypoint.""" """The main entrypoint."""
parser = OptionParser('usage %prog FILE') parser = OptionParser('usage %prog FILE')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true') parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
parser.add_option('-f', parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true')
'--full_cert',
dest='full_cert',
action='store_true')
options, args = parser.parse_args() options, args = parser.parse_args()
if len(args) < 1: if len(args) < 1:
parser.error('No crt file specified.') parser.error('No crt file specified.')
@ -93,8 +92,8 @@ def _GenCFiles(root_dir, options):
_CHAR_TYPE, options) _CHAR_TYPE, options)
certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE, certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE,
_CHAR_TYPE, options) _CHAR_TYPE, options)
certificate_size_list = _CreateArraySectionHeader( certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE,
_CERTIFICATE_SIZE_VARIABLE, _INT_TYPE, options) _INT_TYPE, options)
for _, _, files in os.walk(root_dir): for _, _, files in os.walk(root_dir):
for current_file in files: for current_file in files:
@ -102,14 +101,12 @@ def _GenCFiles(root_dir, options):
prefix_length = len(_PREFIX) prefix_length = len(_PREFIX)
length = len(current_file) - len(_EXTENSION) length = len(current_file) - len(_EXTENSION)
label = current_file[prefix_length:length] label = current_file[prefix_length:length]
filtered_output, cert_size = _CreateCertSection( filtered_output, cert_size = _CreateCertSection(root_dir, current_file,
root_dir, current_file, label, options) label, options)
output_header_file.write(filtered_output + '\n\n\n') output_header_file.write(filtered_output + '\n\n\n')
if options.full_cert: if options.full_cert:
subject_name_list += _AddLabelToArray( subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY)
label, _SUBJECT_NAME_ARRAY) public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY)
public_key_list += _AddLabelToArray(
label, _PUBLIC_KEY_ARRAY)
certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY) certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
certificate_size_list += (' %s,\n') % (cert_size) certificate_size_list += (' %s,\n') % (cert_size)
@ -135,7 +132,7 @@ def _Cleanup(root_dir):
def _CreateCertSection(root_dir, source_file, label, options): def _CreateCertSection(root_dir, source_file, label, options):
command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file) command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file)
_PrintOutput(command, options) _PrintOutput(command, options)
output = commands.getstatusoutput(command)[1] output = subprocess.getstatusoutput(command)[1]
renamed_output = output.replace('unsigned char XXX_', renamed_output = output.replace('unsigned char XXX_',
'const unsigned char ' + label + '_') 'const unsigned char ' + label + '_')
filtered_output = '' filtered_output = ''
@ -155,8 +152,7 @@ def _CreateCertSection(root_dir, source_file, label, options):
def _CreateOutputHeader(): def _CreateOutputHeader():
output = ( output = ('/*\n'
'/*\n'
' * Copyright 2004 The WebRTC Project Authors. All rights ' ' * Copyright 2004 The WebRTC Project Authors. All rights '
'reserved.\n' 'reserved.\n'
' *\n' ' *\n'
@ -173,7 +169,7 @@ def _CreateOutputHeader():
'// This file is the root certificates in C form that are needed to' '// This file is the root certificates in C form that are needed to'
' connect to\n// Google.\n\n' ' connect to\n// Google.\n\n'
'// It was generated with the following command line:\n' '// It was generated with the following command line:\n'
'// > python tools_webrtc/sslroots/generate_sslroots.py' '// > vpython3 tools_webrtc/sslroots/generate_sslroots.py'
'\n// https://pki.goog/roots.pem\n\n' '\n// https://pki.goog/roots.pem\n\n'
'// clang-format off\n' '// clang-format off\n'
'// Don\'t bother formatting generated code,\n' '// Don\'t bother formatting generated code,\n'
@ -182,7 +178,7 @@ def _CreateOutputHeader():
def _CreateOutputFooter(): def _CreateOutputFooter():
output = ('// clang-format on\n\n' '#endif // RTC_BASE_SSL_ROOTS_H_\n') output = ('// clang-format on\n\n#endif // RTC_BASE_SSL_ROOTS_H_\n')
return output return output
@ -211,7 +207,7 @@ def _SafeName(original_file_name):
def _PrintOutput(output, options): def _PrintOutput(output, options):
if options.verbose: if options.verbose:
print output print(output)
if __name__ == '__main__': if __name__ == '__main__':
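Python 3 drops the `commands` module; `subprocess.getstatusoutput` is the closest replacement and returns an `(exit_status, output)` pair with `output` already decoded to `str`. A standalone sketch — the command here is a placeholder, while the real script composes its `openssl x509` invocation from `root_dir` and the per-certificate file name:

```python
import subprocess

# Placeholder command; requires openssl on PATH.
status, output = subprocess.getstatusoutput('openssl version')
if status == 0:
    print(output)  # str, not bytes, matching what the old commands module returned
```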
View File
@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -37,8 +38,7 @@ def _RemovePreviousUpdateBranch():
if active_branch == UPDATE_BRANCH_NAME: if active_branch == UPDATE_BRANCH_NAME:
active_branch = 'master' active_branch = 'master'
if UPDATE_BRANCH_NAME in branches: if UPDATE_BRANCH_NAME in branches:
logging.info('Removing previous update branch (%s)', logging.info('Removing previous update branch (%s)', UPDATE_BRANCH_NAME)
UPDATE_BRANCH_NAME)
subprocess.check_call(['git', 'checkout', active_branch]) subprocess.check_call(['git', 'checkout', active_branch])
subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME]) subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME])
logging.info('No branch to remove') logging.info('No branch to remove')
@ -46,9 +46,8 @@ def _RemovePreviousUpdateBranch():
def _GetLastAuthor(): def _GetLastAuthor():
"""Returns a string with the author of the last commit.""" """Returns a string with the author of the last commit."""
author = subprocess.check_output(['git', 'log', author = subprocess.check_output(
'-1', ['git', 'log', '-1', '--pretty=format:"%an"']).splitlines()
'--pretty=format:"%an"']).splitlines()
return author return author
@ -85,12 +84,8 @@ def _UpdateWebRTCVersion(filename):
# pylint: disable=line-too-long # pylint: disable=line-too-long
new_content = re.sub( new_content = re.sub(
r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}', r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}',
r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' % (d.year, r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' %
d.month, (d.year, d.month, d.day, d.hour, d.minute, d.second),
d.day,
d.hour,
d.minute,
d.second),
content, content,
flags=re.MULTILINE) flags=re.MULTILINE)
# pylint: enable=line-too-long # pylint: enable=line-too-long
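As a quick worked example of the replacement string above (the timestamp is arbitrary, not taken from the CL):

```python
import datetime

d = datetime.datetime(2021, 12, 31, 23, 59, 5)  # arbitrary example timestamp
stamp = 'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' % (
    d.year, d.month, d.day, d.hour, d.minute, d.second)
print(stamp)  # -> WebRTC source stamp 2021-12-31T23:59:05
```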
@ -109,12 +104,9 @@ def _LocalCommit():
logging.info('Committing changes locally.') logging.info('Committing changes locally.')
d = datetime.datetime.utcnow() d = datetime.datetime.utcnow()
git_author = subprocess.check_output(['git', 'config',
'user.email']).strip()
commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).' commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).'
'\n\nBug: None') '\n\nBug: None')
commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, d.second)
d.second)
subprocess.check_call(['git', 'add', '--update', '.']) subprocess.check_call(['git', 'add', '--update', '.'])
subprocess.check_call(['git', 'commit', '-m', commit_msg]) subprocess.check_call(['git', 'commit', '-m', commit_msg])
@ -127,8 +119,9 @@ def _UploadCL(commit_queue_mode):
- 1: Run trybots but do not submit to CQ. - 1: Run trybots but do not submit to CQ.
- 0: Skip CQ, upload only. - 0: Skip CQ, upload only.
""" """
cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks', cmd = [
'--bypass-watchlist'] 'git', 'cl', 'upload', '--force', '--bypass-hooks', '--bypass-watchlist'
]
if commit_queue_mode >= 2: if commit_queue_mode >= 2:
logging.info('Sending the CL to the CQ...') logging.info('Sending the CL to the CQ...')
cmd.extend(['-o', 'label=Bot-Commit+1']) cmd.extend(['-o', 'label=Bot-Commit+1'])
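One py2/py3 behavior worth flagging around the `subprocess.check_output` calls in this file: under Python 3 the result is `bytes` unless `text=True` (3.7+) or `universal_newlines=True` is passed. Where a decoded string is wanted, a pattern like the following could be used — this is a sketch, not what the CL itself does, and it assumes it runs inside a git checkout:

```python
import subprocess

# Sketch: decode git's output to str up front instead of handling bytes later.
author = subprocess.check_output(
    ['git', 'log', '-1', '--pretty=format:%an'], text=True).strip()
print(author)
```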
View File
@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -75,10 +77,6 @@ _EXTENSION_FLAGS = {
} }
def PathExists(*args):
return os.path.exists(os.path.join(*args))
def FindWebrtcSrcFromFilename(filename): def FindWebrtcSrcFromFilename(filename):
"""Searches for the root of the WebRTC checkout. """Searches for the root of the WebRTC checkout.
@ -92,9 +90,9 @@ def FindWebrtcSrcFromFilename(filename):
""" """
curdir = os.path.normpath(os.path.dirname(filename)) curdir = os.path.normpath(os.path.dirname(filename))
while not (os.path.basename(curdir) == 'src' while not (os.path.basename(curdir) == 'src'
and PathExists(curdir, 'DEPS') and and os.path.exists(os.path.join(curdir, 'DEPS')) and
(PathExists(curdir, '..', '.gclient') (os.path.exists(os.path.join(curdir, '..', '.gclient'))
or PathExists(curdir, '.git'))): or os.path.exists(os.path.join(curdir, '.git')))):
nextdir = os.path.normpath(os.path.join(curdir, '..')) nextdir = os.path.normpath(os.path.join(curdir, '..'))
if nextdir == curdir: if nextdir == curdir:
return None return None
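With the `PathExists` helper removed, the checks are written directly with `os.path.exists`/`os.path.join`. A condensed, self-contained sketch of the walk-up loop (it drops the `.gclient`/`.git` checks for brevity, and the function name is illustrative):

```python
import os

def find_src_root(filename):
    # Walk up from the file until a directory named 'src' containing DEPS is
    # found; return None once the filesystem root is reached.
    curdir = os.path.normpath(os.path.dirname(os.path.abspath(filename)))
    while not (os.path.basename(curdir) == 'src'
               and os.path.exists(os.path.join(curdir, 'DEPS'))):
        nextdir = os.path.normpath(os.path.join(curdir, '..'))
        if nextdir == curdir:
            return None
        curdir = nextdir
    return curdir
```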
@ -248,8 +246,7 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
elif flag.startswith('-std'): elif flag.startswith('-std'):
clang_flags.append(flag) clang_flags.append(flag)
elif flag.startswith('-') and flag[1] in 'DWFfmO': elif flag.startswith('-') and flag[1] in 'DWFfmO':
if (flag == '-Wno-deprecated-register' or if flag in ['-Wno-deprecated-register', '-Wno-header-guard']:
flag == '-Wno-header-guard'):
# These flags causes libclang (3.3) to crash. Remove it until # These flags causes libclang (3.3) to crash. Remove it until
# things are fixed. # things are fixed.
continue continue
@ -266,8 +263,7 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
if sysroot_path.startswith('/'): if sysroot_path.startswith('/'):
clang_flags.append(flag) clang_flags.append(flag)
else: else:
abs_path = os.path.normpath(os.path.join( abs_path = os.path.normpath(os.path.join(out_dir, sysroot_path))
out_dir, sysroot_path))
clang_flags.append('--sysroot=' + abs_path) clang_flags.append('--sysroot=' + abs_path)
return clang_flags return clang_flags
@ -332,8 +328,7 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
if not clang_line: if not clang_line:
return additional_flags return additional_flags
return GetClangOptionsFromCommandLine(clang_line, out_dir, return GetClangOptionsFromCommandLine(clang_line, out_dir, additional_flags)
additional_flags)
def FlagsForFile(filename): def FlagsForFile(filename):
@ -349,8 +344,7 @@ def FlagsForFile(filename):
""" """
abs_filename = os.path.abspath(filename) abs_filename = os.path.abspath(filename)
webrtc_root = FindWebrtcSrcFromFilename(abs_filename) webrtc_root = FindWebrtcSrcFromFilename(abs_filename)
clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, abs_filename)
abs_filename)
# If clang_flags could not be determined, then assume that was due to a # If clang_flags could not be determined, then assume that was due to a
# transient failure. Preventing YCM from caching the flags allows us to # transient failure. Preventing YCM from caching the flags allows us to