tools_webrtc dir converted to py3 + top level PRESUBMIT script

Bug: webrtc:13607
Change-Id: Ib018e43ea977cc24dd71048e68e3343741f7f31b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/249083
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Jeremy Leconte <jleconte@google.com>
Commit-Queue: Christoffer Jansson <jansson@google.com>
Cr-Commit-Position: refs/heads/main@{#35953}
Authored by Christoffer Jansson on 2022-02-08 09:01:12 +01:00, committed by WebRTC LUCI CQ
parent b5cba85c2f
commit 4e8a773b4b
50 changed files with 4570 additions and 4673 deletions
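The hunks below apply the usual Python 2 to Python 3 conversions. As a quick reference, a minimal standalone sketch of the idioms involved (names and values here are illustrative, not taken from the WebRTC tree):

# print statement -> print() function; xrange() -> range()
for year in range(2003, 2006):
    print(year)

# dict.iteritems() -> dict.items()
for path, rule in {'+base': 'allow'}.items():
    print(path, rule)

# exec statement -> exec() function (used when evaluating DEPS files)
scope = {}
exec('foo = 1', {}, scope)
print(scope['foo'])

# `raise IOError, msg` -> `raise IOError(msg)`, plus explicit chaining with `from`
try:
    try:
        {}['missing']
    except KeyError as exc:
        raise Exception('Var is not defined: missing') from exc
except Exception as wrapped:
    print(type(wrapped.__cause__).__name__)  # the original KeyError is kept as the cause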

View file

@ -33,10 +33,16 @@ wheel: <
# Used by tools_webrtc/perf/webrtc_dashboard_upload.py.
wheel: <
name: "infra/python/wheels/httplib2-py2_py3"
version: "version:0.10.3"
name: "infra/python/wheels/httplib2-py3"
version: "version:0.19.1"
>
wheel: <
name: "infra/python/wheels/pyparsing-py2_py3"
version: "version:2.4.7"
>
# Used by:
# build/toolchain/win
wheel: <

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -13,6 +15,9 @@ import sys
from collections import defaultdict
from contextlib import contextmanager
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True
# Files and directories that are *skipped* by cpplint in the presubmit script.
CPPLINT_EXCEPTIONS = [
'api/video_codecs/video_decoder.h',
@ -31,6 +36,9 @@ CPPLINT_EXCEPTIONS = [
'modules/video_capture',
'p2p/base/pseudo_tcp.cc',
'p2p/base/pseudo_tcp.h',
'PRESUBMIT.py',
'presubmit_test_mocks.py',
'presubmit_test.py',
'rtc_base',
'sdk/android/src/jni',
'sdk/objc',
@ -137,8 +145,8 @@ def VerifyNativeApiHeadersListIsValid(input_api, output_api):
"""Ensures the list of native API header directories is up to date."""
non_existing_paths = []
native_api_full_paths = [
input_api.os_path.join(input_api.PresubmitLocalPath(),
*path.split('/')) for path in API_DIRS
input_api.os_path.join(input_api.PresubmitLocalPath(), *path.split('/'))
for path in API_DIRS
]
for path in native_api_full_paths:
if not os.path.isdir(path):
@ -200,8 +208,8 @@ def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter):
files = []
pattern = input_api.re.compile(r'^#include\s*<iostream>',
input_api.re.MULTILINE)
file_filter = lambda x: (input_api.FilterSourceFile(x) and
source_file_filter(x))
file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h'):
continue
@ -209,13 +217,12 @@ def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter):
if pattern.search(contents):
files.append(f)
if len(files):
if len(files) > 0:
return [
output_api.PresubmitError(
'Do not #include <iostream> in header files, since it inserts '
'static initialization into every file including the header. '
'Instead, #include <ostream>. See http://crbug.com/94794',
files)
'Instead, #include <ostream>. See http://crbug.com/94794', files)
]
return []
@ -224,8 +231,8 @@ def CheckNoPragmaOnce(input_api, output_api, source_file_filter):
"""Make sure that banned functions are not used."""
files = []
pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
file_filter = lambda x: (input_api.FilterSourceFile(x) and
source_file_filter(x))
file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h'):
continue
@ -238,11 +245,11 @@ def CheckNoPragmaOnce(input_api, output_api, source_file_filter):
output_api.PresubmitError(
'Do not use #pragma once in header files.\n'
'See http://www.chromium.org/developers/coding-style'
'#TOC-File-headers',
files)
'#TOC-File-headers', files)
]
return []
def CheckNoFRIEND_TEST(# pylint: disable=invalid-name
input_api,
output_api,
@ -308,8 +315,7 @@ def CheckApprovedFilesLintClean(input_api, output_api,
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
# Note that moved/renamed files also count as added.
if f.Action() == 'A' or not IsLintDisabled(disabled_paths,
f.LocalPath()):
if f.Action() == 'A' or not IsLintDisabled(disabled_paths, f.LocalPath()):
files.append(f.AbsoluteLocalPath())
for file_name in files:
@ -337,8 +343,7 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api):
for source_block_match in source_pattern.finditer(contents):
# Find all source list entries starting with ../ in the source block
# (exclude overrides entries).
for file_list_match in file_pattern.finditer(
source_block_match.group(1)):
for file_list_match in file_pattern.finditer(source_block_match.group(1)):
source_file = file_list_match.group(1)
if 'overrides/' not in source_file:
violating_source_entries.append(source_file)
@ -370,6 +375,7 @@ def CheckAbseilDependencies(input_api, gn_files, output_api):
'should be moved to the "absl_deps" parameter.')
errors = []
# pylint: disable=too-many-nested-blocks
for gn_file in gn_files:
gn_file_content = input_api.ReadFile(gn_file)
for target_match in TARGET_RE.finditer(gn_file_content):
@ -382,8 +388,7 @@ def CheckAbseilDependencies(input_api, gn_files, output_api):
for dep in deps:
if re.search(absl_re, dep):
errors.append(
output_api.PresubmitError(
error_msg %
output_api.PresubmitError(error_msg %
(target_name, gn_file.LocalPath())))
break # no need to warn more than once per target
return errors
@ -398,7 +403,7 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
def _MoreThanOneSourceUsed(*sources_lists):
sources_used = 0
for source_list in sources_lists:
if len(source_list):
if len(source_list) > 0:
sources_used += 1
return sources_used > 1
@ -432,8 +437,7 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
c_files = []
cc_files = []
objc_files = []
for file_match in FILE_PATH_RE.finditer(
sources_match.group(1)):
for file_match in FILE_PATH_RE.finditer(sources_match.group(1)):
file_path = file_match.group('file_path')
extension = file_match.group('extension')
if extension == '.c':
@ -444,12 +448,9 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
objc_files.append(file_path + extension)
list_of_sources.append((c_files, cc_files, objc_files))
for c_files_list, cc_files_list, objc_files_list in list_of_sources:
if _MoreThanOneSourceUsed(c_files_list, cc_files_list,
objc_files_list):
all_sources = sorted(c_files_list + cc_files_list +
objc_files_list)
errors[gn_file.LocalPath()].append(
(target_name, all_sources))
if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list):
all_sources = sorted(c_files_list + cc_files_list + objc_files_list)
errors[gn_file.LocalPath()].append((target_name, all_sources))
if errors:
return [
output_api.PresubmitError(
@ -459,7 +460,7 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
'Mixed sources: \n'
'%s\n'
'Violating GN files:\n%s\n' %
(json.dumps(errors, indent=2), '\n'.join(errors.keys())))
(json.dumps(errors, indent=2), '\n'.join(list(errors.keys()))))
]
return []
@ -467,18 +468,16 @@ def CheckNoMixingSources(input_api, gn_files, output_api):
def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api):
cwd = input_api.PresubmitLocalPath()
with _AddToPath(
input_api.os_path.join(cwd, 'tools_webrtc',
'presubmit_checks_lib')):
input_api.os_path.join(cwd, 'tools_webrtc', 'presubmit_checks_lib')):
from check_package_boundaries import CheckPackageBoundaries
build_files = [
os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files
]
build_files = [os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files]
errors = CheckPackageBoundaries(cwd, build_files)[:5]
if errors:
return [
output_api.PresubmitError(
'There are package boundary violations in the following GN '
'files:', long_text='\n\n'.join(str(err) for err in errors))
'files:',
long_text='\n\n'.join(str(err) for err in errors))
]
return []
@ -492,7 +491,7 @@ def CheckNoWarningSuppressionFlagsAreAdded(gn_files,
input_api,
output_api,
error_formatter=_ReportFileAndLine):
"""Ensure warning suppression flags are not added wihtout a reason."""
"""Ensure warning suppression flags are not added without a reason."""
msg = ('Usage of //build/config/clang:extra_warnings is discouraged '
'in WebRTC.\n'
'If you are not adding this code (e.g. you are just moving '
@ -502,8 +501,8 @@ def CheckNoWarningSuppressionFlagsAreAdded(gn_files,
'\n'
'Affected files:\n')
errors = [] # 2-element tuples with (file, line number)
clang_warn_re = input_api.re.compile(
r'//build/config/clang:extra_warnings')
clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings')
# pylint: disable-next=fixme
no_presubmit_re = input_api.re.compile(
r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
for f in gn_files:
@ -541,8 +540,7 @@ def CheckNoStreamUsageIsAdded(input_api,
source_file_filter,
error_formatter=_ReportFileAndLine):
"""Make sure that no more dependencies on stringstream are added."""
error_msg = (
'Usage of <sstream>, <istream> and <ostream> in WebRTC is '
error_msg = ('Usage of <sstream>, <istream> and <ostream> in WebRTC is '
'deprecated.\n'
'This includes the following types:\n'
'std::istringstream, std::ostringstream, std::wistringstream, '
@ -562,19 +560,18 @@ def CheckNoStreamUsageIsAdded(input_api,
'Affected files:\n')
errors = [] # 2-element tuples with (file, line number)
include_re = input_api.re.compile(r'#include <(i|o|s)stream>')
usage_re = input_api.re.compile(
r'std::(w|i|o|io|wi|wo|wio)(string)*stream')
usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream')
no_presubmit_re = input_api.re.compile(
r'// no-presubmit-check TODO\(webrtc:8982\)')
file_filter = lambda x: (input_api.FilterSourceFile(x) and
source_file_filter(x))
file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
x))
def _IsException(file_path):
is_test = any(
file_path.endswith(x) for x in
['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc'])
return (file_path.startswith('examples')
or file_path.startswith('test') or is_test)
file_path.endswith(x)
for x in ['_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc'])
return (file_path.startswith('examples') or file_path.startswith('test')
or is_test)
for f in input_api.AffectedSourceFiles(file_filter):
# Usage of stringstream is allowed under examples/ and in tests.
@ -608,21 +605,20 @@ def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api):
if not surpressed:
result.append(
output_api.PresubmitError(
error_msg %
(affected_file.LocalPath(), line_number)))
error_msg % (affected_file.LocalPath(), line_number)))
return result
def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
result = []
error_msg = (
'check_includes overrides are not allowed since it can cause '
error_msg = ('check_includes overrides are not allowed since it can cause '
'incorrect dependencies to form. It effectively means that your '
'module can include any .h file without depending on its '
'corresponding target. There are some exceptional cases when '
'this is allowed: if so, get approval from a .gn owner in the '
'root OWNERS file.\n'
'Used in: %s (line %d).')
# pylint: disable-next=fixme
no_presubmit_re = input_api.re.compile(
r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
for affected_file in gn_files:
@ -630,8 +626,8 @@ def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
if ('check_includes' in affected_line
and not no_presubmit_re.search(affected_line)):
result.append(
output_api.PresubmitError(
error_msg % (affected_file.LocalPath(), line_number)))
output_api.PresubmitError(error_msg %
(affected_file.LocalPath(), line_number)))
return result
@ -652,13 +648,10 @@ def CheckGnChanges(input_api, output_api):
result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
result.extend(
CheckNoPackageBoundaryViolations(input_api, gn_files, output_api))
result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api,
output_api))
result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api))
result.extend(
CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api))
result.extend(
CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api,
output_api))
CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api))
return result
@ -668,8 +661,8 @@ def CheckGnGen(input_api, output_api):
errors.
"""
with _AddToPath(
input_api.os_path.join(input_api.PresubmitLocalPath(),
'tools_webrtc', 'presubmit_checks_lib')):
input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc',
'presubmit_checks_lib')):
from build_helpers import RunGnCheck
errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5]
if errors:
@ -694,8 +687,7 @@ def CheckUnwantedDependencies(input_api, output_api, source_file_filter):
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
src_path = FindSrcDirPath(input_api.PresubmitLocalPath())
checkdeps_path = input_api.os_path.join(src_path, 'buildtools',
'checkdeps')
checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps')
if not os.path.exists(checkdeps_path):
return [
output_api.PresubmitError(
@ -752,8 +744,7 @@ def CheckCommitMessageBugEntry(input_api, output_api):
"""Check that bug entries are well-formed in commit message."""
bogus_bug_msg = (
'Bogus Bug entry: %s. Please specify the issue tracker prefix and the '
'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.'
)
'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.')
results = []
for bug in input_api.change.BugsFromDescription():
bug = bug.strip()
@ -766,8 +757,7 @@ def CheckCommitMessageBugEntry(input_api, output_api):
prefix_guess = 'chromium'
else:
prefix_guess = 'webrtc'
results.append(
'Bug entry requires issue tracker prefix, e.g. %s:%s' %
results.append('Bug entry requires issue tracker prefix, e.g. %s:%s' %
(prefix_guess, bug))
except ValueError:
results.append(bogus_bug_msg % bug)
@ -788,15 +778,13 @@ def CheckChangeHasBugField(input_api, output_api):
"""
if input_api.change.BugsFromDescription():
return []
else:
return [
output_api.PresubmitError(
'The "Bug: [bug number]" footer is mandatory. Please create a '
'bug and reference it using either of:\n'
' * https://bugs.webrtc.org - reference it using Bug: '
'webrtc:XXXX\n'
' * https://crbug.com - reference it using Bug: chromium:XXXXXX'
)
' * https://crbug.com - reference it using Bug: chromium:XXXXXX')
]
@ -804,8 +792,7 @@ def CheckJSONParseErrors(input_api, output_api, source_file_filter):
"""Check that JSON files do not contain syntax errors."""
def FilterFile(affected_file):
return (input_api.os_path.splitext(
affected_file.LocalPath())[1] == '.json'
return (input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json'
and source_file_filter(affected_file))
def GetJSONParseError(input_api, filename):
@ -823,8 +810,7 @@ def CheckJSONParseErrors(input_api, output_api, source_file_filter):
affected_file.AbsoluteLocalPath())
if parse_error:
results.append(
output_api.PresubmitError(
'%s could not be parsed: %s' %
output_api.PresubmitError('%s could not be parsed: %s' %
(affected_file.LocalPath(), parse_error)))
return results
@ -849,7 +835,8 @@ def RunPythonTests(input_api, output_api):
input_api,
output_api,
directory,
files_to_check=[r'.+_test\.py$']))
files_to_check=[r'.+_test\.py$'],
run_on_python2=False))
return input_api.RunTests(tests, parallel=True)
@ -859,8 +846,8 @@ def CheckUsageOfGoogleProtobufNamespace(input_api, output_api,
files = []
pattern = input_api.re.compile(r'google::protobuf')
proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h')
file_filter = lambda x: (input_api.FilterSourceFile(x) and
source_file_filter(x))
file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
x))
for f in input_api.AffectedSourceFiles(file_filter):
if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']:
continue
@ -872,8 +859,8 @@ def CheckUsageOfGoogleProtobufNamespace(input_api, output_api,
return [
output_api.PresubmitError(
'Please avoid to use namespace `google::protobuf` directly.\n'
'Add a using directive in `%s` and include that header instead.'
% proto_utils_path, files)
'Add a using directive in `%s` and include that header instead.' %
proto_utils_path, files)
]
return []
@ -882,7 +869,7 @@ def _LicenseHeader(input_api):
"""Returns the license header regexp."""
# Accept any year number from 2003 to the current year
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
license_header = (
r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
@ -921,8 +908,11 @@ def CommonChecks(input_api, output_api):
# all python files. This is a temporary solution.
python_file_filter = lambda f: (f.LocalPath().endswith('.py') and
source_file_filter(f))
python_changed_files = [f.LocalPath() for f in input_api.AffectedFiles(
include_deletes=False, file_filter=python_file_filter)]
python_changed_files = [
f.LocalPath()
for f in input_api.AffectedFiles(include_deletes=False,
file_filter=python_file_filter)
]
results.extend(
input_api.canned_checks.RunPylint(
@ -939,13 +929,14 @@ def CommonChecks(input_api, output_api):
r'^testing[\\\/].*\.py$',
r'^third_party[\\\/].*\.py$',
r'^tools[\\\/].*\.py$',
# TODO(phoglund): should arguably be checked.
# TODO(bugs.webrtc.org/13605): should arguably be checked.
r'^tools_webrtc[\\\/]mb[\\\/].*\.py$',
r'^xcodebuild.*[\\\/].*\.py$',
),
pylintrc='pylintrc'))
pylintrc='pylintrc',
version='2.7'))
# TODO(nisse): talk/ is no more, so make below checks simpler?
# TODO(bugs.webrtc.org/13606): talk/ is no more, so make below checks simpler?
# WebRTC can't use the presubmit_canned_checks.PanProjectChecks function
# since we need to have different license checks
# in talk/ and webrtc/directories.
@ -1032,8 +1023,7 @@ def CommonChecks(input_api, output_api):
CheckNewlineAtTheEndOfProtoFiles(
input_api, output_api, source_file_filter=non_third_party_sources))
results.extend(
CheckNoStreamUsageIsAdded(input_api, output_api,
non_third_party_sources))
CheckNoStreamUsageIsAdded(input_api, output_api, non_third_party_sources))
results.extend(
CheckNoTestCaseUsageIsAdded(input_api, output_api,
non_third_party_sources))
@ -1044,8 +1034,7 @@ def CommonChecks(input_api, output_api):
results.extend(
CheckAssertUsage(input_api, output_api, non_third_party_sources))
results.extend(
CheckBannedAbslMakeUnique(input_api, output_api,
non_third_party_sources))
CheckBannedAbslMakeUnique(input_api, output_api, non_third_party_sources))
results.extend(
CheckObjcApiSymbols(input_api, output_api, non_third_party_sources))
return results
@ -1075,8 +1064,7 @@ def CheckApiDepsFileIsUpToDate(input_api, output_api):
path_tokens = [t for t in f.LocalPath().split(os.sep) if t]
if len(path_tokens) > 1:
if (path_tokens[0] not in dirs_to_skip and os.path.isdir(
os.path.join(input_api.PresubmitLocalPath(),
path_tokens[0]))):
os.path.join(input_api.PresubmitLocalPath(), path_tokens[0]))):
dirs_to_check.add(path_tokens[0])
missing_include_rules = set()
@ -1119,7 +1107,7 @@ def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter):
files.append(f)
break
if len(files):
if files:
return [
output_api.PresubmitError(
'Please use std::make_unique instead of absl::make_unique.\n'
@ -1135,8 +1123,8 @@ def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
source_file_filter(f))
files = []
file_filter = lambda x: (input_api.FilterSourceFile(x) and
source_file_filter(x))
file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath():
continue
@ -1148,11 +1136,11 @@ def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
if 'RTC_OBJC_TYPE' not in export_block:
files.append(f.LocalPath())
if len(files):
if len(files) > 0:
return [
output_api.PresubmitError(
'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() '
+ 'macro.\n\n' + 'For example:\n' +
'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' +
'macro.\n\n' + 'For example:\n' +
'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' +
'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' +
'Please fix the following files:', files)
@ -1173,7 +1161,7 @@ def CheckAssertUsage(input_api, output_api, source_file_filter):
files.append(f.LocalPath())
break
if len(files):
if len(files) > 0:
return [
output_api.PresubmitError(
'Usage of assert() has been detected in the following files, '
@ -1199,7 +1187,7 @@ def CheckAbslMemoryInclude(input_api, output_api, source_file_filter):
files.append(f)
break
if len(files):
if len(files) > 0:
return [
output_api.PresubmitError(
'Please include "absl/memory/memory.h" header for '
@ -1213,8 +1201,8 @@ def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(CommonChecks(input_api, output_api))
results.extend(CheckGnGen(input_api, output_api))
results.extend(
input_api.canned_checks.CheckGNFormatted(input_api, output_api))
results.extend(input_api.canned_checks.CheckGNFormatted(
input_api, output_api))
return results
@ -1226,8 +1214,7 @@ def CheckChangeOnCommit(input_api, output_api):
results.extend(
input_api.canned_checks.CheckChangeWasUploaded(input_api, output_api))
results.extend(
input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
input_api.canned_checks.CheckChangeHasDescription(input_api, output_api))
results.extend(CheckChangeHasBugField(input_api, output_api))
results.extend(CheckCommitMessageBugEntry(input_api, output_api))
results.extend(
@ -1248,8 +1235,8 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
os.path.join('tools_webrtc', 'ios', 'SDK'),
]
with _AddToPath(
input_api.os_path.join(input_api.PresubmitLocalPath(),
'tools_webrtc', 'presubmit_checks_lib')):
input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc',
'presubmit_checks_lib')):
from check_orphan_headers import GetBuildGnPathFromFilePath
from check_orphan_headers import IsHeaderInBuildGn
@ -1259,14 +1246,13 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
if f.LocalPath().endswith('.h'):
file_path = os.path.abspath(f.LocalPath())
root_dir = os.getcwd()
gn_file_path = GetBuildGnPathFromFilePath(file_path,
os.path.exists, root_dir)
gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists,
root_dir)
in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path)
if not in_build_gn:
results.append(
output_api.PresubmitError(
error_msg.format(f.LocalPath(),
os.path.relpath(gn_file_path))))
error_msg.format(f.LocalPath(), os.path.relpath(gn_file_path))))
return results
@ -1282,8 +1268,7 @@ def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api,
with open(file_path) as f:
lines = f.readlines()
if len(lines) > 0 and not lines[-1].endswith('\n'):
results.append(
output_api.PresubmitError(error_msg.format(file_path)))
results.append(output_api.PresubmitError(error_msg.format(file_path)))
return results
@ -1297,7 +1282,7 @@ def _ExtractAddRulesFromParsedDeps(parsed_deps):
rule[1:] for rule in parsed_deps.get('include_rules', [])
if rule.startswith('+') or rule.startswith('!')
])
for _, rules in parsed_deps.get('specific_include_rules', {}).iteritems():
for _, rules in parsed_deps.get('specific_include_rules', {}).items():
add_rules.update([
rule[1:] for rule in rules
if rule.startswith('+') or rule.startswith('!')
@ -1309,7 +1294,7 @@ def _ParseDeps(contents):
"""Simple helper for parsing DEPS files."""
# Stubs for handling special syntax in the root DEPS file.
class VarImpl(object):
class VarImpl:
def __init__(self, local_scope):
self._local_scope = local_scope
@ -1317,14 +1302,15 @@ def _ParseDeps(contents):
"""Implements the Var syntax."""
try:
return self._local_scope['vars'][var_name]
except KeyError:
raise Exception('Var is not defined: %s' % var_name)
except KeyError as var_not_defined:
raise Exception('Var is not defined: %s' %
var_name) from var_not_defined
local_scope = {}
global_scope = {
'Var': VarImpl(local_scope).Lookup,
}
exec contents in global_scope, local_scope
exec(contents, global_scope, local_scope)
return local_scope
@ -1371,8 +1357,7 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
filename = input_api.os_path.basename(f.LocalPath())
if filename == 'DEPS':
virtual_depended_on_files.update(
_CalculateAddedDeps(input_api.os_path,
'\n'.join(f.OldContents()),
_CalculateAddedDeps(input_api.os_path, '\n'.join(f.OldContents()),
'\n'.join(f.NewContents())))
if not virtual_depended_on_files:
@ -1383,15 +1368,13 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
return [
output_api.PresubmitNotifyResult(
'--tbr was specified, skipping OWNERS check for DEPS '
'additions'
)
'additions')
]
if input_api.dry_run:
return [
output_api.PresubmitNotifyResult(
'This is a dry run, skipping OWNERS check for DEPS '
'additions'
)
'additions')
]
if not input_api.change.issue:
return [
@ -1405,9 +1388,7 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
owner_email, reviewers = (
input_api.canned_checks.GetCodereviewOwnerAndReviewers(
input_api,
None,
approval_needed=input_api.is_committing))
input_api, None, approval_needed=input_api.is_committing))
owner_email = owner_email or input_api.change.author_email
@ -1415,7 +1396,8 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
virtual_depended_on_files, reviewers.union([owner_email]), [])
missing_files = [
f for f in virtual_depended_on_files
if approval_status[f] != input_api.owners_client.APPROVED]
if approval_status[f] != input_api.owners_client.APPROVED
]
# We strip the /DEPS part that was added by
# _FilesToCheckForIncomingDeps to fake a path to a file in a
@ -1424,7 +1406,6 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
start_deps = path.rfind('/DEPS')
if start_deps != -1:
return path[:start_deps]
else:
return path
unapproved_dependencies = [
@ -1433,8 +1414,7 @@ def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
if unapproved_dependencies:
output_list = [
output(
'You need LGTM from owners of depends-on paths in DEPS that '
output('You need LGTM from owners of depends-on paths in DEPS that '
' were modified in this CL:\n %s' %
'\n '.join(sorted(unapproved_dependencies)))
]
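The checks above repeatedly import helpers through `with _AddToPath(...)`. The helper itself is not part of these hunks; a minimal sketch of such a context manager, assuming the conventional sys.path push/restore implementation:

import sys
from contextlib import contextmanager

@contextmanager
def _AddToPath(*paths):
    # Temporarily extend sys.path so presubmit helpers can be imported,
    # then restore the original search path.
    original_sys_path = sys.path[:]
    sys.path.extend(paths)
    try:
        yield
    finally:
        sys.path = original_sys_path

# Usage mirroring the checks above (path is illustrative):
with _AddToPath('tools_webrtc/presubmit_checks_lib'):
    pass  # e.g. `from build_helpers import RunGnCheck`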

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright 2017 The WebRTC project authors. All Rights Reserved.
#
@ -8,6 +8,7 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
from __future__ import absolute_import
import os
import shutil
import tempfile
@ -88,8 +89,8 @@ class CheckNewlineAtTheEndOfProtoFilesTest(unittest.TestCase):
self.input_api, self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual(
'File %s must end with exactly one newline.' %
self.proto_file_path, str(errors[0]))
'File %s must end with exactly one newline.' % self.proto_file_path,
str(errors[0]))
def testNoErrorIfProtoFileEndsWithNewline(self):
self._GenerateProtoWithNewlineAtTheEnd()
@ -237,8 +238,7 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
self.assertTrue('bar.c' in str(errors[0]))
def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources):
assert len(
sources) == 3, 'This function accepts a list of 3 source files'
assert len(sources) == 3, 'This function accepts a list of 3 source files'
self._GenerateBuildFile(
textwrap.dedent("""
rtc_static_library("bar_foo") {
@ -275,24 +275,16 @@ class CheckAssertUsageTest(unittest.TestCase):
def setUp(self):
self.input_api = MockInputApi()
self.output_api = MockOutputApi()
self._content_with_assert = [
'void Foo() {',
' assert(true);',
'}'
]
self._content_without_assert = [
'void Foo() {',
' RTC_CHECK(true);',
'}'
]
self._content_with_assert = ['void Foo() {', ' assert(true);', '}']
self._content_without_assert = ['void Foo() {', ' RTC_CHECK(true);', '}']
def testDetectsAssertInCcFile(self):
self.input_api.files = [
MockFile('with_assert.cc', self._content_with_assert),
MockFile('without_assert.cc', self._content_without_assert),
]
errors = PRESUBMIT.CheckAssertUsage(
self.input_api, self.output_api, lambda x: True)
errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.cc', errors[0].items[0])
@ -301,8 +293,8 @@ class CheckAssertUsageTest(unittest.TestCase):
MockFile('with_assert.h', self._content_with_assert),
MockFile('without_assert.h', self._content_without_assert),
]
errors = PRESUBMIT.CheckAssertUsage(
self.input_api, self.output_api, lambda x: True)
errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.h', errors[0].items[0])
@ -311,8 +303,8 @@ class CheckAssertUsageTest(unittest.TestCase):
MockFile('with_assert.m', self._content_with_assert),
MockFile('without_assert.m', self._content_without_assert),
]
errors = PRESUBMIT.CheckAssertUsage(
self.input_api, self.output_api, lambda x: True)
errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.m', errors[0].items[0])
@ -321,8 +313,8 @@ class CheckAssertUsageTest(unittest.TestCase):
MockFile('with_assert.mm', self._content_with_assert),
MockFile('without_assert.mm', self._content_without_assert),
]
errors = PRESUBMIT.CheckAssertUsage(
self.input_api, self.output_api, lambda x: True)
errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.output_api, lambda x: True)
self.assertEqual(1, len(errors))
self.assertEqual('with_assert.mm', errors[0].items[0])
@ -330,8 +322,8 @@ class CheckAssertUsageTest(unittest.TestCase):
self.input_api.files = [
MockFile('with_assert.cpp', self._content_with_assert),
]
errors = PRESUBMIT.CheckAssertUsage(
self.input_api, self.output_api, lambda x: True)
errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.output_api, lambda x: True)
self.assertEqual(0, len(errors))

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -9,11 +11,12 @@
# This file is inspired to [1].
# [1] - https://cs.chromium.org/chromium/src/PRESUBMIT_test_mocks.py
from __future__ import absolute_import
import os.path
import re
class MockInputApi(object):
class MockInputApi:
"""Mock class for the InputApi class.
This class can be used for unittests for presubmit by initializing the files
@ -38,34 +41,31 @@ class MockInputApi(object):
yield f
@classmethod
def FilterSourceFile(cls,
affected_file,
files_to_check=(),
files_to_skip=()):
def FilterSourceFile(cls, affected_file, files_to_check=(), files_to_skip=()):
# pylint: disable=unused-argument
return True
def PresubmitLocalPath(self):
return self.presubmit_local_path
def ReadFile(self, affected_file, mode='rU'):
def ReadFile(self, affected_file, mode='r'):
filename = affected_file.AbsoluteLocalPath()
for f in self.files:
if f.LocalPath() == filename:
with open(filename, mode) as f:
return f.read()
# Otherwise, file is not in our mock API.
raise IOError, "No such file or directory: '%s'" % filename
raise IOError("No such file or directory: '%s'" % filename)
class MockOutputApi(object):
class MockOutputApi:
"""Mock class for the OutputApi class.
An instance of this class can be passed to presubmit unittests for outputting

various types of results.
"""
class PresubmitResult(object):
class PresubmitResult:
def __init__(self, message, items=None, long_text=''):
self.message = message
self.items = items
@ -76,12 +76,11 @@ class MockOutputApi(object):
class PresubmitError(PresubmitResult):
def __init__(self, message, items=None, long_text=''):
MockOutputApi.PresubmitResult.__init__(self, message, items,
long_text)
MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
self.type = 'error'
class MockChange(object):
class MockChange:
"""Mock class for Change class.
This class can be used in presubmit unittests to mock the query of the
@ -103,7 +102,7 @@ class MockChange(object):
return self.tags.get(attr)
class MockFile(object):
class MockFile:
"""Mock class for the File class.
This class can be used to form the mock list of changed files in
@ -119,8 +118,7 @@ class MockFile(object):
new_contents = ["Data"]
self._local_path = local_path
self._new_contents = new_contents
self._changed_contents = [(i + 1, l)
for i, l in enumerate(new_contents)]
self._changed_contents = [(i + 1, l) for i, l in enumerate(new_contents)]
self._action = action
self._old_contents = old_contents
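A hedged sketch of how these mocks typically drive a check in a unit test, mirroring CheckAssertUsageTest earlier in this change (module names assume the test files sit next to PRESUBMIT.py):

import PRESUBMIT
from presubmit_test_mocks import MockFile, MockInputApi, MockOutputApi

input_api = MockInputApi()
output_api = MockOutputApi()
input_api.files = [
    MockFile('with_assert.cc', ['void Foo() {', '  assert(true);', '}']),
]
errors = PRESUBMIT.CheckAssertUsage(input_api, output_api, lambda _: True)
assert len(errors) == 1 and errors[0].items[0] == 'with_assert.cc'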

View file

@ -28,6 +28,7 @@ disable=
exec-used,
fixme,
import-error,
import-outside-toplevel,
missing-docstring,
no-init,
no-member,

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -6,12 +8,15 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True
def _LicenseHeader(input_api):
"""Returns the license header regexp."""
# Accept any year number from 2003 to the current year
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
license_header = (
r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
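The year range is turned into a regex alternation; a minimal illustration of the construction above (the sample header string is invented):

import re
import time

current_year = int(time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
license_header = re.compile(
    r'.*? Copyright( \(c\))? %s The WebRTC [Pp]roject [Aa]uthors\. ' % years_re)
print(bool(license_header.match(
    ' *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.')))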

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
@ -60,8 +60,7 @@ def _ParseArgs():
default='libwebrtc.aar',
type=os.path.abspath,
help='Output file of the script.')
parser.add_argument(
'--arch',
parser.add_argument('--arch',
default=DEFAULT_ARCHS,
nargs='*',
help='Architectures to build. Defaults to %(default)s.')
@ -126,9 +125,8 @@ def _EncodeForGN(value):
"""Encodes value as a GN literal."""
if isinstance(value, str):
return '"' + value + '"'
elif isinstance(value, bool):
if isinstance(value, bool):
return repr(value).lower()
else:
return repr(value)
@ -141,13 +139,12 @@ def _GetTargetCpu(arch):
"""Returns target_cpu for the GN build with the given architecture."""
if arch in ['armeabi', 'armeabi-v7a']:
return 'arm'
elif arch == 'arm64-v8a':
if arch == 'arm64-v8a':
return 'arm64'
elif arch == 'x86':
if arch == 'x86':
return 'x86'
elif arch == 'x86_64':
if arch == 'x86_64':
return 'x64'
else:
raise Exception('Unknown arch: ' + arch)
@ -155,11 +152,10 @@ def _GetArmVersion(arch):
"""Returns arm_version for the GN build with the given architecture."""
if arch == 'armeabi':
return 6
elif arch == 'armeabi-v7a':
if arch == 'armeabi-v7a':
return 7
elif arch in ['arm64-v8a', 'x86', 'x86_64']:
if arch in ['arm64-v8a', 'x86', 'x86_64']:
return None
else:
raise Exception('Unknown arch: ' + arch)
@ -180,8 +176,7 @@ def Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches,
if arm_version:
gn_args['arm_version'] = arm_version
gn_args_str = '--args=' + ' '.join(
[k + '=' + _EncodeForGN(v)
for k, v in gn_args.items()] + extra_gn_args)
[k + '=' + _EncodeForGN(v) for k, v in gn_args.items()] + extra_gn_args)
gn_args_list = ['gen', output_directory, gn_args_str]
gn_args_list.extend(extra_gn_switches)
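For reference, here is how the gn_args string assembled above renders, reusing the _EncodeForGN helper from the earlier hunk (argument values are invented):

def _EncodeForGN(value):
    """Encodes value as a GN literal."""
    if isinstance(value, str):
        return '"' + value + '"'
    if isinstance(value, bool):
        return repr(value).lower()
    return repr(value)

gn_args = {'target_os': 'android', 'is_debug': False, 'arm_version': 7}
print('--args=' + ' '.join(k + '=' + _EncodeForGN(v) for k, v in gn_args.items()))
# -> --args=target_os="android" is_debug=false arm_version=7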

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
@ -7,8 +7,7 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Script for building and testing WebRTC AAR.
"""
"""Script for building and testing WebRTC AAR."""
import argparse
import logging
@ -80,8 +79,7 @@ def _TestAAR(build_dir):
logging.info('Testing library.')
# Uninstall any existing version of AppRTCMobile.
logging.info(
'Uninstalling previous AppRTCMobile versions. It is okay for '
logging.info('Uninstalling previous AppRTCMobile versions. It is okay for '
'these commands to fail if AppRTCMobile is not installed.')
subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc'])
subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test'])
@ -92,9 +90,9 @@ def _TestAAR(build_dir):
subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR)
# Then run the tests.
subprocess.check_call([
GRADLEW_BIN,
'connectedDebugAndroidTest',
'-PaarDir=' + os.path.abspath(build_dir)],
GRADLEW_BIN, 'connectedDebugAndroidTest',
'-PaarDir=' + os.path.abspath(build_dir)
],
cwd=AAR_PROJECT_DIR)
except subprocess.CalledProcessError:
logging.exception('Test failure.')
@ -106,8 +104,8 @@ def _TestAAR(build_dir):
def BuildAndTestAar(use_goma, skip_tests, build_dir):
version = '1.0.' + _GetCommitPos()
commit = _GetCommitHash()
logging.info(
'Building and Testing AAR version %s with hash %s', version, commit)
logging.info('Building and Testing AAR version %s with hash %s', version,
commit)
# If build directory is not specified, create a temporary directory.
use_tmp_dir = not build_dir

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -12,9 +13,9 @@ import re
import sys
def replace_double_quote(line):
re_rtc_import = re.compile(
r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)', re.DOTALL)
def _ReplaceDoubleQuote(line):
re_rtc_import = re.compile(r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)',
re.DOTALL)
match = re_rtc_import.match(line)
if not match:
return line
@ -23,10 +24,10 @@ def replace_double_quote(line):
match.group(4), match.group(5))
def process(input_file, output_file):
def Process(input_file, output_file):
with open(input_file, 'rb') as fb, open(output_file, 'wb') as fw:
for line in fb.read().decode('UTF-8').splitlines():
fw.write(replace_double_quote(line).encode('UTF-8'))
fw.write(_ReplaceDoubleQuote(line).encode('UTF-8'))
fw.write(b"\n")
@ -35,12 +36,10 @@ def main():
description=
"Copy headers of framework and replace double-quoted includes to" +
" angle-bracketed respectively.")
parser.add_argument('--input',
help='Input header files to copy.',
type=str)
parser.add_argument('--input', help='Input header files to copy.', type=str)
parser.add_argument('--output', help='Output file.', type=str)
parsed_args = parser.parse_args()
return process(parsed_args.input, parsed_args.output)
return Process(parsed_args.input, parsed_args.output)
if __name__ == '__main__':

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -8,26 +9,25 @@
# be found in the AUTHORS file in the root of the source tree.
import unittest
from copy_framework_header import replace_double_quote
from copy_framework_header import _ReplaceDoubleQuote
class TestCopyFramework(unittest.TestCase):
def testReplaceDoubleQuote(self):
self.assertEqual(replace_double_quote("""#import "RTCMacros.h\""""),
self.assertEqual(_ReplaceDoubleQuote("""#import "RTCMacros.h\""""),
"""#import <WebRTC/RTCMacros.h>""")
self.assertEqual(replace_double_quote("""#import "RTCMacros.h\"\n"""),
self.assertEqual(_ReplaceDoubleQuote("""#import "RTCMacros.h\"\n"""),
"""#import <WebRTC/RTCMacros.h>\n""")
self.assertEqual(
replace_double_quote("""#import "UIDevice+RTCDevice.h\"\n"""),
_ReplaceDoubleQuote("""#import "UIDevice+RTCDevice.h\"\n"""),
"""#import <WebRTC/UIDevice+RTCDevice.h>\n""")
self.assertEqual(
replace_double_quote("#import \"components/video_codec/" +
_ReplaceDoubleQuote("#import \"components/video_codec/" +
"RTCVideoDecoderFactoryH264.h\"\n"),
"""#import <WebRTC/RTCVideoDecoderFactoryH264.h>\n""")
self.assertEqual(
replace_double_quote(
"""@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n"""
),
_ReplaceDoubleQuote(
"""@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n"""),
"""@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""")

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -8,7 +9,6 @@
# be found in the AUTHORS file in the root of the source tree.
"""Script to automatically roll dependencies in the WebRTC DEPS file."""
from __future__ import absolute_import
import argparse
import base64
@ -18,10 +18,7 @@ import os
import re
import subprocess
import sys
import six.moves.urllib.request
import six.moves.urllib.error
import six.moves.urllib.parse
import urllib
def FindSrcDirPath():
@ -153,7 +150,7 @@ def _RunCommand(command,
logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir)
env = os.environ.copy()
if extra_env:
assert all(isinstance(value, str) for value in extra_env.values())
assert all(isinstance(value, str) for value in list(extra_env.values()))
logging.debug('extra env: %s', extra_env)
env.update(extra_env)
p = subprocess.Popen(command,
@ -169,8 +166,7 @@ def _RunCommand(command,
if not ignore_exit_code and p.returncode != 0:
logging.error('Command failed: %s\n'
'stdout:\n%s\n'
'stderr:\n%s\n', ' '.join(command), std_output,
err_output)
'stderr:\n%s\n', ' '.join(command), std_output, err_output)
sys.exit(p.returncode)
return std_output, err_output
@ -229,7 +225,7 @@ def ReadUrlContent(url):
Returns:
A list of lines.
"""
conn = six.moves.urllib.request.urlopen(url)
conn = urllib.request.urlopen(url)
try:
return conn.readlines()
except IOError as e:
@ -253,13 +249,12 @@ def GetMatchingDepsEntries(depsentry_dict, dir_path):
A list of DepsEntry objects.
"""
result = []
for path, depsentry in depsentry_dict.items():
for path, depsentry in list(depsentry_dict.items()):
if path == dir_path:
result.append(depsentry)
else:
parts = path.split('/')
if all(part == parts[i]
for i, part in enumerate(dir_path.split('/'))):
if all(part == parts[i] for i, part in enumerate(dir_path.split('/'))):
result.append(depsentry)
return result
@ -269,7 +264,7 @@ def BuildDepsentryDict(deps_dict):
result = {}
def AddDepsEntries(deps_subdict):
for path, dep in deps_subdict.items():
for path, dep in list(deps_subdict.items()):
if path in result:
continue
if not isinstance(dep, dict):
@ -305,8 +300,8 @@ def _FindChangedCipdPackages(path, old_pkgs, new_pkgs):
for new_pkg in new_pkgs:
old_version = old_pkg['version']
new_version = new_pkg['version']
if (old_pkg['package'] == new_pkg['package'] and
old_version != new_version):
if (old_pkg['package'] == new_pkg['package']
and old_version != new_version):
logging.debug('Roll dependency %s to %s', path, new_version)
yield ChangedCipdPackage(path, old_pkg['package'], old_version,
new_version)
@ -379,9 +374,8 @@ def FindRemovedDeps(webrtc_deps, new_cr_deps):
A list of paths of unexpected disappearing dependencies.
"""
all_removed_deps = _FindNewDeps(new_cr_deps, webrtc_deps)
generated_android_deps = sorted([
path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH)
])
generated_android_deps = sorted(
[path for path in all_removed_deps if path.startswith(ANDROID_DEPS_PATH)])
# Webrtc-only dependencies are handled in CalculateChangedDeps.
other_deps = sorted([
path for path in all_removed_deps
@ -408,7 +402,7 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps):
result = []
webrtc_entries = BuildDepsentryDict(webrtc_deps)
new_cr_entries = BuildDepsentryDict(new_cr_deps)
for path, webrtc_deps_entry in webrtc_entries.items():
for path, webrtc_deps_entry in list(webrtc_entries.items()):
if path in DONT_AUTOROLL_THESE:
continue
cr_deps_entry = new_cr_entries.get(path)
@ -424,8 +418,8 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps):
# Use the revision from Chromium's DEPS file.
new_rev = cr_deps_entry.revision
assert webrtc_deps_entry.url == cr_deps_entry.url, (
'WebRTC DEPS entry %s has a different URL %s than Chromium %s.'
% (path, webrtc_deps_entry.url, cr_deps_entry.url))
'WebRTC DEPS entry %s has a different URL %s than Chromium %s.' %
(path, webrtc_deps_entry.url, cr_deps_entry.url))
else:
if isinstance(webrtc_deps_entry, DepsEntry):
# Use the HEAD of the deps repo.
@ -441,8 +435,8 @@ def CalculateChangedDeps(webrtc_deps, new_cr_deps):
if webrtc_deps_entry.revision != new_rev:
logging.debug('Roll dependency %s to %s', path, new_rev)
result.append(
ChangedDep(path, webrtc_deps_entry.url,
webrtc_deps_entry.revision, new_rev))
ChangedDep(path, webrtc_deps_entry.url, webrtc_deps_entry.revision,
new_rev))
return sorted(result)
@ -462,8 +456,7 @@ def CalculateChangedClang(new_cr_rev):
new_clang_update_py = ReadRemoteCrFile(CLANG_UPDATE_SCRIPT_URL_PATH,
new_cr_rev).splitlines()
new_rev = GetClangRev(new_clang_update_py)
return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev,
new_rev)
return ChangedDep(CLANG_UPDATE_SCRIPT_LOCAL_PATH, None, current_rev, new_rev)
def GenerateCommitMessage(
@ -481,8 +474,7 @@ def GenerateCommitMessage(
git_number_interval = '%s:%s' % (current_commit_pos, new_commit_pos)
commit_msg = [
'Roll chromium_revision %s (%s)\n' %
(rev_interval, git_number_interval),
'Roll chromium_revision %s (%s)\n' % (rev_interval, git_number_interval),
'Change log: %s' % (CHROMIUM_LOG_TEMPLATE % rev_interval),
'Full diff: %s\n' % (CHROMIUM_COMMIT_TEMPLATE % rev_interval)
]
@ -499,8 +491,7 @@ def GenerateCommitMessage(
commit_msg.append('* %s: %s..%s' %
(c.path, c.current_version, c.new_version))
else:
commit_msg.append(
'* %s: %s/+log/%s..%s' %
commit_msg.append('* %s: %s/+log/%s..%s' %
(c.path, c.url, c.current_rev[0:10], c.new_rev[0:10]))
if added_deps_paths:
@ -543,8 +534,7 @@ def UpdateDepsFile(deps_filename, rev_update, changed_deps, new_cr_content):
# Add and remove dependencies. For now: only generated android deps.
# Since gclient cannot add or remove deps, we rely on the fact that
# these android deps are located in one place we can copy/paste.
deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END,
re.DOTALL)
deps_re = re.compile(ANDROID_DEPS_START + '.*' + ANDROID_DEPS_END, re.DOTALL)
new_deps = deps_re.search(new_cr_content)
old_deps = deps_re.search(deps_content)
if not new_deps or not old_deps:
@ -707,8 +697,7 @@ def main():
help=('Ignore if the current branch is not main or if there '
'are uncommitted changes (default: %(default)s).'))
grp = p.add_mutually_exclusive_group()
grp.add_argument(
'--skip-cq',
grp.add_argument('--skip-cq',
action='store_true',
default=False,
help='Skip sending the CL to the CQ (default: %(default)s)')
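The six.moves shims are dropped in favour of the standard library. Note that in Python 3 `urllib.request` is a submodule, so an explicit `import urllib.request` (or equivalent) is needed for `urllib.request.urlopen` to resolve. A minimal sketch of the py3 form of ReadUrlContent, under that assumption (the commented URL is only illustrative):

import urllib.request

def ReadUrlContent(url):
    """Connect to a remote host and read the contents, returning a list of lines."""
    conn = urllib.request.urlopen(url)
    try:
        return conn.readlines()
    finally:
        conn.close()

# lines = ReadUrlContent('https://chromium.googlesource.com/chromium/src/+/main/DEPS?format=TEXT')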

View file

@ -1,4 +1,5 @@
#!/usr/bin/env vpython
#!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -7,7 +8,6 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
from __future__ import absolute_import
import glob
import os
@ -15,16 +15,11 @@ import shutil
import sys
import tempfile
import unittest
import mock
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir)
sys.path.append(PARENT_DIR)
# Workaround for the presubmit, plan only to run in py3 now.
# TODO(webrtc:13418) Remove when py2 presubmit is gone.
if sys.version_info >= (3, 3):
from unittest import mock
else:
import mock
import roll_deps
from roll_deps import CalculateChangedDeps, FindAddedDeps, \
@ -57,7 +52,7 @@ class TestError(Exception):
pass
class FakeCmd(object):
class FakeCmd:
def __init__(self):
self.expectations = []
@ -73,14 +68,13 @@ class FakeCmd(object):
for item in ignores:
kwargs.pop(item, None)
if args != exp_args or kwargs != exp_kwargs:
message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args,
exp_kwargs)
message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs)
message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs)
raise TestError(message)
return exp_returns
class NullCmd(object):
class NullCmd:
"""No-op mock when calls mustn't be checked. """
def __call__(self, *args, **kwargs):
@ -122,8 +116,7 @@ class TestRollChromiumRevision(unittest.TestCase):
new_cr_contents)
with open(self._webrtc_depsfile) as deps_file:
deps_contents = deps_file.read()
self.assertTrue(
new_rev in deps_contents,
self.assertTrue(new_rev in deps_contents,
'Failed to find %s in\n%s' % (new_rev, deps_contents))
def _UpdateDepsSetup(self):
@ -136,9 +129,8 @@ class TestRollChromiumRevision(unittest.TestCase):
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
with mock.patch('roll_deps._RunCommand', NullCmd()):
UpdateDepsFile(self._webrtc_depsfile_android,
NO_CHROMIUM_REVISION_UPDATE, changed_deps,
new_cr_contents)
UpdateDepsFile(self._webrtc_depsfile_android, NO_CHROMIUM_REVISION_UPDATE,
changed_deps, new_cr_contents)
with open(self._webrtc_depsfile_android) as deps_file:
updated_contents = deps_file.read()
@ -174,8 +166,7 @@ class TestRollChromiumRevision(unittest.TestCase):
vars_dict = local_scope['vars']
def AssertVar(variable_name):
self.assertEqual(vars_dict[variable_name],
TEST_DATA_VARS[variable_name])
self.assertEqual(vars_dict[variable_name], TEST_DATA_VARS[variable_name])
AssertVar('chromium_git')
AssertVar('chromium_revision')
@ -200,8 +191,7 @@ class TestRollChromiumRevision(unittest.TestCase):
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile)
with mock.patch('roll_deps._RunCommand', self.fake):
_SetupGitLsRemoteCall(
self.fake,
'https://chromium.googlesource.com/chromium/src/build',
self.fake, 'https://chromium.googlesource.com/chromium/src/build',
BUILD_NEW_REV)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
@ -214,8 +204,7 @@ class TestRollChromiumRevision(unittest.TestCase):
self.assertEqual(changed_deps[1].package, 'gn/gn/linux-amd64')
self.assertEqual(changed_deps[1].current_version,
'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263')
self.assertEqual(changed_deps[1].new_version,
'git_revision:new-revision')
self.assertEqual(changed_deps[1].new_version, 'git_revision:new-revision')
self.assertEqual(changed_deps[2].path, 'src/third_party/depot_tools')
self.assertEqual(changed_deps[2].current_rev, DEPOTTOOLS_OLD_REV)
@ -239,11 +228,10 @@ class TestRollChromiumRevision(unittest.TestCase):
def testFindAddedDeps(self):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
added_android_paths, other_paths = FindAddedDeps(
webrtc_deps, new_cr_deps)
self.assertEqual(added_android_paths, [
'src/third_party/android_deps/libs/android_arch_lifecycle_common'
])
added_android_paths, other_paths = FindAddedDeps(webrtc_deps, new_cr_deps)
self.assertEqual(
added_android_paths,
['src/third_party/android_deps/libs/android_arch_lifecycle_common'])
self.assertEqual(other_paths, [])
def testFindRemovedDeps(self):
@ -251,9 +239,9 @@ class TestRollChromiumRevision(unittest.TestCase):
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
removed_android_paths, other_paths = FindRemovedDeps(
webrtc_deps, new_cr_deps)
self.assertEqual(removed_android_paths, [
'src/third_party/android_deps/libs/android_arch_lifecycle_runtime'
])
self.assertEqual(
removed_android_paths,
['src/third_party/android_deps/libs/android_arch_lifecycle_runtime'])
self.assertEqual(other_paths, [])
def testMissingDepsIsDetected(self):
@ -264,8 +252,7 @@ class TestRollChromiumRevision(unittest.TestCase):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
_, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps)
self.assertEqual(
other_paths,
self.assertEqual(other_paths,
['src/buildtools/linux64', 'src/third_party/depot_tools'])
def testExpectedDepsIsNotReportedMissing(self):
@ -289,9 +276,8 @@ class TestRollChromiumRevision(unittest.TestCase):
new_commit_pos = 'f00d'
commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE,
current_commit_pos,
new_commit_pos, changed_deps,
added_paths, removed_paths)
current_commit_pos, new_commit_pos,
changed_deps, added_paths, removed_paths)
return [l.strip() for l in commit_msg.split('\n')]
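The Python-version workaround is removed because the tests now only run under Python 3, where the standard-library `unittest.mock` offers the same `patch` API as the standalone mock package used above. A small self-contained illustration of that pattern (names are invented):

from unittest import mock

def _RunCommand(command):
    return 'real output'

with mock.patch(__name__ + '._RunCommand', return_value='fake output') as fake:
    print(_RunCommand(['git', 'status']))  # -> fake output
    fake.assert_called_once()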

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -19,7 +21,7 @@ if __name__ == '__main__':
args = sys.argv
if len(args) != 2:
print('Usage: binary_version_test.py <FILE_NAME>')
exit(1)
sys.exit(1)
filename = sys.argv[1]
output = subprocess.check_output(['strings', filename])
strings_in_binary = output.decode('utf-8').splitlines()
@ -27,8 +29,8 @@ if __name__ == '__main__':
if WEBRTC_VERSION_RE.match(symbol):
with open('webrtc_binary_version_check', 'w') as f:
f.write(symbol)
exit(0)
sys.exit(0)
print('WebRTC source timestamp not found in "%s"' % filename)
print('Check why "kSourceTimestamp" from call/version.cc is not linked '
'(or why it has been optimized away by the compiler/linker)')
exit(1)
sys.exit(1)
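The bare exit() builtin is provided by the `site` module for interactive sessions and may be unavailable (for example under `python -S` or in frozen binaries), so the script now calls sys.exit() explicitly. A minimal sketch of the pattern:

import sys

def main(found):
    if not found:
        print('WebRTC source timestamp not found')
        return 1
    return 0

if __name__ == '__main__':
    # sys.exit raises SystemExit with the given status code and is always available.
    sys.exit(main(found=True))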

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -20,9 +21,8 @@ import shutil
import subprocess
import sys
import tempfile
#pylint: disable=relative-import
from presubmit_checks_lib.build_helpers import GetClangTidyPath, \
GetCompilationCommand
from presubmit_checks_lib.build_helpers import (GetClangTidyPath,
GetCompilationCommand)
# We enable all checkers by default for investigation purpose.
# This includes clang-analyzer-* checks.
@ -55,7 +55,7 @@ def Process(filepath, args):
# Replace clang++ by clang-tidy
command[0:1] = [GetClangTidyPath(), CHECKER_OPTION, rel_path
] + args + ['--'] # Separator for clang flags.
print "Running: %s" % ' '.join(command)
print("Running: %s" % ' '.join(command))
# Run from build dir so that relative paths are correct.
p = subprocess.Popen(command,
cwd=out_dir,

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -36,10 +37,9 @@ def main():
return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary)
modules_unittests = 'out/coverage/modules_unittests'
cmd[cmd.index('-c \'%s\'' %
modules_unittests)] = WithXvfb(modules_unittests)
cmd[cmd.index('-c \'%s\'' % modules_unittests)] = WithXvfb(modules_unittests)
print ' '.join(cmd)
print(' '.join(cmd))
return 0

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -46,7 +47,6 @@ if os.path.exists(binary_path):
========== ENDING OF PATCH ==========
"""
import sys
DIRECTORY = 'out/coverage'
@ -102,64 +102,64 @@ def GenerateIOSSimulatorCommand():
[FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] +
[FormatIossimTest(t, is_xctest=False) for t in TESTS])
print 'To get code coverage using iOS sim just run following commands:'
print ''
print ' '.join(gn_cmd)
print ''
print ' '.join(coverage_cmd)
print('To get code coverage using iOS sim just run following commands:')
print('')
print(' '.join(gn_cmd))
print('')
print(' '.join(coverage_cmd))
return 0
def GenerateIOSDeviceCommand():
gn_args_string = ' '.join(GetGNArgs(is_simulator=False))
coverage_report_cmd = (
[sys.executable, 'tools/code_coverage/coverage.py'] +
coverage_report_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] +
['-o out/report'] + ['-p %s/merged.profdata' % DIRECTORY] +
['-o out/report'] +
['-p %s/merged.profdata' % DIRECTORY] +
['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''])
print 'Computing code coverage for real iOS device is a little bit tedious.'
print ''
print 'You will need:'
print ''
print '1. Generate xcode project and open it with Xcode 10+:'
print ' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string)
print ' open %s/all.xcworkspace' % DIRECTORY
print ''
print '2. Execute these Run targets manually with Xcode Run button and '
print 'manually save generated coverage.profraw file to %s:' % DIRECTORY
print '\n'.join('- %s' % t for t in TESTS)
print ''
print '3. Execute these Test targets manually with Xcode Test button and '
print 'manually save generated coverage.profraw file to %s:' % DIRECTORY
print '\n'.join('- %s' % t for t in XC_TESTS)
print ''
print '4. Merge *.profraw files to *.profdata using llvm-profdata tool:'
print(' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' +
print('Computing code coverage for a real iOS device is a little bit tedious.')
print('')
print('You will need:')
print('')
print('1. Generate xcode project and open it with Xcode 10+:')
print(' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string))
print(' open %s/all.xcworkspace' % DIRECTORY)
print('')
print('2. Execute these Run targets manually with Xcode Run button and ')
print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
print('\n'.join('- %s' % t for t in TESTS))
print('')
print('3. Execute these Test targets manually with Xcode Test button and ')
print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
print('\n'.join('- %s' % t for t in XC_TESTS))
print('')
print('4. Merge *.profraw files to *.profdata using llvm-profdata tool:')
print((' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' +
'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' +
'-o %s/merged.profdata ' % DIRECTORY +
'-sparse=true %s/*.profraw' % DIRECTORY)
print ''
print '5. Generate coverage report:'
print ' ' + ' '.join(coverage_report_cmd)
'-sparse=true %s/*.profraw' % DIRECTORY))
print('')
print('5. Generate coverage report:')
print(' ' + ' '.join(coverage_report_cmd))
return 0
def Main():
def main():
if len(sys.argv) < 2:
print 'Please specify type of coverage:'
print ' %s simulator' % sys.argv[0]
print ' %s device' % sys.argv[0]
print('Please specify type of coverage:')
print(' %s simulator' % sys.argv[0])
print(' %s device' % sys.argv[0])
elif sys.argv[1] == 'simulator':
GenerateIOSSimulatorCommand()
elif sys.argv[1] == 'device':
GenerateIOSDeviceCommand()
else:
print 'Unsupported type of coverage'
print('Unsupported type of coverage')
return 0
if __name__ == '__main__':
sys.exit(Main())
sys.exit(main())

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
#
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
@ -8,37 +8,37 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import psutil
import sys
import psutil
import numpy
from matplotlib import pyplot
class CpuSnapshot(object):
class CpuSnapshot:
def __init__(self, label):
self.label = label
self.samples = []
def Capture(self, sample_count):
print('Capturing %d CPU samples for %s...' %
((sample_count - len(self.samples)), self.label))
print(('Capturing %d CPU samples for %s...' %
((sample_count - len(self.samples)), self.label)))
while len(self.samples) < sample_count:
self.samples.append(psutil.cpu_percent(1.0, False))
def Text(self):
return ('%s: avg=%s, median=%s, min=%s, max=%s' %
(self.label, numpy.average(self.samples),
numpy.median(self.samples), numpy.min(
self.samples), numpy.max(self.samples)))
return (
'%s: avg=%s, median=%s, min=%s, max=%s' %
(self.label, numpy.average(self.samples), numpy.median(
self.samples), numpy.min(self.samples), numpy.max(self.samples)))
def Max(self):
return numpy.max(self.samples)
def GrabCpuSamples(sample_count):
print 'Label for snapshot (enter to quit): '
label = raw_input().strip()
print('Label for snapshot (enter to quit): ')
label = input().strip()
if len(label) == 0:
return None
@ -49,12 +49,12 @@ def GrabCpuSamples(sample_count):
def main():
print 'How many seconds to capture per snapshot (enter for 60)?'
sample_count = raw_input().strip()
print('How many seconds to capture per snapshot (enter for 60)?')
sample_count = input().strip()
if len(sample_count) > 0 and int(sample_count) > 0:
sample_count = int(sample_count)
else:
print 'Defaulting to 60 samples.'
print('Defaulting to 60 samples.')
sample_count = 60
snapshots = []
@ -65,7 +65,7 @@ def main():
snapshots.append(snapshot)
if len(snapshots) == 0:
print 'no samples captured'
print('no samples captured')
return -1
pyplot.title('CPU usage')

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -43,7 +44,7 @@ def main(directories):
'--recursive',
path,
]
print 'Downloading precompiled tools...'
print('Downloading precompiled tools...')
# Perform download similar to how gclient hooks execute.
try:
@ -51,7 +52,7 @@ def main(directories):
cwd=SRC_DIR,
always_show_header=True)
except (gclient_utils.Error, subprocess2.CalledProcessError) as e:
print 'Error: %s' % str(e)
print('Error: %s' % str(e))
return 2
return 0

View file

@ -1,4 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -26,10 +27,10 @@ If any command line arguments are passed to the script, it is executed as a
command in a subprocess.
"""
# psutil is not installed on non-Linux machines by default.
import psutil # pylint: disable=F0401
import subprocess
import sys
# psutil is not installed on non-Linux machines by default.
import psutil # pylint: disable=F0401
WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam')
WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app')
@ -81,7 +82,7 @@ def StartWebCam():
def _ForcePythonInterpreter(cmd):
"""Returns the fixed command line to call the right python executable."""
out = cmd[:]
if out[0] == 'python':
if out[0] == 'vpython3':
out[0] = sys.executable
elif out[0].endswith('.py'):
out.insert(0, sys.executable)
@ -95,7 +96,6 @@ def Main(argv):
if argv:
return subprocess.call(_ForcePythonInterpreter(argv))
else:
return 0
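
As a minimal sketch, this is what the updated interpreter remapping boils down to once the launcher token is vpython3 (helper name taken from the hunk above; the example command is invented):

```
import sys


def _ForcePythonInterpreter(cmd):
  """Returns the fixed command line to call the right python executable."""
  out = cmd[:]
  if out[0] == 'vpython3':
    # Swap the launcher token for the interpreter running this script.
    out[0] = sys.executable
  elif out[0].endswith('.py'):
    # Bare scripts get the interpreter prepended.
    out.insert(0, sys.executable)
  return out


# Example: ['vpython3', 'run_tests.py'] -> ['/path/to/python3', 'run_tests.py']
print(_ForcePythonInterpreter(['vpython3', 'run_tests.py']))
```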

View file

@ -1,4 +1,4 @@
#!/usr/bin/env/python
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
@ -24,7 +24,7 @@ following executable in your out folder:
You will be able to compile the same executable targeting your host machine
by running:
$ python tools_webrtc/executable_host_build.py --executable_name random_exec
$ vpython3 tools_webrtc/executable_host_build.py --executable_name random_exec
The generated executable will have the same name as the input executable with
suffix '_host'.
@ -95,7 +95,6 @@ if __name__ == '__main__':
EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host'
with HostBuildDir() as build_dir:
_RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir])
_RunCommand(
[DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
_RunCommand([DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD),
EXECUTABLE_FINAL_NAME)

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
@ -36,6 +36,7 @@ def main():
def _ForcePythonInterpreter(cmd):
"""Returns the fixed command line to call the right python executable."""
out = cmd[:]
if len(out) > 0:
if out[0] == 'python':
out[0] = sys.executable
elif out[0].endswith('.py'):

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python3
#!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -11,9 +12,6 @@ This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines').
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
@ -39,8 +37,7 @@ def print_landmines(): # pylint: disable=invalid-name
if host_os() == 'win':
print('Clobber to resolve some issues with corrupt .pdb files on bots.')
print('Clobber due to corrupt .pdb files (after #14623)')
print(
'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
print('Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
print('Clobber due to Win Clang Debug linking errors in '
'https://codereview.webrtc.org/2786603002')
print('Clobber due to Win Debug linking errors in '

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
@ -14,9 +14,9 @@ It will run `mb gen` in a temporary directory and it is really useful to
check for different configurations.
Usage:
$ python tools_webrtc/gn_check_autofix.py -m some_master -b some_bot
$ vpython3 tools_webrtc/gn_check_autofix.py -m some_master -b some_bot
or
$ python tools_webrtc/gn_check_autofix.py -c some_mb_config
$ vpython3 tools_webrtc/gn_check_autofix.py -c some_mb_config
"""
import os
@ -38,7 +38,7 @@ TARGET_RE = re.compile(
r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$')
class TemporaryDirectory(object):
class TemporaryDirectory:
def __init__(self):
self._closed = False
self._name = None
@ -54,7 +54,7 @@ class TemporaryDirectory(object):
def Run(cmd):
print 'Running:', ' '.join(cmd)
print('Running:', ' '.join(cmd))
sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return sub.communicate()
@ -75,14 +75,13 @@ def FixErrors(filename, missing_deps, deleted_sources):
match = re.match(indentation_level + '}$', line)
if match:
line = ('deps = [\n' + ''.join(' "' + dep + '",\n'
for dep in missing_deps[target])
+ ']\n') + line
for dep in missing_deps[target]) +
']\n') + line
indentation_level = None
elif line.strip().startswith('deps'):
is_empty_deps = line.strip() == 'deps = []'
line = 'deps = [\n' if is_empty_deps else line
line += ''.join(' "' + dep + '",\n'
for dep in missing_deps[target])
line += ''.join(' "' + dep + '",\n' for dep in missing_deps[target])
line += ']\n' if is_empty_deps else ''
indentation_level = None
@ -156,10 +155,10 @@ def main():
] + sys.argv[1:])
mb_output = Run(mb_gen_command)
errors = mb_output[0].split('ERROR')[1:]
errors = mb_output[0].decode('utf-8').split('ERROR')[1:]
if mb_output[1]:
print mb_output[1]
print(mb_output[1])
return 1
for error in errors:
@ -168,7 +167,7 @@ def main():
if target_msg not in error:
target_msg = 'It is not in any dependency of'
if target_msg not in error:
print '\n'.join(error)
print('\n'.join(error))
continue
index = error.index(target_msg) + 1
path, target = error[index].strip().split(':')
@ -182,14 +181,13 @@ def main():
path = os.path.join(path[2:], 'BUILD.gn')
errors_by_file[path][target].add(dep)
elif error[index + 1] == 'has a source file:':
deleted_file = '"' + os.path.basename(
error[index + 2].strip()) + '",'
deleted_file = '"' + os.path.basename(error[index + 2].strip()) + '",'
deleted_sources.add(deleted_file)
else:
print '\n'.join(error)
print('\n'.join(error))
continue
for path, missing_deps in errors_by_file.items():
for path, missing_deps in list(errors_by_file.items()):
FixErrors(path, missing_deps, deleted_sources)
return 0

View file

@ -53,7 +53,7 @@ For example:
Will be converted into:
python gtest-parallel \
vpython3 gtest-parallel \
--shard_index 0 \
--shard_count 1 \
--output_dir=SOME_OUTPUT_DIR \
@ -82,8 +82,8 @@ Args = collections.namedtuple(
['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir'])
def _CatFiles(file_list, output_file):
with open(output_file, 'w') as output_file:
def _CatFiles(file_list, output_file_destination):
with open(output_file_destination, 'w') as output_file:
for filename in file_list:
with open(filename) as input_file:
output_file.write(input_file.read())
@ -100,7 +100,7 @@ def _ParseWorkersOption(workers):
return max(result, 1) # Sanitize when using e.g. '0.5x'.
class ReconstructibleArgumentGroup(object):
class ReconstructibleArgumentGroup:
"""An argument group that can be converted back into a command line.
This acts like ArgumentParser.add_argument_group, but names of arguments added
@ -154,7 +154,7 @@ def ParseArgs(argv=None):
parser.add_argument('--store-test-artifacts', action='store_true')
# No-sandbox is a Chromium-specific flag, ignore it.
# TODO(oprypin): Remove (bugs.webrtc.org/8115)
# TODO(bugs.webrtc.org/8115): Remove workaround when fixed.
parser.add_argument('--no-sandbox',
action='store_true',
help=argparse.SUPPRESS)
@ -171,7 +171,7 @@ def ParseArgs(argv=None):
}
args_to_pass = []
for arg in unrecognized_args:
if any(arg.startswith(k) for k in webrtc_flags_to_change.keys()):
if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())):
arg_split = arg.split('=')
args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' +
arg_split[1])
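
The flag-renaming loop above can be exercised on its own. The single mapping entry below is inferred from the unit test in the next file (hyphenated Chromium flag to underscored WebRTC flag); the real dictionary and the fall-through branch for unmatched flags are assumptions:

```
webrtc_flags_to_change = {
    '--isolated-script-test-perf-output': '--isolated_script_test_perf_output',
}
unrecognized_args = [
    '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar'
]

args_to_pass = []
for arg in unrecognized_args:
  if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())):
    arg_split = arg.split('=')
    args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' +
                        arg_split[1])
  else:
    args_to_pass.append(arg)  # Assumed: unmatched flags pass through as-is.

print(args_to_pass)
# ['--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar']
```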

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
@ -34,8 +34,7 @@ class GtestParallelWrapperHelpersTest(unittest.TestCase):
def testGetTwiceWorkers(self):
expected = 2 * multiprocessing.cpu_count()
# pylint: disable=protected-access
self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'),
expected)
self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), expected)
def testGetHalfWorkers(self):
expected = max(multiprocessing.cpu_count() // 2, 1)
@ -56,21 +55,19 @@ class GtestParallelWrapperTest(unittest.TestCase):
self.assertEqual(result.gtest_parallel_args, expected)
def testMixing(self):
result = gtest_parallel_wrapper.ParseArgs([
'--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124'
])
result = gtest_parallel_wrapper.ParseArgs(
['--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124'])
expected = self._Expected(
['--timeout=124', 'exec', '--', '--param1', '--param2'])
self.assertEqual(result.gtest_parallel_args, expected)
def testMixingPositional(self):
result = gtest_parallel_wrapper.ParseArgs([
'--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124',
'--foo2', 'bar2'
])
expected = self._Expected([
'--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'
'--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--foo2',
'bar2'
])
expected = self._Expected(
['--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'])
self.assertEqual(result.gtest_parallel_args, expected)
def testDoubleDash1(self):
@ -83,8 +80,7 @@ class GtestParallelWrapperTest(unittest.TestCase):
def testDoubleDash2(self):
result = gtest_parallel_wrapper.ParseArgs(
['--timeout=123', '--', 'exec', '--timeout=124'])
expected = self._Expected(
['--timeout=123', 'exec', '--', '--timeout=124'])
expected = self._Expected(['--timeout=123', 'exec', '--', '--timeout=124'])
self.assertEqual(result.gtest_parallel_args, expected)
def testArtifacts(self):
@ -135,16 +131,16 @@ class GtestParallelWrapperTest(unittest.TestCase):
result = gtest_parallel_wrapper.ParseArgs([
'some_test', '--some_flag=some_value', '--another_flag',
'--output_dir=' + output_dir, '--store-test-artifacts',
'--isolated-script-test-perf-output=SOME_OTHER_DIR',
'--foo=bar', '--baz'
'--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar',
'--baz'
])
expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
expected = self._Expected([
'--output_dir=' + output_dir, 'some_test', '--',
'--test_artifacts_dir=' + expected_artifacts_dir,
'--some_flag=some_value', '--another_flag',
'--isolated_script_test_perf_output=SOME_OTHER_DIR',
'--foo=bar', '--baz'
'--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar',
'--baz'
])
self.assertEqual(result.gtest_parallel_args, expected)
@ -161,8 +157,7 @@ class GtestParallelWrapperTest(unittest.TestCase):
self.assertEqual(result.gtest_parallel_args, expected)
def testUseHalfTheCpuCores(self):
result = gtest_parallel_wrapper.ParseArgs(
['--workers', '0.5x', 'exec'])
result = gtest_parallel_wrapper.ParseArgs(['--workers', '0.5x', 'exec'])
workers = max(multiprocessing.cpu_count() // 2, 1)
expected = self._Expected(['--workers=%s' % workers, 'exec'])
self.assertEqual(result.gtest_parallel_args, expected)

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
@ -57,8 +57,7 @@ def _ParseArgs():
choices=['debug', 'release'],
help='The build config. Can be "debug" or "release". '
'Defaults to "release".')
parser.add_argument(
'--arch',
parser.add_argument('--arch',
nargs='+',
default=DEFAULT_ARCHS,
choices=ENABLED_ARCHS,
@ -69,8 +68,7 @@ def _ParseArgs():
action='store_true',
default=False,
help='Removes the previously generated build output, if any.')
parser.add_argument(
'-p',
parser.add_argument('-p',
'--purify',
action='store_true',
default=False,
@ -158,8 +156,10 @@ def BuildWebRTC(output_dir, target_environment, target_arch, flavor,
gn_target_name, ios_deployment_target, libvpx_build_vp9,
use_bitcode, use_goma, extra_gn_args):
gn_args = [
'target_os="ios"', 'ios_enable_code_signing=false',
'is_component_build=false', 'rtc_include_tests=false',
'target_os="ios"',
'ios_enable_code_signing=false',
'is_component_build=false',
'rtc_include_tests=false',
]
# Add flavor option.
@ -179,8 +179,7 @@ def BuildWebRTC(output_dir, target_environment, target_arch, flavor,
gn_args.append('rtc_libvpx_build_vp9=' +
('true' if libvpx_build_vp9 else 'false'))
gn_args.append('enable_ios_bitcode=' +
('true' if use_bitcode else 'false'))
gn_args.append('enable_ios_bitcode=' + ('true' if use_bitcode else 'false'))
gn_args.append('use_goma=' + ('true' if use_goma else 'false'))
gn_args.append('rtc_enable_objc_symbol_export=true')
@ -224,7 +223,7 @@ def main():
gn_args = args.extra_gn_args
if args.purify:
_CleanTemporary(args.output_dir, architectures.keys())
_CleanTemporary(args.output_dir, list(architectures.keys()))
return 0
gn_target_name = 'framework_objc'
@ -235,7 +234,7 @@ def main():
# Build all architectures.
framework_paths = []
all_lib_paths = []
for (environment, archs) in architectures.items():
for (environment, archs) in list(architectures.items()):
framework_path = os.path.join(args.output_dir, environment)
framework_paths.append(framework_path)
lib_paths = []
@ -250,11 +249,9 @@ def main():
# Combine the slices.
dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC')
# Dylibs will be combined, all other files are the same across archs.
shutil.rmtree(
os.path.join(framework_path, SDK_FRAMEWORK_NAME),
shutil.rmtree(os.path.join(framework_path, SDK_FRAMEWORK_NAME),
ignore_errors=True)
shutil.copytree(
os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
shutil.copytree(os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
os.path.join(framework_path, SDK_FRAMEWORK_NAME),
symlinks=True)
logging.info('Merging framework slices for %s.', environment)
@ -273,24 +270,20 @@ def main():
# Merge the dSYM slices.
lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME)
if os.path.isdir(lib_dsym_dir_path):
shutil.rmtree(
os.path.join(framework_path, SDK_DSYM_NAME),
shutil.rmtree(os.path.join(framework_path, SDK_DSYM_NAME),
ignore_errors=True)
shutil.copytree(
lib_dsym_dir_path, os.path.join(framework_path, SDK_DSYM_NAME))
shutil.copytree(lib_dsym_dir_path,
os.path.join(framework_path, SDK_DSYM_NAME))
logging.info('Merging dSYM slices.')
dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources',
'DWARF', 'WebRTC')
lib_dsym_paths = [
os.path.join(path, dsym_path) for path in lib_paths
]
dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', 'DWARF',
'WebRTC')
lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths]
out_dsym_path = os.path.join(framework_path, dsym_path)
try:
os.remove(out_dsym_path)
except OSError:
pass
cmd = ['lipo'
] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
_RunCommand(cmd)
# Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)...

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

View file

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved.
#
@ -10,11 +10,12 @@
"""Script for merging generated iOS libraries."""
import sys
import argparse
import os
import re
import subprocess
from six.moves import range
# Valid arch subdir names.
VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs']
@ -49,12 +50,12 @@ def MergeLibs(lib_base_dir):
libs[filename] = entry
orphaned_libs = {}
valid_libs = {}
for library, paths in libs.items():
for library, paths in list(libs.items()):
if len(paths) < len(archs):
orphaned_libs[library] = paths
else:
valid_libs[library] = paths
for library, paths in orphaned_libs.items():
for library, paths in list(orphaned_libs.items()):
components = library[:-2].split('_')[:-1]
found = False
# Find directly matching parent libs by stripping suffix.
@ -68,7 +69,7 @@ def MergeLibs(lib_base_dir):
# Find next best match by finding parent libs with the same prefix.
if not found:
base_prefix = library[:-2].split('_')[0]
for valid_lib, valid_paths in valid_libs.items():
for valid_lib, valid_paths in list(valid_libs.items()):
if valid_lib[:len(base_prefix)] == base_prefix:
valid_paths.extend(paths)
found = True
@ -89,18 +90,16 @@ def MergeLibs(lib_base_dir):
# Merge libraries using libtool.
libtool_returncode = 0
for library, paths in valid_libs.items():
for library, paths in list(valid_libs.items()):
cmd_list = [
'libtool', '-static', '-v', '-o',
os.path.join(output_dir_path, library)
] + paths
libtoolout = subprocess.Popen(cmd_list,
stderr=subprocess.PIPE,
env=env)
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
_, err = libtoolout.communicate()
for line in err.splitlines():
if not libtool_re.match(line):
print >> sys.stderr, line
print(line, file=sys.stderr)
# Unconditionally touch the output .a file on the command line if present
# and the command succeeded. A bit hacky.
libtool_returncode = libtoolout.returncode
@ -112,7 +111,7 @@ def MergeLibs(lib_base_dir):
return libtool_returncode
def Main():
def main():
parser_description = 'Merge WebRTC libraries.'
parser = argparse.ArgumentParser(description=parser_description)
parser.add_argument('lib_base_dir',
@ -124,4 +123,4 @@ def Main():
if __name__ == '__main__':
sys.exit(Main())
sys.exit(main())

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved.
#
@ -13,7 +13,8 @@ Licenses are taken from dependent libraries which are determined by
GN desc command `gn desc` on all targets specified via `--target` argument.
One can see all dependencies by invoking this command:
$ gn.py desc --all --format=json <out_directory> <target> | python -m json.tool
$ gn.py desc --all --format=json <out_directory> <target> | \
vpython3 -m json.tool
(see "deps" subarray)
Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
@ -21,18 +22,13 @@ Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
"""
import sys
import argparse
import json
import logging
import os
import re
import subprocess
try:
# python 3.2+
from html import escape
except ImportError:
from cgi import escape
# Third_party library to licences mapping. Keys are names of the libraries
# (right after the `third_party/` prefix)
@ -124,7 +120,7 @@ THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'
THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$'
class LicenseBuilder(object):
class LicenseBuilder:
def __init__(self,
buildfile_dirs,
targets,
@ -194,7 +190,7 @@ class LicenseBuilder(object):
def _GetThirdPartyLibraries(self, buildfile_dir, target):
output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
libraries = set()
for described_target in output.values():
for described_target in list(output.values()):
third_party_libs = (self._ParseLibrary(dep)
for dep in described_target['deps'])
libraries |= set(lib for lib in third_party_libs if lib)
@ -206,12 +202,10 @@ class LicenseBuilder(object):
third_party_libs = set()
for buildfile in self.buildfile_dirs:
for target in self.targets:
third_party_libs |= self._GetThirdPartyLibraries(
buildfile, target)
third_party_libs |= self._GetThirdPartyLibraries(buildfile, target)
assert len(third_party_libs) > 0
missing_licenses = third_party_libs - set(
self.common_licenses_dict.keys())
missing_licenses = third_party_libs - set(self.common_licenses_dict.keys())
if missing_licenses:
error_msg = 'Missing licenses for following third_party targets: %s' % \
', '.join(sorted(missing_licenses))
@ -225,12 +219,10 @@ class LicenseBuilder(object):
logging.info('List of licenses: %s', ', '.join(license_libs))
# Generate markdown.
output_license_file = open(os.path.join(output_dir, 'LICENSE.md'),
'w+')
output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+')
for license_lib in license_libs:
if len(self.common_licenses_dict[license_lib]) == 0:
logging.info(
'Skipping compile time or internal dependency: %s',
logging.info('Skipping compile time or internal dependency: %s',
license_lib)
continue # Compile time dependency
@ -258,8 +250,7 @@ def main():
action='append',
default=[],
help='Name of the GN target to generate a license for')
parser.add_argument('output_dir',
help='Directory to output LICENSE.md to.')
parser.add_argument('output_dir', help='Directory to output LICENSE.md to.')
parser.add_argument('buildfile_dirs',
nargs='+',
help='Directories containing gn generated ninja files')
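
To illustrate the mapping described in the docstring, here is a small standalone sketch using the THIRD_PARTY_LIB_SIMPLE_NAME_REGEX from this hunk; the dictionary entries are hypothetical, not the real LIB_TO_LICENSES_DICT contents:

```
import re

# Hypothetical entries for illustration only.
LIB_TO_LICENSES_DICT = {
    'abseil-cpp': ['third_party/abseil-cpp/LICENSE'],
    'libyuv': ['third_party/libyuv/LICENSE'],
}

THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'


def ParseLibraryName(dep):
  """Extracts the library name right after the third_party/ prefix."""
  match = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep)
  return match.group(1) if match else None


for dep in ['//third_party/libyuv:libyuv', '//a/b/not_third_party/c']:
  name = ParseLibraryName(dep)
  print(dep, '->', name, LIB_TO_LICENSES_DICT.get(name))
```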

View file

@ -1,5 +1,6 @@
#!/usr/bin/env vpython
# pylint: disable=relative-import,protected-access,unused-argument
#!/usr/bin/env vpython3
# pylint: disable=protected-access,unused-argument
# Copyright 2017 The WebRTC project authors. All Rights Reserved.
#
@ -10,11 +11,6 @@
# be found in the AUTHORS file in the root of the source tree.
import unittest
try:
# python 3.3+
from unittest.mock import patch
except ImportError:
# From site-package
from mock import patch
from generate_licenses import LicenseBuilder
@ -41,11 +37,11 @@ class TestLicenseBuilder(unittest.TestCase):
LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'),
'libname1')
self.assertEqual(
LicenseBuilder._ParseLibraryName(
'//a/b/third_party/libname2:c(d)'), 'libname2')
LicenseBuilder._ParseLibraryName('//a/b/third_party/libname2:c(d)'),
'libname2')
self.assertEqual(
LicenseBuilder._ParseLibraryName(
'//a/b/third_party/libname3/c:d(e)'), 'libname3')
LicenseBuilder._ParseLibraryName('//a/b/third_party/libname3/c:d(e)'),
'libname3')
self.assertEqual(
LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None)
@ -60,8 +56,7 @@ class TestLicenseBuilder(unittest.TestCase):
}
builder = LicenseBuilder([], [], lib_dict, {})
self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname:bar_java'),
'libname')
builder._ParseLibrary('//a/b/third_party/libname:bar_java'), 'libname')
def testParseLibraryRegExMatch(self):
lib_regex_dict = {
@ -87,15 +82,13 @@ class TestLicenseBuilder(unittest.TestCase):
}
builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual(
builder._ParseLibrary(
'//a/b/third_party/libname/fooHAHA:bar_java'),
builder._ParseLibrary('//a/b/third_party/libname/fooHAHA:bar_java'),
'libname/foo.*bar.*')
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGetThirdPartyLibrariesWithoutRegex(self):
builder = LicenseBuilder([], [], {}, {})
self.assertEqual(
builder._GetThirdPartyLibraries('out/arm', 'target1'),
self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
set(['libname1', 'libname2', 'libname3']))
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
@ -104,8 +97,7 @@ class TestLicenseBuilder(unittest.TestCase):
'libname2:c.*': ['path/to/LICENSE'],
}
builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual(
builder._GetThirdPartyLibraries('out/arm', 'target1'),
self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
set(['libname1', 'libname2:c.*', 'libname3']))
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -7,6 +9,10 @@
# be found in the AUTHORS file in the root of the source tree.
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True
def _CommonChecks(input_api, output_api):
results = []
@ -27,15 +33,16 @@ def _CommonChecks(input_api, output_api):
results.extend(input_api.RunTests(pylint_checks))
# Run the MB unittests.
results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
input_api,
results.extend(
input_api.canned_checks.RunUnitTestsInDirectory(input_api,
output_api,
'.',
[r'^.+_unittest\.py$'],
skip_shebang_check=True))
skip_shebang_check=False,
run_on_python2=False))
# Validate the format of the mb_config.pyl file.
cmd = [input_api.python_executable, 'mb.py', 'validate']
cmd = [input_api.python3_executable, 'mb.py', 'validate']
kwargs = {'cwd': input_api.PresubmitLocalPath()}
results.extend(input_api.RunTests([
input_api.Command(name='mb_validate',

View file

@ -3,4 +3,4 @@ setlocal
:: This is required with cygwin only.
PATH=%~dp0;%PATH%
set PYTHONDONTWRITEBYTECODE=1
call python "%~dp0mb.py" %*
call vpython3 "%~dp0mb.py" %*

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -13,8 +14,6 @@ MB is a wrapper script for GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""
from __future__ import print_function
import argparse
import ast
import errno
@ -28,10 +27,7 @@ import sys
import subprocess
import tempfile
import traceback
try:
from urllib2 import urlopen # for Python2
except ImportError:
from urllib.request import urlopen # for Python3
from urllib.request import urlopen
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
@ -280,7 +276,7 @@ class MetaBuildWrapper(object):
def CmdExport(self):
self.ReadConfigFile()
obj = {}
for builder_group, builders in self.builder_groups.items():
for builder_group, builders in list(self.builder_groups.items()):
obj[builder_group] = {}
for builder in builders:
config = self.builder_groups[builder_group][builder]
@ -290,7 +286,7 @@ class MetaBuildWrapper(object):
if isinstance(config, dict):
args = {
k: self.FlattenConfig(v)['gn_args']
for k, v in config.items()
for k, v in list(config.items())
}
elif config.startswith('//'):
args = config
@ -476,15 +472,15 @@ class MetaBuildWrapper(object):
# Build a list of all of the configs referenced by builders.
all_configs = {}
for builder_group in self.builder_groups:
for config in self.builder_groups[builder_group].values():
for config in list(self.builder_groups[builder_group].values()):
if isinstance(config, dict):
for c in config.values():
for c in list(config.values()):
all_configs[c] = builder_group
else:
all_configs[config] = builder_group
# Check that every referenced args file or config actually exists.
for config, loc in all_configs.items():
for config, loc in list(all_configs.items()):
if config.startswith('//'):
if not self.Exists(self.ToAbsPath(config)):
errs.append('Unknown args file "%s" referenced from "%s".' %
@ -500,7 +496,7 @@ class MetaBuildWrapper(object):
# Figure out the whole list of mixins, and check that every mixin
# listed by a config or another mixin actually exists.
referenced_mixins = set()
for config, mixins in self.configs.items():
for config, mixins in list(self.configs.items()):
for mixin in mixins:
if not mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by config "%s".' %
@ -1172,7 +1168,7 @@ class MetaBuildWrapper(object):
self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
if cmd[0] == self.executable:
cmd = ['python'] + cmd[1:]
cmd = ['vpython3'] + cmd[1:]
self.Print(*[shell_quoter(arg) for arg in cmd])
def PrintJSON(self, obj):

View file

@ -1,4 +1,5 @@
#!/usr/bin/python
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -11,9 +12,6 @@
import ast
import json
try:
from StringIO import StringIO # for Python2
except ImportError:
from io import StringIO # for Python3
import os
import re
@ -35,14 +33,14 @@ class FakeMBW(mb.MetaBuildWrapper):
self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\'
'gn_isolate_map.pyl')
self.platform = 'win32'
self.executable = 'c:\\python\\python.exe'
self.executable = 'c:\\python\\vpython3.exe'
self.sep = '\\'
self.cwd = 'c:\\fake_src\\out\\Default'
else:
self.src_dir = '/fake_src'
self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl'
self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl'
self.executable = '/usr/bin/python'
self.executable = '/usr/bin/vpython3'
self.platform = 'linux2'
self.sep = '/'
self.cwd = '/fake_src/out/Default'
@ -197,7 +195,7 @@ class UnitTest(unittest.TestCase):
mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'),
'is_debug = false\n')
if files:
for path, contents in files.items():
for path, contents in list(files.items()):
mbw.files[path] = contents
return mbw
@ -846,8 +844,8 @@ class UnitTest(unittest.TestCase):
'/fake_src/out/Default/base_unittests.archive.json':
("{\"base_unittests\":\"fake_hash\"}"),
'/fake_src/third_party/depot_tools/cipd_manifest.txt':
("# vpython\n"
"/some/vpython/pkg git_revision:deadbeef\n"),
("# vpython3\n"
"/some/vpython3/pkg git_revision:deadbeef\n"),
}
task_json = json.dumps({'tasks': [{'task_id': '00000'}]})
collect_json = json.dumps({'00000': {'results': {}}})

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -9,7 +10,7 @@
"""Configuration class for network emulation."""
class ConnectionConfig(object):
class ConnectionConfig:
"""Configuration containing the characteristics of a network connection."""
def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms,
@ -31,6 +32,5 @@ class ConnectionConfig(object):
"""
left_aligned_name = self.name.ljust(24, ' ')
return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % (
self.num, left_aligned_name, self.receive_bw_kbps,
self.send_bw_kbps, self.queue_slots, self.delay_ms,
self.packet_loss_percent)
self.num, left_aligned_name, self.receive_bw_kbps, self.send_bw_kbps,
self.queue_slots, self.delay_ms, self.packet_loss_percent)
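
For reference, the column layout produced by the reformatted __str__ can be checked in isolation; the sample values below are made up:

```
# Same format string as ConnectionConfig.__str__ above, with sample values.
num = 2
left_aligned_name = 'DSL'.ljust(24, ' ')
receive_bw_kbps, send_bw_kbps = 2000, 256
queue_slots, delay_ms, packet_loss_percent = 10, 30, 0.5

print('%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' %
      (num, left_aligned_name, receive_bw_kbps, send_bw_kbps, queue_slots,
       delay_ms, packet_loss_percent))
```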

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -74,8 +75,7 @@ def _ParseArgs():
default=_DEFAULT_PRESET_ID,
help=('ConnectionConfig configuration, specified by ID. '
'Default: %default'))
parser.add_option(
'-r',
parser.add_option('-r',
'--receive-bw',
type='int',
default=_DEFAULT_PRESET.receive_bw_kbps,
@ -95,19 +95,16 @@ def _ParseArgs():
type='float',
default=_DEFAULT_PRESET.packet_loss_percent,
help=('Packet loss in %. Default: %default'))
parser.add_option(
'-q',
parser.add_option('-q',
'--queue',
type='int',
default=_DEFAULT_PRESET.queue_slots,
help=('Queue size as number of slots. Default: %default'))
parser.add_option(
'--port-range',
parser.add_option('--port-range',
default='%s,%s' % _DEFAULT_PORT_RANGE,
help=('Range of ports for constrained network. Specify as '
'two comma separated integers. Default: %default'))
parser.add_option(
'--target-ip',
parser.add_option('--target-ip',
default=None,
help=('The interface IP address to apply the rules for. '
'Default: the external facing interface IP address.'))
@ -121,7 +118,7 @@ def _ParseArgs():
options = parser.parse_args()[0]
# Find preset by ID, if specified.
if options.preset and not _PRESETS_DICT.has_key(options.preset):
if options.preset and options.preset not in _PRESETS_DICT:
parser.error('Invalid preset: %s' % options.preset)
# Simple validation of the IP address, if supplied.
@ -129,8 +126,7 @@ def _ParseArgs():
try:
socket.inet_aton(options.target_ip)
except socket.error:
parser.error('Invalid IP address specified: %s' %
options.target_ip)
parser.error('Invalid IP address specified: %s' % options.target_ip)
# Convert port range into the desired tuple format.
try:
@ -138,8 +134,7 @@ def _ParseArgs():
options.port_range = tuple(
int(port) for port in options.port_range.split(','))
if len(options.port_range) != 2:
parser.error(
'Invalid port range specified, please specify two '
parser.error('Invalid port range specified, please specify two '
'integers separated by a comma.')
except ValueError:
parser.error('Invalid port range specified.')
@ -195,13 +190,12 @@ def main():
' Delay : %s ms\n'
' Packet loss : %s %%\n'
' Queue slots : %s', connection_config.receive_bw_kbps,
connection_config.receive_bw_kbps / 8,
connection_config.send_bw_kbps, connection_config.send_bw_kbps / 8,
connection_config.delay_ms, connection_config.packet_loss_percent,
connection_config.queue_slots)
connection_config.receive_bw_kbps / 8, connection_config.send_bw_kbps,
connection_config.send_bw_kbps / 8, connection_config.delay_ms,
connection_config.packet_loss_percent, connection_config.queue_slots)
logging.info('Affected traffic: IP traffic on ports %s-%s',
options.port_range[0], options.port_range[1])
raw_input('Press Enter to abort Network Emulation...')
input('Press Enter to abort Network Emulation...')
logging.info('Flushing all Dummynet rules...')
network_emulator.Cleanup()
logging.info('Completed Network Emulation.')

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -40,7 +41,7 @@ class NetworkEmulatorError(BaseException):
self.error = error
class NetworkEmulator(object):
class NetworkEmulator:
"""A network emulator that can constrain the network using Dummynet."""
def __init__(self, connection_config, port_range):
@ -70,19 +71,17 @@ class NetworkEmulator(object):
self._connection_config.queue_slots)
logging.debug('Created receive pipe: %s', receive_pipe_id)
send_pipe_id = self._CreateDummynetPipe(
self._connection_config.send_bw_kbps,
self._connection_config.delay_ms,
self._connection_config.send_bw_kbps, self._connection_config.delay_ms,
self._connection_config.packet_loss_percent,
self._connection_config.queue_slots)
logging.debug('Created send pipe: %s', send_pipe_id)
# Adding the rules will start the emulation.
incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any',
target_ip,
self._port_range)
target_ip, self._port_range)
logging.debug('Created incoming rule: %s', incoming_rule_id)
outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip,
'any', self._port_range)
outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, 'any',
self._port_range)
logging.debug('Created outgoing rule: %s', outgoing_rule_id)
@staticmethod
@ -95,18 +94,15 @@ class NetworkEmulator(object):
"""
try:
if os.getuid() != 0:
raise NetworkEmulatorError(
'You must run this script with sudo.')
except AttributeError:
raise NetworkEmulatorError('You must run this script with sudo.')
except AttributeError as permission_error:
# AttributeError will be raised on Windows.
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
raise NetworkEmulatorError(
'You must run this script with administrator'
' privileges.')
raise NetworkEmulatorError('You must run this script with administrator'
' privileges.') from permission_error
def _CreateDummynetRule(self, pipe_id, from_address, to_address,
port_range):
def _CreateDummynetRule(self, pipe_id, from_address, to_address, port_range):
"""Creates a network emulation rule and returns its ID.
Args:
@ -123,19 +119,17 @@ class NetworkEmulator(object):
"""
self._rule_counter += 100
add_part = [
'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from',
from_address, 'to', to_address
'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', from_address,
'to', to_address
]
_RunIpfwCommand(add_part +
['src-port', '%s-%s' % port_range],
_RunIpfwCommand(add_part + ['src-port', '%s-%s' % port_range],
'Failed to add Dummynet src-port rule.')
_RunIpfwCommand(add_part +
['dst-port', '%s-%s' % port_range],
_RunIpfwCommand(add_part + ['dst-port', '%s-%s' % port_range],
'Failed to add Dummynet dst-port rule.')
return self._rule_counter
def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms,
packet_loss_percent, queue_slots):
def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, packet_loss_percent,
queue_slots):
"""Creates a Dummynet pipe and return its ID.
Args:
@ -155,8 +149,7 @@ class NetworkEmulator(object):
]
error_message = 'Failed to create Dummynet pipe. '
if sys.platform.startswith('linux'):
error_message += (
'Make sure you have loaded the ipfw_mod.ko module to '
error_message += ('Make sure you have loaded the ipfw_mod.ko module to '
'your kernel (sudo insmod /path/to/ipfw_mod.ko).')
_RunIpfwCommand(cmd, error_message)
return self._pipe_counter
@ -197,6 +190,6 @@ def _RunIpfwCommand(command, fail_msg=None):
stderr=subprocess.PIPE)
output, error = process.communicate()
if process.returncode != 0:
raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode,
output, error)
raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, output,
error)
return output.strip()

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -8,12 +9,13 @@
# be found in the AUTHORS file in the root of the source tree.
import datetime
import httplib2
import json
import subprocess
import time
import zlib
import httplib2
from tracing.value import histogram
from tracing.value import histogram_set
from tracing.value.diagnostics import generic_set
@ -26,7 +28,6 @@ def _GenerateOauthToken():
if p.wait() == 0:
output = p.stdout.read()
return output.strip()
else:
raise RuntimeError(
'Error generating authentication token.\nStdout: %s\nStderr:%s' %
(p.stdout.read(), p.stderr.read()))
@ -54,7 +55,7 @@ def _SendHistogramSet(url, histograms):
else:
data = zlib.compress(serialized)
print 'Sending %d bytes to %s.' % (len(data), url + '/add_histograms')
print('Sending %d bytes to %s.' % (len(data), url + '/add_histograms'))
http = httplib2.Http()
response, content = http.request(url + '/add_histograms',
@ -97,12 +98,13 @@ def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
next_poll_time = datetime.datetime.now() + wait_polling_period
response, content = http.request(url + '/uploads/' + upload_token,
method='GET', headers=headers)
method='GET',
headers=headers)
print 'Upload state polled. Response: %r.' % content
print('Upload state polled. Response: %r.' % content)
if not oauth_refreshed and response.status == 403:
print 'Oauth token refreshed. Continue polling.'
print('Oauth token refreshed. Continue polling.')
headers = _CreateHeaders(_GenerateOauthToken())
oauth_refreshed = True
continue
@ -143,28 +145,27 @@ def _CheckFullUploadInfo(url, upload_token,
response, content = http.request(url + '/uploads/' + upload_token +
'?additional_info=measurements',
method='GET', headers=headers)
method='GET',
headers=headers)
if response.status != 200:
print 'Failed to reach the dashboard to get full upload info.'
print('Failed to reach the dashboard to get full upload info.')
return False
resp_json = json.loads(content)
print 'Full upload info: %s.' % json.dumps(resp_json, indent=4)
print('Full upload info: %s.' % json.dumps(resp_json, indent=4))
if 'measurements' in resp_json:
measurements_cnt = len(resp_json['measurements'])
not_completed_state_cnt = len([
m for m in resp_json['measurements']
if m['state'] != 'COMPLETED'
])
not_completed_state_cnt = len(
[m for m in resp_json['measurements'] if m['state'] != 'COMPLETED'])
if (measurements_cnt >= min_measurements_amount and
(not_completed_state_cnt / (measurements_cnt * 1.0) <=
max_failed_measurements_percent)):
print('Not all measurements were confirmed to upload. '
if (measurements_cnt >= min_measurements_amount
and (not_completed_state_cnt /
(measurements_cnt * 1.0) <= max_failed_measurements_percent)):
print(('Not all measurements were confirmed to upload. '
'Measurements count: %d, failed to upload or timed out: %d' %
(measurements_cnt, not_completed_state_cnt))
(measurements_cnt, not_completed_state_cnt)))
return True
return False
@ -207,9 +208,9 @@ def _AddBuildInfo(histograms, options):
reserved_infos.BUILD_URLS: options.build_page_url,
}
for k, v in common_diagnostics.items():
histograms.AddSharedDiagnosticToAllHistograms(
k.name, generic_set.GenericSet([v]))
for k, v in list(common_diagnostics.items()):
histograms.AddSharedDiagnosticToAllHistograms(k.name,
generic_set.GenericSet([v]))
def _DumpOutput(histograms, output_file):
@ -227,36 +228,35 @@ def UploadToDashboard(options):
response, content = _SendHistogramSet(options.dashboard_url, histograms)
if response.status != 200:
print('Upload failed with %d: %s\n\n%s' % (response.status,
response.reason, content))
print(('Upload failed with %d: %s\n\n%s' %
(response.status, response.reason, content)))
return 1
upload_token = json.loads(content).get('token')
if not options.wait_for_upload or not upload_token:
print('Received 200 from dashboard. ',
'Not waiting for the upload status confirmation.')
print(('Received 200 from dashboard. ',
'Not waiting for the upload status confirmation.'))
return 0
response, resp_json = _WaitForUploadConfirmation(
options.dashboard_url,
upload_token,
options.dashboard_url, upload_token,
datetime.timedelta(seconds=options.wait_timeout_sec),
datetime.timedelta(seconds=options.wait_polling_period_sec))
if ((resp_json and resp_json['state'] == 'COMPLETED') or
_CheckFullUploadInfo(options.dashboard_url, upload_token)):
print 'Upload completed.'
if ((resp_json and resp_json['state'] == 'COMPLETED')
or _CheckFullUploadInfo(options.dashboard_url, upload_token)):
print('Upload completed.')
return 0
if response.status != 200:
print('Upload status poll failed with %d: %s' % (response.status,
response.reason))
print(('Upload status poll failed with %d: %s' %
(response.status, response.reason)))
return 1
if resp_json['state'] == 'FAILED':
print 'Upload failed.'
print('Upload failed.')
return 1
print('Upload wasn\'t completed in a given time: %d seconds.' %
options.wait_timeout_sec)
print(('Upload wasn\'t completed in a given time: %d seconds.' %
options.wait_timeout_sec))
return 1

View file

@ -1,4 +1,5 @@
#!/usr/bin/env vpython
#!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -29,42 +30,58 @@ import google.protobuf # pylint: disable=unused-import
def _CreateParser():
parser = argparse.ArgumentParser()
parser.add_argument('--perf-dashboard-machine-group', required=True,
parser.add_argument('--perf-dashboard-machine-group',
required=True,
help='The "master" the bots are grouped under. This '
'string is the group in the perf dashboard path '
'group/bot/perf_id/metric/subtest.')
parser.add_argument('--bot', required=True,
parser.add_argument('--bot',
required=True,
help='The bot running the test (e.g. '
'webrtc-win-large-tests).')
parser.add_argument('--test-suite', required=True,
parser.add_argument('--test-suite',
required=True,
help='The key for the test in the dashboard (i.e. what '
'you select in the top-level test suite selector in '
'the dashboard)')
parser.add_argument('--webrtc-git-hash', required=True,
parser.add_argument('--webrtc-git-hash',
required=True,
help='webrtc.googlesource.com commit hash.')
parser.add_argument('--commit-position', type=int, required=True,
parser.add_argument('--commit-position',
type=int,
required=True,
help='Commit pos corresponding to the git hash.')
parser.add_argument('--build-page-url', required=True,
parser.add_argument('--build-page-url',
required=True,
help='URL to the build page for this build.')
parser.add_argument('--dashboard-url', required=True,
parser.add_argument('--dashboard-url',
required=True,
help='Which dashboard to use.')
parser.add_argument('--input-results-file', type=argparse.FileType(),
parser.add_argument('--input-results-file',
type=argparse.FileType(),
required=True,
help='A HistogramSet proto file with output from '
'WebRTC tests.')
parser.add_argument('--output-json-file', type=argparse.FileType('w'),
parser.add_argument('--output-json-file',
type=argparse.FileType('w'),
help='Where to write the output (for debugging).')
parser.add_argument('--outdir', required=True,
parser.add_argument('--outdir',
required=True,
help='Path to the local out/ dir (usually out/Default)')
parser.add_argument('--wait-for-upload', action='store_true',
parser.add_argument('--wait-for-upload',
action='store_true',
help='If specified, script will wait until the Chrome '
'perf dashboard confirms that the data was successfully '
'processed and uploaded')
parser.add_argument('--wait-timeout-sec', type=int, default=1200,
parser.add_argument('--wait-timeout-sec',
type=int,
default=1200,
help='Used only if wait-for-upload is True. Maximum '
'amount of time in seconds that the script will wait '
'for the confirmation.')
parser.add_argument('--wait-polling-period-sec', type=int, default=120,
parser.add_argument('--wait-polling-period-sec',
type=int,
default=120,
help='Used only if wait-for-upload is True. Status '
'will be requested from the Dashboard every '
'wait-polling-period-sec seconds.')
@ -82,8 +99,8 @@ def _ConfigurePythonPath(options):
# It would be better if there was an equivalent to py_binary in GN, but
# there's not.
script_dir = os.path.dirname(os.path.realpath(__file__))
checkout_root = os.path.abspath(
os.path.join(script_dir, os.pardir, os.pardir))
checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir,
os.pardir))
sys.path.insert(
0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing'))
@ -100,8 +117,7 @@ def _ConfigurePythonPath(options):
# Fail early in case the proto hasn't been built.
from tracing.proto import histogram_proto
if not histogram_proto.HAS_PROTO:
raise ImportError(
'Could not find histogram_pb2. You need to build the '
raise ImportError('Could not find histogram_pb2. You need to build the '
'webrtc_dashboard_upload target before invoking this '
'script. Expected to find '
'histogram_pb2.py in %s.' % histogram_proto_path)

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -61,7 +63,7 @@ def RunGnCheck(root_dir=None):
error = RunGnCommand(['gen', '--check', out_dir], root_dir)
finally:
shutil.rmtree(out_dir, ignore_errors=True)
return GN_ERROR_RE.findall(error) if error else []
return GN_ERROR_RE.findall(error.decode('utf-8')) if error else []
def RunNinjaCommand(args, root_dir=None):
@ -105,8 +107,8 @@ def GetCompilationCommand(filepath, gn_args, work_dir):
"""
gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir])
if gn_errors:
raise (RuntimeError('FYI, cannot complete check due to gn error:\n%s\n'
'Please open a bug.' % gn_errors))
raise RuntimeError('FYI, cannot complete check due to gn error:\n%s\n'
'Please open a bug.' % gn_errors)
# Needed for single file compilation.
commands = GetCompilationDb(work_dir)
@ -117,9 +119,9 @@ def GetCompilationCommand(filepath, gn_args, work_dir):
# Gather defines, include path and flags (such as -std=c++11).
try:
compilation_entry = commands[rel_path]
except KeyError:
except KeyError as not_found:
raise ValueError('%s: Not found in compilation database.\n'
'Please check the path.' % filepath)
'Please check the path.' % filepath) from not_found
command = compilation_entry['command'].split()
# Remove troublesome flags. May trigger an error otherwise.

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
@ -12,7 +12,6 @@ import re
import os
import unittest
#pylint: disable=relative-import
import build_helpers
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
@ -26,7 +25,7 @@ class GnCheckTest(unittest.TestCase):
expected_error = re.compile('ERROR Dependency cycle')
gn_output = build_helpers.RunGnCheck(test_dir)
self.assertEqual(1, len(gn_output))
self.assertRegexpMatches(gn_output[0], expected_error)
self.assertRegex(gn_output[0], expected_error)
if __name__ == '__main__':

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -9,7 +10,6 @@
import os
import re
import string
# TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile(
@ -66,9 +66,7 @@ def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path):
candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn')
if file_exists_check(candidate_build_gn_path):
return candidate_build_gn_path
else:
candidate_dir = os.path.abspath(
os.path.join(candidate_dir, os.pardir))
candidate_dir = os.path.abspath(os.path.join(candidate_dir, os.pardir))
raise NoBuildGnFoundError(
'No BUILD.gn file found for file: `{}`'.format(file_path))
@ -87,8 +85,8 @@ def IsHeaderInBuildGn(header_path, build_gn_path):
"""
target_abs_path = os.path.dirname(build_gn_path)
build_gn_content = _ReadFile(build_gn_path)
headers_in_build_gn = GetHeadersInBuildGnFileSources(
build_gn_content, target_abs_path)
headers_in_build_gn = GetHeadersInBuildGnFileSources(build_gn_content,
target_abs_path)
return header_path in headers_in_build_gn
@ -112,7 +110,7 @@ def GetHeadersInBuildGnFileSources(file_content, target_abs_path):
for source_file_match in SOURCE_FILE_RE.finditer(sources):
source_file = source_file_match.group('source_file')
if source_file.endswith('.h'):
source_file_tokens = string.split(source_file, '/')
source_file_tokens = source_file.split('/')
headers_in_sources.add(
os.path.join(target_abs_path, *source_file_tokens))
return headers_in_sources
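
The string.split(source_file, '/') call being replaced only exists in Python 2; here is a quick standalone check of the Python 3 form (paths are illustrative):

```
import os

target_abs_path = os.path.join('src', 'webrtc', 'base')  # illustrative
source_file = 'foo/bar/baz.h'                            # illustrative

# Python 2: string.split(source_file, '/'); Python 3 uses the str method.
source_file_tokens = source_file.split('/')
print(os.path.join(target_abs_path, *source_file_tokens))
# -> src/webrtc/base/foo/bar/baz.h (with the platform's separator)
```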

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -11,14 +12,11 @@ import os
import sys
import unittest
#pylint: disable=relative-import
import check_orphan_headers
def _GetRootBasedOnPlatform():
if sys.platform.startswith('win'):
return 'C:\\'
else:
return '/'
@ -31,42 +29,40 @@ class GetBuildGnPathFromFilePathTest(unittest.TestCase):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base',
'BUILD.gn')
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
'base', 'BUILD.gn')
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', 'base',
'BUILD.gn')
src_dir_path = _GetPath('home', 'projects', 'webrtc')
self.assertEqual(
expected_build_path,
check_orphan_headers.GetBuildGnPathFromFilePath(
file_path, file_exists, src_dir_path))
check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
src_dir_path))
def testGetBuildPathFromParentDirectory(self):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
expected_build_path = _GetPath('home', 'projects', 'webrtc',
'BUILD.gn')
expected_build_path = _GetPath('home', 'projects', 'webrtc', 'BUILD.gn')
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
'BUILD.gn')
src_dir_path = _GetPath('home', 'projects', 'webrtc')
self.assertEqual(
expected_build_path,
check_orphan_headers.GetBuildGnPathFromFilePath(
file_path, file_exists, src_dir_path))
check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
src_dir_path))
def testExceptionIfNoBuildGnFilesAreFound(self):
with self.assertRaises(check_orphan_headers.NoBuildGnFoundError):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
file_exists = lambda p: False
src_dir_path = _GetPath('home', 'projects', 'webrtc')
check_orphan_headers.GetBuildGnPathFromFilePath(
file_path, file_exists, src_dir_path)
check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
src_dir_path)
def testExceptionIfFilePathIsNotAnHeader(self):
with self.assertRaises(check_orphan_headers.WrongFileTypeError):
file_path = _GetPath('home', 'projects', 'webrtc', 'base',
'foo.cc')
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.cc')
file_exists = lambda p: False
src_dir_path = _GetPath('home', 'projects', 'webrtc')
check_orphan_headers.GetBuildGnPathFromFilePath(
file_path, file_exists, src_dir_path)
check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
src_dir_path)
class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase):


@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
@ -74,9 +74,8 @@ def _CheckBuildFile(build_file_path, packages):
subpackage = subpackages_match.group('subpackage')
source_file = subpackages_match.group('source_file')
if subpackage:
yield PackageBoundaryViolation(build_file_path,
target_name, source_file,
subpackage)
yield PackageBoundaryViolation(build_file_path, target_name,
source_file, subpackage)
def CheckPackageBoundaries(root_dir, build_files=None):
@ -88,9 +87,7 @@ def CheckPackageBoundaries(root_dir, build_files=None):
for build_file_path in build_files:
assert build_file_path.startswith(root_dir)
else:
build_files = [
os.path.join(package, 'BUILD.gn') for package in packages
]
build_files = [os.path.join(package, 'BUILD.gn') for package in packages]
messages = []
for build_file_path in build_files:
@ -126,8 +123,8 @@ def main(argv):
for i, message in enumerate(messages):
if i > 0:
print
print message
print()
print(message)
return bool(messages)


@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
@ -12,8 +12,7 @@ import ast
import os
import unittest
#pylint: disable=relative-import
from check_package_boundaries import CheckPackageBoundaries
import check_package_boundaries
MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.'
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
@ -32,12 +31,11 @@ class UnitTest(unittest.TestCase):
build_files = None
messages = []
for violation in CheckPackageBoundaries(test_dir, build_files):
build_file_path = os.path.relpath(violation.build_file_path,
test_dir)
for violation in check_package_boundaries.CheckPackageBoundaries(
test_dir, build_files):
build_file_path = os.path.relpath(violation.build_file_path, test_dir)
build_file_path = build_file_path.replace(os.path.sep, '/')
messages.append(
violation._replace(build_file_path=build_file_path))
messages.append(violation._replace(build_file_path=build_file_path))
expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl'))
self.assertListEqual(sorted(expected_messages), sorted(messages))
@ -46,12 +44,11 @@ class UnitTest(unittest.TestCase):
self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors'))
def testMultipleErrorsSingleTarget(self):
self._RunTest(
os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
self._RunTest(os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
def testMultipleErrorsMultipleTargets(self):
self._RunTest(
os.path.join(TESTDATA_DIR, 'multiple_errors_multiple_targets'))
self._RunTest(os.path.join(TESTDATA_DIR,
'multiple_errors_multiple_targets'))
def testCommonPrefix(self):
self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix'))
@ -67,7 +64,7 @@ class UnitTest(unittest.TestCase):
def testRelativeFilename(self):
test_dir = os.path.join(TESTDATA_DIR, 'all_build_files')
with self.assertRaises(AssertionError):
CheckPackageBoundaries(test_dir, ["BUILD.gn"])
check_package_boundaries.CheckPackageBoundaries(test_dir, ["BUILD.gn"])
if __name__ == '__main__':


@ -9,7 +9,7 @@ by WebRTC follow these instructions:
2. Launch the script:
```
$ python tools_webrtc/sslroots/generate_sslroots.py roots.pem
$ vpython3 tools_webrtc/sslroots/generate_sslroots.py roots.pem
```
3. Step 2 should have generated an ssl_roots.h file right next to roots.pem.
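For reference, the same steps can be scripted. The following is only a minimal sketch, assuming roots.pem has already been fetched from https://pki.goog/roots.pem into the current directory; the rtc_base/ssl_roots.h destination is inferred here from the generated RTC_BASE_SSL_ROOTS_H_ include guard, not stated by this README:
```python
#!/usr/bin/env vpython3
# Minimal sketch of the workflow above (not part of the tool itself).
# Assumptions: roots.pem is already in the current directory, and the
# generated header is destined for rtc_base/ssl_roots.h (inferred from
# its include guard).
import shutil
import subprocess

# Step 2: generate ssl_roots.h next to roots.pem.
subprocess.check_call(
    ['vpython3', 'tools_webrtc/sslroots/generate_sslroots.py', 'roots.pem'])

# Step 3: the header appears next to roots.pem; copy it into the tree.
shutil.copy('ssl_roots.h', 'rtc_base/ssl_roots.h')
```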


@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# -*- coding:utf-8 -*-
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
@ -17,7 +19,7 @@ Arguments:
generated file size.
"""
import commands
import subprocess
from optparse import OptionParser
import os
import re
@ -42,10 +44,7 @@ def main():
"""The main entrypoint."""
parser = OptionParser('usage %prog FILE')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
parser.add_option('-f',
'--full_cert',
dest='full_cert',
action='store_true')
parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true')
options, args = parser.parse_args()
if len(args) < 1:
parser.error('No crt file specified.')
@ -93,8 +92,8 @@ def _GenCFiles(root_dir, options):
_CHAR_TYPE, options)
certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE,
_CHAR_TYPE, options)
certificate_size_list = _CreateArraySectionHeader(
_CERTIFICATE_SIZE_VARIABLE, _INT_TYPE, options)
certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE,
_INT_TYPE, options)
for _, _, files in os.walk(root_dir):
for current_file in files:
@ -102,14 +101,12 @@ def _GenCFiles(root_dir, options):
prefix_length = len(_PREFIX)
length = len(current_file) - len(_EXTENSION)
label = current_file[prefix_length:length]
filtered_output, cert_size = _CreateCertSection(
root_dir, current_file, label, options)
filtered_output, cert_size = _CreateCertSection(root_dir, current_file,
label, options)
output_header_file.write(filtered_output + '\n\n\n')
if options.full_cert:
subject_name_list += _AddLabelToArray(
label, _SUBJECT_NAME_ARRAY)
public_key_list += _AddLabelToArray(
label, _PUBLIC_KEY_ARRAY)
subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY)
public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY)
certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
certificate_size_list += (' %s,\n') % (cert_size)
@ -135,7 +132,7 @@ def _Cleanup(root_dir):
def _CreateCertSection(root_dir, source_file, label, options):
command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file)
_PrintOutput(command, options)
output = commands.getstatusoutput(command)[1]
output = subprocess.getstatusoutput(command)[1]
renamed_output = output.replace('unsigned char XXX_',
'const unsigned char ' + label + '_')
filtered_output = ''
@ -155,8 +152,7 @@ def _CreateCertSection(root_dir, source_file, label, options):
def _CreateOutputHeader():
output = (
'/*\n'
output = ('/*\n'
' * Copyright 2004 The WebRTC Project Authors. All rights '
'reserved.\n'
' *\n'
@ -173,7 +169,7 @@ def _CreateOutputHeader():
'// This file is the root certificates in C form that are needed to'
' connect to\n// Google.\n\n'
'// It was generated with the following command line:\n'
'// > python tools_webrtc/sslroots/generate_sslroots.py'
'// > vpython3 tools_webrtc/sslroots/generate_sslroots.py'
'\n// https://pki.goog/roots.pem\n\n'
'// clang-format off\n'
'// Don\'t bother formatting generated code,\n'
@ -182,7 +178,7 @@ def _CreateOutputHeader():
def _CreateOutputFooter():
output = ('// clang-format on\n\n' '#endif // RTC_BASE_SSL_ROOTS_H_\n')
output = ('// clang-format on\n\n#endif // RTC_BASE_SSL_ROOTS_H_\n')
return output
@ -211,7 +207,7 @@ def _SafeName(original_file_name):
def _PrintOutput(output, options):
if options.verbose:
print output
print(output)
if __name__ == '__main__':
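A side note on the library swap in _CreateCertSection above: subprocess.getstatusoutput is the Python 3 stand-in for the removed commands module and returns the same (status, output) tuple shape. A minimal, self-contained sketch; the openssl invocation here is only illustrative, not the command the script builds:
```python
import subprocess

# Runs the command through a shell and returns (exit_status, captured_output),
# mirroring the old commands.getstatusoutput behaviour.
status, output = subprocess.getstatusoutput('openssl version')
if status == 0:
    print(output)
else:
    print('openssl not available (exit status %d)' % status)
```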


@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -37,8 +38,7 @@ def _RemovePreviousUpdateBranch():
if active_branch == UPDATE_BRANCH_NAME:
active_branch = 'master'
if UPDATE_BRANCH_NAME in branches:
logging.info('Removing previous update branch (%s)',
UPDATE_BRANCH_NAME)
logging.info('Removing previous update branch (%s)', UPDATE_BRANCH_NAME)
subprocess.check_call(['git', 'checkout', active_branch])
subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME])
logging.info('No branch to remove')
@ -46,9 +46,8 @@ def _RemovePreviousUpdateBranch():
def _GetLastAuthor():
"""Returns a string with the author of the last commit."""
author = subprocess.check_output(['git', 'log',
'-1',
'--pretty=format:"%an"']).splitlines()
author = subprocess.check_output(
['git', 'log', '-1', '--pretty=format:"%an"']).splitlines()
return author
@ -85,12 +84,8 @@ def _UpdateWebRTCVersion(filename):
# pylint: disable=line-too-long
new_content = re.sub(
r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}',
r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' % (d.year,
d.month,
d.day,
d.hour,
d.minute,
d.second),
r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' %
(d.year, d.month, d.day, d.hour, d.minute, d.second),
content,
flags=re.MULTILINE)
# pylint: enable=line-too-long
@ -109,12 +104,9 @@ def _LocalCommit():
logging.info('Committing changes locally.')
d = datetime.datetime.utcnow()
git_author = subprocess.check_output(['git', 'config',
'user.email']).strip()
commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).'
'\n\nBug: None')
commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute,
d.second)
commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, d.second)
subprocess.check_call(['git', 'add', '--update', '.'])
subprocess.check_call(['git', 'commit', '-m', commit_msg])
@ -127,8 +119,9 @@ def _UploadCL(commit_queue_mode):
- 1: Run trybots but do not submit to CQ.
- 0: Skip CQ, upload only.
"""
cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks',
'--bypass-watchlist']
cmd = [
'git', 'cl', 'upload', '--force', '--bypass-hooks', '--bypass-watchlist'
]
if commit_queue_mode >= 2:
logging.info('Sending the CL to the CQ...')
cmd.extend(['-o', 'label=Bot-Commit+1'])


@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -75,10 +77,6 @@ _EXTENSION_FLAGS = {
}
def PathExists(*args):
return os.path.exists(os.path.join(*args))
def FindWebrtcSrcFromFilename(filename):
"""Searches for the root of the WebRTC checkout.
@ -92,9 +90,9 @@ def FindWebrtcSrcFromFilename(filename):
"""
curdir = os.path.normpath(os.path.dirname(filename))
while not (os.path.basename(curdir) == 'src'
and PathExists(curdir, 'DEPS') and
(PathExists(curdir, '..', '.gclient')
or PathExists(curdir, '.git'))):
and os.path.exists(os.path.join(curdir, 'DEPS')) and
(os.path.exists(os.path.join(curdir, '..', '.gclient'))
or os.path.exists(os.path.join(curdir, '.git')))):
nextdir = os.path.normpath(os.path.join(curdir, '..'))
if nextdir == curdir:
return None
@ -248,8 +246,7 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
elif flag.startswith('-std'):
clang_flags.append(flag)
elif flag.startswith('-') and flag[1] in 'DWFfmO':
if (flag == '-Wno-deprecated-register' or
flag == '-Wno-header-guard'):
if flag in ['-Wno-deprecated-register', '-Wno-header-guard']:
# These flags cause libclang (3.3) to crash. Remove them until
# things are fixed.
continue
@ -266,8 +263,7 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
if sysroot_path.startswith('/'):
clang_flags.append(flag)
else:
abs_path = os.path.normpath(os.path.join(
out_dir, sysroot_path))
abs_path = os.path.normpath(os.path.join(out_dir, sysroot_path))
clang_flags.append('--sysroot=' + abs_path)
return clang_flags
@ -332,8 +328,7 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
if not clang_line:
return additional_flags
return GetClangOptionsFromCommandLine(clang_line, out_dir,
additional_flags)
return GetClangOptionsFromCommandLine(clang_line, out_dir, additional_flags)
def FlagsForFile(filename):
@ -349,8 +344,7 @@ def FlagsForFile(filename):
"""
abs_filename = os.path.abspath(filename)
webrtc_root = FindWebrtcSrcFromFilename(abs_filename)
clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root,
abs_filename)
clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, abs_filename)
# If clang_flags could not be determined, then assume that was due to a
# transient failure. Preventing YCM from caching the flags allows us to