tools_webrtc dir converted to py3 + top level PRESUBMIT script

Bug: webrtc:13607
Change-Id: Ib018e43ea977cc24dd71048e68e3343741f7f31b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/249083
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Jeremy Leconte <jleconte@google.com>
Commit-Queue: Christoffer Jansson <jansson@google.com>
Cr-Commit-Position: refs/heads/main@{#35953}
This commit is contained in:
Christoffer Jansson 2022-02-08 09:01:12 +01:00 committed by WebRTC LUCI CQ
parent b5cba85c2f
commit 4e8a773b4b
50 changed files with 4570 additions and 4673 deletions

View file

@ -33,10 +33,16 @@ wheel: <
# Used by tools_webrtc/perf/webrtc_dashboard_upload.py. # Used by tools_webrtc/perf/webrtc_dashboard_upload.py.
wheel: < wheel: <
name: "infra/python/wheels/httplib2-py2_py3" name: "infra/python/wheels/httplib2-py3"
version: "version:0.10.3" version: "version:0.19.1"
> >
wheel: <
name: "infra/python/wheels/pyparsing-py2_py3"
version: "version:2.4.7"
>
# Used by: # Used by:
# build/toolchain/win # build/toolchain/win
wheel: < wheel: <

File diff suppressed because it is too large Load diff

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright 2017 The WebRTC project authors. All Rights Reserved. # Copyright 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -8,6 +8,7 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
from __future__ import absolute_import
import os import os
import shutil import shutil
import tempfile import tempfile
@ -20,145 +21,145 @@ from presubmit_test_mocks import MockInputApi, MockOutputApi, MockFile, MockChan
class CheckBugEntryFieldTest(unittest.TestCase): class CheckBugEntryFieldTest(unittest.TestCase):
def testCommitMessageBugEntryWithNoError(self): def testCommitMessageBugEntryWithNoError(self):
mock_input_api = MockInputApi() mock_input_api = MockInputApi()
mock_output_api = MockOutputApi() mock_output_api = MockOutputApi()
mock_input_api.change = MockChange([], ['webrtc:1234']) mock_input_api.change = MockChange([], ['webrtc:1234'])
errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
mock_output_api) mock_output_api)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
def testCommitMessageBugEntryReturnError(self): def testCommitMessageBugEntryReturnError(self):
mock_input_api = MockInputApi() mock_input_api = MockInputApi()
mock_output_api = MockOutputApi() mock_output_api = MockOutputApi()
mock_input_api.change = MockChange([], ['webrtc:1234', 'webrtc=4321']) mock_input_api.change = MockChange([], ['webrtc:1234', 'webrtc=4321'])
errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
mock_output_api) mock_output_api)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual(('Bogus Bug entry: webrtc=4321. Please specify' self.assertEqual(('Bogus Bug entry: webrtc=4321. Please specify'
' the issue tracker prefix and the issue number,' ' the issue tracker prefix and the issue number,'
' separated by a colon, e.g. webrtc:123 or' ' separated by a colon, e.g. webrtc:123 or'
' chromium:12345.'), str(errors[0])) ' chromium:12345.'), str(errors[0]))
def testCommitMessageBugEntryWithoutPrefix(self): def testCommitMessageBugEntryWithoutPrefix(self):
mock_input_api = MockInputApi() mock_input_api = MockInputApi()
mock_output_api = MockOutputApi() mock_output_api = MockOutputApi()
mock_input_api.change = MockChange([], ['1234']) mock_input_api.change = MockChange([], ['1234'])
errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
mock_output_api) mock_output_api)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual(('Bug entry requires issue tracker prefix, ' self.assertEqual(('Bug entry requires issue tracker prefix, '
'e.g. webrtc:1234'), str(errors[0])) 'e.g. webrtc:1234'), str(errors[0]))
def testCommitMessageBugEntryIsNone(self): def testCommitMessageBugEntryIsNone(self):
mock_input_api = MockInputApi() mock_input_api = MockInputApi()
mock_output_api = MockOutputApi() mock_output_api = MockOutputApi()
mock_input_api.change = MockChange([], ['None']) mock_input_api.change = MockChange([], ['None'])
errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
mock_output_api) mock_output_api)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
def testCommitMessageBugEntrySupportInternalBugReference(self): def testCommitMessageBugEntrySupportInternalBugReference(self):
mock_input_api = MockInputApi() mock_input_api = MockInputApi()
mock_output_api = MockOutputApi() mock_output_api = MockOutputApi()
mock_input_api.change.BUG = 'b/12345' mock_input_api.change.BUG = 'b/12345'
errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
mock_output_api) mock_output_api)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
mock_input_api.change.BUG = 'b/12345, webrtc:1234' mock_input_api.change.BUG = 'b/12345, webrtc:1234'
errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api, errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
mock_output_api) mock_output_api)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
class CheckNewlineAtTheEndOfProtoFilesTest(unittest.TestCase): class CheckNewlineAtTheEndOfProtoFilesTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.tmp_dir = tempfile.mkdtemp() self.tmp_dir = tempfile.mkdtemp()
self.proto_file_path = os.path.join(self.tmp_dir, 'foo.proto') self.proto_file_path = os.path.join(self.tmp_dir, 'foo.proto')
self.input_api = MockInputApi() self.input_api = MockInputApi()
self.output_api = MockOutputApi() self.output_api = MockOutputApi()
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tmp_dir, ignore_errors=True) shutil.rmtree(self.tmp_dir, ignore_errors=True)
def testErrorIfProtoFileDoesNotEndWithNewline(self): def testErrorIfProtoFileDoesNotEndWithNewline(self):
self._GenerateProtoWithoutNewlineAtTheEnd() self._GenerateProtoWithoutNewlineAtTheEnd()
self.input_api.files = [MockFile(self.proto_file_path)] self.input_api.files = [MockFile(self.proto_file_path)]
errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles( errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles(
self.input_api, self.output_api, lambda x: True) self.input_api, self.output_api, lambda x: True)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual( self.assertEqual(
'File %s must end with exactly one newline.' % 'File %s must end with exactly one newline.' % self.proto_file_path,
self.proto_file_path, str(errors[0])) str(errors[0]))
def testNoErrorIfProtoFileEndsWithNewline(self): def testNoErrorIfProtoFileEndsWithNewline(self):
self._GenerateProtoWithNewlineAtTheEnd() self._GenerateProtoWithNewlineAtTheEnd()
self.input_api.files = [MockFile(self.proto_file_path)] self.input_api.files = [MockFile(self.proto_file_path)]
errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles( errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles(
self.input_api, self.output_api, lambda x: True) self.input_api, self.output_api, lambda x: True)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
def _GenerateProtoWithNewlineAtTheEnd(self): def _GenerateProtoWithNewlineAtTheEnd(self):
with open(self.proto_file_path, 'w') as f: with open(self.proto_file_path, 'w') as f:
f.write( f.write(
textwrap.dedent(""" textwrap.dedent("""
syntax = "proto2"; syntax = "proto2";
option optimize_for = LITE_RUNTIME; option optimize_for = LITE_RUNTIME;
package webrtc.audioproc; package webrtc.audioproc;
""")) """))
def _GenerateProtoWithoutNewlineAtTheEnd(self): def _GenerateProtoWithoutNewlineAtTheEnd(self):
with open(self.proto_file_path, 'w') as f: with open(self.proto_file_path, 'w') as f:
f.write( f.write(
textwrap.dedent(""" textwrap.dedent("""
syntax = "proto2"; syntax = "proto2";
option optimize_for = LITE_RUNTIME; option optimize_for = LITE_RUNTIME;
package webrtc.audioproc;""")) package webrtc.audioproc;"""))
class CheckNoMixingSourcesTest(unittest.TestCase): class CheckNoMixingSourcesTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.tmp_dir = tempfile.mkdtemp() self.tmp_dir = tempfile.mkdtemp()
self.file_path = os.path.join(self.tmp_dir, 'BUILD.gn') self.file_path = os.path.join(self.tmp_dir, 'BUILD.gn')
self.input_api = MockInputApi() self.input_api = MockInputApi()
self.output_api = MockOutputApi() self.output_api = MockOutputApi()
def tearDown(self): def tearDown(self):
shutil.rmtree(self.tmp_dir, ignore_errors=True) shutil.rmtree(self.tmp_dir, ignore_errors=True)
def testErrorIfCAndCppAreMixed(self): def testErrorIfCAndCppAreMixed(self):
self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.cc', 'bar.h']) self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.cc', 'bar.h'])
def testErrorIfCAndObjCAreMixed(self): def testErrorIfCAndObjCAreMixed(self):
self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.m', 'bar.h']) self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.m', 'bar.h'])
def testErrorIfCAndObjCppAreMixed(self): def testErrorIfCAndObjCppAreMixed(self):
self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.mm', 'bar.h']) self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.mm', 'bar.h'])
def testErrorIfCppAndObjCAreMixed(self): def testErrorIfCppAndObjCAreMixed(self):
self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.m', 'bar.h']) self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.m', 'bar.h'])
def testErrorIfCppAndObjCppAreMixed(self): def testErrorIfCppAndObjCppAreMixed(self):
self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.mm', 'bar.h']) self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.mm', 'bar.h'])
def testNoErrorIfOnlyC(self): def testNoErrorIfOnlyC(self):
self._AssertNumberOfErrorsWithSources(0, ['foo.c', 'bar.c', 'bar.h']) self._AssertNumberOfErrorsWithSources(0, ['foo.c', 'bar.c', 'bar.h'])
def testNoErrorIfOnlyCpp(self): def testNoErrorIfOnlyCpp(self):
self._AssertNumberOfErrorsWithSources(0, ['foo.cc', 'bar.cc', 'bar.h']) self._AssertNumberOfErrorsWithSources(0, ['foo.cc', 'bar.cc', 'bar.h'])
def testNoErrorIfOnlyObjC(self): def testNoErrorIfOnlyObjC(self):
self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.m', 'bar.h']) self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.m', 'bar.h'])
def testNoErrorIfOnlyObjCpp(self): def testNoErrorIfOnlyObjCpp(self):
self._AssertNumberOfErrorsWithSources(0, ['foo.mm', 'bar.mm', 'bar.h']) self._AssertNumberOfErrorsWithSources(0, ['foo.mm', 'bar.mm', 'bar.h'])
def testNoErrorIfObjCAndObjCppAreMixed(self): def testNoErrorIfObjCAndObjCppAreMixed(self):
self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.mm', 'bar.h']) self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.mm', 'bar.h'])
def testNoErrorIfSourcesAreInExclusiveIfBranches(self): def testNoErrorIfSourcesAreInExclusiveIfBranches(self):
self._GenerateBuildFile( self._GenerateBuildFile(
textwrap.dedent(""" textwrap.dedent("""
rtc_library("bar_foo") { rtc_library("bar_foo") {
if (is_win) { if (is_win) {
sources = [ sources = [
@ -184,15 +185,15 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
} }
} }
""")) """))
self.input_api.files = [MockFile(self.file_path)] self.input_api.files = [MockFile(self.file_path)]
errors = PRESUBMIT.CheckNoMixingSources(self.input_api, errors = PRESUBMIT.CheckNoMixingSources(self.input_api,
[MockFile(self.file_path)], [MockFile(self.file_path)],
self.output_api) self.output_api)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
def testErrorIfSourcesAreNotInExclusiveIfBranches(self): def testErrorIfSourcesAreNotInExclusiveIfBranches(self):
self._GenerateBuildFile( self._GenerateBuildFile(
textwrap.dedent(""" textwrap.dedent("""
rtc_library("bar_foo") { rtc_library("bar_foo") {
if (is_win) { if (is_win) {
sources = [ sources = [
@ -224,23 +225,22 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
} }
} }
""")) """))
self.input_api.files = [MockFile(self.file_path)] self.input_api.files = [MockFile(self.file_path)]
errors = PRESUBMIT.CheckNoMixingSources(self.input_api, errors = PRESUBMIT.CheckNoMixingSources(self.input_api,
[MockFile(self.file_path)], [MockFile(self.file_path)],
self.output_api) self.output_api)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertTrue('bar.cc' in str(errors[0])) self.assertTrue('bar.cc' in str(errors[0]))
self.assertTrue('bar.mm' in str(errors[0])) self.assertTrue('bar.mm' in str(errors[0]))
self.assertTrue('foo.cc' in str(errors[0])) self.assertTrue('foo.cc' in str(errors[0]))
self.assertTrue('foo.mm' in str(errors[0])) self.assertTrue('foo.mm' in str(errors[0]))
self.assertTrue('bar.m' in str(errors[0])) self.assertTrue('bar.m' in str(errors[0]))
self.assertTrue('bar.c' in str(errors[0])) self.assertTrue('bar.c' in str(errors[0]))
def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources): def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources):
assert len( assert len(sources) == 3, 'This function accepts a list of 3 source files'
sources) == 3, 'This function accepts a list of 3 source files' self._GenerateBuildFile(
self._GenerateBuildFile( textwrap.dedent("""
textwrap.dedent("""
rtc_static_library("bar_foo") { rtc_static_library("bar_foo") {
sources = [ sources = [
"%s", "%s",
@ -256,84 +256,76 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
], ],
} }
""" % (tuple(sources) * 2))) """ % (tuple(sources) * 2)))
self.input_api.files = [MockFile(self.file_path)] self.input_api.files = [MockFile(self.file_path)]
errors = PRESUBMIT.CheckNoMixingSources(self.input_api, errors = PRESUBMIT.CheckNoMixingSources(self.input_api,
[MockFile(self.file_path)], [MockFile(self.file_path)],
self.output_api) self.output_api)
self.assertEqual(number_of_errors, len(errors)) self.assertEqual(number_of_errors, len(errors))
if number_of_errors == 1: if number_of_errors == 1:
for source in sources: for source in sources:
if not source.endswith('.h'): if not source.endswith('.h'):
self.assertTrue(source in str(errors[0])) self.assertTrue(source in str(errors[0]))
def _GenerateBuildFile(self, content): def _GenerateBuildFile(self, content):
with open(self.file_path, 'w') as f: with open(self.file_path, 'w') as f:
f.write(content) f.write(content)
class CheckAssertUsageTest(unittest.TestCase): class CheckAssertUsageTest(unittest.TestCase):
def setUp(self): def setUp(self):
self.input_api = MockInputApi() self.input_api = MockInputApi()
self.output_api = MockOutputApi() self.output_api = MockOutputApi()
self._content_with_assert = [ self._content_with_assert = ['void Foo() {', ' assert(true);', '}']
'void Foo() {', self._content_without_assert = ['void Foo() {', ' RTC_CHECK(true);', '}']
' assert(true);',
'}'
]
self._content_without_assert = [
'void Foo() {',
' RTC_CHECK(true);',
'}'
]
def testDetectsAssertInCcFile(self): def testDetectsAssertInCcFile(self):
self.input_api.files = [ self.input_api.files = [
MockFile('with_assert.cc', self._content_with_assert), MockFile('with_assert.cc', self._content_with_assert),
MockFile('without_assert.cc', self._content_without_assert), MockFile('without_assert.cc', self._content_without_assert),
] ]
errors = PRESUBMIT.CheckAssertUsage( errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.input_api, self.output_api, lambda x: True) self.output_api, lambda x: True)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual('with_assert.cc', errors[0].items[0]) self.assertEqual('with_assert.cc', errors[0].items[0])
def testDetectsAssertInHeaderFile(self): def testDetectsAssertInHeaderFile(self):
self.input_api.files = [ self.input_api.files = [
MockFile('with_assert.h', self._content_with_assert), MockFile('with_assert.h', self._content_with_assert),
MockFile('without_assert.h', self._content_without_assert), MockFile('without_assert.h', self._content_without_assert),
] ]
errors = PRESUBMIT.CheckAssertUsage( errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.input_api, self.output_api, lambda x: True) self.output_api, lambda x: True)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual('with_assert.h', errors[0].items[0]) self.assertEqual('with_assert.h', errors[0].items[0])
def testDetectsAssertInObjCFile(self): def testDetectsAssertInObjCFile(self):
self.input_api.files = [ self.input_api.files = [
MockFile('with_assert.m', self._content_with_assert), MockFile('with_assert.m', self._content_with_assert),
MockFile('without_assert.m', self._content_without_assert), MockFile('without_assert.m', self._content_without_assert),
] ]
errors = PRESUBMIT.CheckAssertUsage( errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.input_api, self.output_api, lambda x: True) self.output_api, lambda x: True)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual('with_assert.m', errors[0].items[0]) self.assertEqual('with_assert.m', errors[0].items[0])
def testDetectsAssertInObjCppFile(self): def testDetectsAssertInObjCppFile(self):
self.input_api.files = [ self.input_api.files = [
MockFile('with_assert.mm', self._content_with_assert), MockFile('with_assert.mm', self._content_with_assert),
MockFile('without_assert.mm', self._content_without_assert), MockFile('without_assert.mm', self._content_without_assert),
] ]
errors = PRESUBMIT.CheckAssertUsage( errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.input_api, self.output_api, lambda x: True) self.output_api, lambda x: True)
self.assertEqual(1, len(errors)) self.assertEqual(1, len(errors))
self.assertEqual('with_assert.mm', errors[0].items[0]) self.assertEqual('with_assert.mm', errors[0].items[0])
def testDoesntDetectAssertInOtherFiles(self): def testDoesntDetectAssertInOtherFiles(self):
self.input_api.files = [ self.input_api.files = [
MockFile('with_assert.cpp', self._content_with_assert), MockFile('with_assert.cpp', self._content_with_assert),
] ]
errors = PRESUBMIT.CheckAssertUsage( errors = PRESUBMIT.CheckAssertUsage(self.input_api,
self.input_api, self.output_api, lambda x: True) self.output_api, lambda x: True)
self.assertEqual(0, len(errors)) self.assertEqual(0, len(errors))
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -9,135 +11,131 @@
# This file is inspired to [1]. # This file is inspired to [1].
# [1] - https://cs.chromium.org/chromium/src/PRESUBMIT_test_mocks.py # [1] - https://cs.chromium.org/chromium/src/PRESUBMIT_test_mocks.py
from __future__ import absolute_import
import os.path import os.path
import re import re
class MockInputApi(object): class MockInputApi:
"""Mock class for the InputApi class. """Mock class for the InputApi class.
This class can be used for unittests for presubmit by initializing the files This class can be used for unittests for presubmit by initializing the files
attribute as the list of changed files. attribute as the list of changed files.
""" """
def __init__(self): def __init__(self):
self.change = MockChange([], []) self.change = MockChange([], [])
self.files = [] self.files = []
self.presubmit_local_path = os.path.dirname(__file__) self.presubmit_local_path = os.path.dirname(__file__)
self.re = re # pylint: disable=invalid-name self.re = re # pylint: disable=invalid-name
def AffectedSourceFiles(self, file_filter=None): def AffectedSourceFiles(self, file_filter=None):
return self.AffectedFiles(file_filter=file_filter) return self.AffectedFiles(file_filter=file_filter)
def AffectedFiles(self, file_filter=None, include_deletes=False): def AffectedFiles(self, file_filter=None, include_deletes=False):
for f in self.files: for f in self.files:
if file_filter and not file_filter(f): if file_filter and not file_filter(f):
continue continue
if not include_deletes and f.Action() == 'D': if not include_deletes and f.Action() == 'D':
continue continue
yield f yield f
@classmethod @classmethod
def FilterSourceFile(cls, def FilterSourceFile(cls, affected_file, files_to_check=(), files_to_skip=()):
affected_file, # pylint: disable=unused-argument
files_to_check=(), return True
files_to_skip=()):
# pylint: disable=unused-argument
return True
def PresubmitLocalPath(self): def PresubmitLocalPath(self):
return self.presubmit_local_path return self.presubmit_local_path
def ReadFile(self, affected_file, mode='rU'): def ReadFile(self, affected_file, mode='r'):
filename = affected_file.AbsoluteLocalPath() filename = affected_file.AbsoluteLocalPath()
for f in self.files: for f in self.files:
if f.LocalPath() == filename: if f.LocalPath() == filename:
with open(filename, mode) as f: with open(filename, mode) as f:
return f.read() return f.read()
# Otherwise, file is not in our mock API. # Otherwise, file is not in our mock API.
raise IOError, "No such file or directory: '%s'" % filename raise IOError("No such file or directory: '%s'" % filename)
class MockOutputApi(object): class MockOutputApi:
"""Mock class for the OutputApi class. """Mock class for the OutputApi class.
An instance of this class can be passed to presubmit unittests for outputing An instance of this class can be passed to presubmit unittests for outputing
various types of results. various types of results.
""" """
class PresubmitResult(object): class PresubmitResult:
def __init__(self, message, items=None, long_text=''): def __init__(self, message, items=None, long_text=''):
self.message = message self.message = message
self.items = items self.items = items
self.long_text = long_text self.long_text = long_text
def __repr__(self): def __repr__(self):
return self.message return self.message
class PresubmitError(PresubmitResult): class PresubmitError(PresubmitResult):
def __init__(self, message, items=None, long_text=''): def __init__(self, message, items=None, long_text=''):
MockOutputApi.PresubmitResult.__init__(self, message, items, MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
long_text) self.type = 'error'
self.type = 'error'
class MockChange(object): class MockChange:
"""Mock class for Change class. """Mock class for Change class.
This class can be used in presubmit unittests to mock the query of the This class can be used in presubmit unittests to mock the query of the
current change. current change.
""" """
def __init__(self, changed_files, bugs_from_description, tags=None): def __init__(self, changed_files, bugs_from_description, tags=None):
self._changed_files = changed_files self._changed_files = changed_files
self._bugs_from_description = bugs_from_description self._bugs_from_description = bugs_from_description
self.tags = dict() if not tags else tags self.tags = dict() if not tags else tags
def BugsFromDescription(self): def BugsFromDescription(self):
return self._bugs_from_description return self._bugs_from_description
def __getattr__(self, attr): def __getattr__(self, attr):
"""Return tags directly as attributes on the object.""" """Return tags directly as attributes on the object."""
if not re.match(r"^[A-Z_]*$", attr): if not re.match(r"^[A-Z_]*$", attr):
raise AttributeError(self, attr) raise AttributeError(self, attr)
return self.tags.get(attr) return self.tags.get(attr)
class MockFile(object): class MockFile:
"""Mock class for the File class. """Mock class for the File class.
This class can be used to form the mock list of changed files in This class can be used to form the mock list of changed files in
MockInputApi for presubmit unittests. MockInputApi for presubmit unittests.
""" """
def __init__(self, def __init__(self,
local_path, local_path,
new_contents=None, new_contents=None,
old_contents=None, old_contents=None,
action='A'): action='A'):
if new_contents is None: if new_contents is None:
new_contents = ["Data"] new_contents = ["Data"]
self._local_path = local_path self._local_path = local_path
self._new_contents = new_contents self._new_contents = new_contents
self._changed_contents = [(i + 1, l) self._changed_contents = [(i + 1, l) for i, l in enumerate(new_contents)]
for i, l in enumerate(new_contents)] self._action = action
self._action = action self._old_contents = old_contents
self._old_contents = old_contents
def Action(self): def Action(self):
return self._action return self._action
def ChangedContents(self): def ChangedContents(self):
return self._changed_contents return self._changed_contents
def NewContents(self): def NewContents(self):
return self._new_contents return self._new_contents
def LocalPath(self): def LocalPath(self):
return self._local_path return self._local_path
def AbsoluteLocalPath(self): def AbsoluteLocalPath(self):
return self._local_path return self._local_path
def OldContents(self): def OldContents(self):
return self._old_contents return self._old_contents

View file

@ -28,6 +28,7 @@ disable=
exec-used, exec-used,
fixme, fixme,
import-error, import-error,
import-outside-toplevel,
missing-docstring, missing-docstring,
no-init, no-init,
no-member, no-member,

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -6,45 +8,48 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True
def _LicenseHeader(input_api): def _LicenseHeader(input_api):
"""Returns the license header regexp.""" """Returns the license header regexp."""
# Accept any year number from 2003 to the current year # Accept any year number from 2003 to the current year
current_year = int(input_api.time.strftime('%Y')) current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1))) allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')' years_re = '(' + '|'.join(allowed_years) + ')'
license_header = ( license_header = (
r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
r'All [Rr]ights [Rr]eserved\.\n' r'All [Rr]ights [Rr]eserved\.\n'
r'.*?\n' r'.*?\n'
r'.*? Use of this source code is governed by a BSD-style license\n' r'.*? Use of this source code is governed by a BSD-style license\n'
r'.*? that can be found in the LICENSE file in the root of the source\n' r'.*? that can be found in the LICENSE file in the root of the source\n'
r'.*? tree\. An additional intellectual property rights grant can be ' r'.*? tree\. An additional intellectual property rights grant can be '
r'found\n' r'found\n'
r'.*? in the file PATENTS\. All contributing project authors may\n' r'.*? in the file PATENTS\. All contributing project authors may\n'
r'.*? be found in the AUTHORS file in the root of the source tree\.\n' r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
) % { ) % {
'year': years_re, 'year': years_re,
} }
return license_header return license_header
def _CommonChecks(input_api, output_api): def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit.""" """Checks common to both upload and commit."""
results = [] results = []
results.extend( results.extend(
input_api.canned_checks.CheckLicense(input_api, output_api, input_api.canned_checks.CheckLicense(input_api, output_api,
_LicenseHeader(input_api))) _LicenseHeader(input_api)))
return results return results
def CheckChangeOnUpload(input_api, output_api): def CheckChangeOnUpload(input_api, output_api):
results = [] results = []
results.extend(_CommonChecks(input_api, output_api)) results.extend(_CommonChecks(input_api, output_api))
return results return results
def CheckChangeOnCommit(input_api, output_api): def CheckChangeOnCommit(input_api, output_api):
results = [] results = []
results.extend(_CommonChecks(input_api, output_api)) results.extend(_CommonChecks(input_api, output_api))
return results return results

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -51,172 +51,167 @@ import find_depot_tools
def _ParseArgs(): def _ParseArgs():
parser = argparse.ArgumentParser(description='libwebrtc.aar generator.') parser = argparse.ArgumentParser(description='libwebrtc.aar generator.')
parser.add_argument( parser.add_argument(
'--build-dir', '--build-dir',
type=os.path.abspath, type=os.path.abspath,
help='Build dir. By default will create and use temporary dir.') help='Build dir. By default will create and use temporary dir.')
parser.add_argument('--output', parser.add_argument('--output',
default='libwebrtc.aar', default='libwebrtc.aar',
type=os.path.abspath, type=os.path.abspath,
help='Output file of the script.') help='Output file of the script.')
parser.add_argument( parser.add_argument('--arch',
'--arch', default=DEFAULT_ARCHS,
default=DEFAULT_ARCHS, nargs='*',
nargs='*', help='Architectures to build. Defaults to %(default)s.')
help='Architectures to build. Defaults to %(default)s.') parser.add_argument('--use-goma',
parser.add_argument('--use-goma', action='store_true',
action='store_true', default=False,
default=False, help='Use goma.')
help='Use goma.') parser.add_argument('--verbose',
parser.add_argument('--verbose', action='store_true',
action='store_true', default=False,
default=False, help='Debug logging.')
help='Debug logging.') parser.add_argument(
parser.add_argument( '--extra-gn-args',
'--extra-gn-args', default=[],
default=[], nargs='*',
nargs='*', help="""Additional GN arguments to be used during Ninja generation.
help="""Additional GN arguments to be used during Ninja generation.
These are passed to gn inside `--args` switch and These are passed to gn inside `--args` switch and
applied after any other arguments and will applied after any other arguments and will
override any values defined by the script. override any values defined by the script.
Example of building debug aar file: Example of building debug aar file:
build_aar.py --extra-gn-args='is_debug=true'""") build_aar.py --extra-gn-args='is_debug=true'""")
parser.add_argument( parser.add_argument(
'--extra-ninja-switches', '--extra-ninja-switches',
default=[], default=[],
nargs='*', nargs='*',
help="""Additional Ninja switches to be used during compilation. help="""Additional Ninja switches to be used during compilation.
These are applied after any other Ninja switches. These are applied after any other Ninja switches.
Example of enabling verbose Ninja output: Example of enabling verbose Ninja output:
build_aar.py --extra-ninja-switches='-v'""") build_aar.py --extra-ninja-switches='-v'""")
parser.add_argument( parser.add_argument(
'--extra-gn-switches', '--extra-gn-switches',
default=[], default=[],
nargs='*', nargs='*',
help="""Additional GN switches to be used during compilation. help="""Additional GN switches to be used during compilation.
These are applied after any other GN switches. These are applied after any other GN switches.
Example of enabling verbose GN output: Example of enabling verbose GN output:
build_aar.py --extra-gn-switches='-v'""") build_aar.py --extra-gn-switches='-v'""")
return parser.parse_args() return parser.parse_args()
def _RunGN(args):
  """Invokes gn via the depot_tools wrapper with the given argument list."""
  gn_cmd = [
      sys.executable,
      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py')
  ] + list(args)
  logging.debug('Running: %r', gn_cmd)
  subprocess.check_call(gn_cmd)
def _RunNinja(output_directory, args):
  """Invokes ninja from depot_tools in output_directory with extra args."""
  ninja_cmd = [
      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'),
      '-C',
      output_directory,
  ] + list(args)
  logging.debug('Running: %r', ninja_cmd)
  subprocess.check_call(ninja_cmd)
def _EncodeForGN(value): def _EncodeForGN(value):
"""Encodes value as a GN literal.""" """Encodes value as a GN literal."""
if isinstance(value, str): if isinstance(value, str):
return '"' + value + '"' return '"' + value + '"'
elif isinstance(value, bool): if isinstance(value, bool):
return repr(value).lower() return repr(value).lower()
else: return repr(value)
return repr(value)
def _GetOutputDirectory(build_dir, arch): def _GetOutputDirectory(build_dir, arch):
"""Returns the GN output directory for the target architecture.""" """Returns the GN output directory for the target architecture."""
return os.path.join(build_dir, arch) return os.path.join(build_dir, arch)
def _GetTargetCpu(arch): def _GetTargetCpu(arch):
"""Returns target_cpu for the GN build with the given architecture.""" """Returns target_cpu for the GN build with the given architecture."""
if arch in ['armeabi', 'armeabi-v7a']: if arch in ['armeabi', 'armeabi-v7a']:
return 'arm' return 'arm'
elif arch == 'arm64-v8a': if arch == 'arm64-v8a':
return 'arm64' return 'arm64'
elif arch == 'x86': if arch == 'x86':
return 'x86' return 'x86'
elif arch == 'x86_64': if arch == 'x86_64':
return 'x64' return 'x64'
else: raise Exception('Unknown arch: ' + arch)
raise Exception('Unknown arch: ' + arch)
def _GetArmVersion(arch): def _GetArmVersion(arch):
"""Returns arm_version for the GN build with the given architecture.""" """Returns arm_version for the GN build with the given architecture."""
if arch == 'armeabi': if arch == 'armeabi':
return 6 return 6
elif arch == 'armeabi-v7a': if arch == 'armeabi-v7a':
return 7 return 7
elif arch in ['arm64-v8a', 'x86', 'x86_64']: if arch in ['arm64-v8a', 'x86', 'x86_64']:
return None return None
else: raise Exception('Unknown arch: ' + arch)
raise Exception('Unknown arch: ' + arch)
def Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches,
          extra_ninja_switches):
  """Generates the target architecture with GN and compiles it with ninja."""
  logging.info('Building: %s', arch)
  out_dir = _GetOutputDirectory(build_dir, arch)

  gn_args = {
      'target_os': 'android',
      'is_debug': False,
      'is_component_build': False,
      'rtc_include_tests': False,
      'target_cpu': _GetTargetCpu(arch),
      'use_goma': use_goma,
  }
  arm_version = _GetArmVersion(arch)
  if arm_version:
    gn_args['arm_version'] = arm_version

  # Encode the args dict as GN literals; caller-provided raw args come last
  # so they can override anything set above.
  encoded_args = [k + '=' + _EncodeForGN(v) for k, v in gn_args.items()]
  gn_switches = [
      'gen', out_dir, '--args=' + ' '.join(encoded_args + extra_gn_args)
  ]
  gn_switches.extend(extra_gn_switches)
  _RunGN(gn_switches)

  ninja_args = list(TARGETS)
  if use_goma:
    # Goma allows a much higher parallelism than local CPUs would.
    ninja_args.extend(['-j', '200'])
  ninja_args.extend(extra_ninja_switches)
  _RunNinja(out_dir, ninja_args)
def CollectCommon(aar_file, build_dir, arch):
  """Collects architecture independent files into the .aar-archive."""
  logging.info('Collecting common files.')
  out_dir = _GetOutputDirectory(build_dir, arch)
  # The manifest and the java classes are identical across ABIs.
  aar_file.write(MANIFEST_FILE, 'AndroidManifest.xml')
  aar_file.write(os.path.join(out_dir, JAR_FILE), 'classes.jar')
def Collect(aar_file, build_dir, arch):
  """Collects architecture specific files into the .aar-archive."""
  logging.info('Collecting: %s', arch)
  out_dir = _GetOutputDirectory(build_dir, arch)
  abi_dir = os.path.join('jni', arch)
  for so_file in NEEDED_SO_FILES:
    src_path = os.path.join(out_dir, so_file)
    dst_path = os.path.join(abi_dir, so_file)
    aar_file.write(src_path, dst_path)
def GenerateLicenses(output_dir, build_dir, archs):
  """Generates the license text for all built targets into output_dir."""
  out_dirs = [_GetOutputDirectory(build_dir, arch) for arch in archs]
  LicenseBuilder(out_dirs, TARGETS).GenerateLicenseText(output_dir)
def BuildAar(archs, def BuildAar(archs,
@ -226,35 +221,35 @@ def BuildAar(archs,
ext_build_dir=None, ext_build_dir=None,
extra_gn_switches=None, extra_gn_switches=None,
extra_ninja_switches=None): extra_ninja_switches=None):
extra_gn_args = extra_gn_args or [] extra_gn_args = extra_gn_args or []
extra_gn_switches = extra_gn_switches or [] extra_gn_switches = extra_gn_switches or []
extra_ninja_switches = extra_ninja_switches or [] extra_ninja_switches = extra_ninja_switches or []
build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp() build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp()
for arch in archs:
Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches,
extra_ninja_switches)
with zipfile.ZipFile(output_file, 'w') as aar_file:
# Architecture doesn't matter here, arbitrarily using the first one.
CollectCommon(aar_file, build_dir, archs[0])
for arch in archs: for arch in archs:
Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches, Collect(aar_file, build_dir, arch)
extra_ninja_switches)
with zipfile.ZipFile(output_file, 'w') as aar_file: license_dir = os.path.dirname(os.path.realpath(output_file))
# Architecture doesn't matter here, arbitrarily using the first one. GenerateLicenses(license_dir, build_dir, archs)
CollectCommon(aar_file, build_dir, archs[0])
for arch in archs:
Collect(aar_file, build_dir, arch)
license_dir = os.path.dirname(os.path.realpath(output_file)) if not ext_build_dir:
GenerateLicenses(license_dir, build_dir, archs) shutil.rmtree(build_dir, True)
if not ext_build_dir:
shutil.rmtree(build_dir, True)
def main():
  """Parses flags, configures logging and builds the AAR."""
  options = _ParseArgs()
  log_level = logging.DEBUG if options.verbose else logging.INFO
  logging.basicConfig(level=log_level)
  BuildAar(options.arch, options.output, options.use_goma,
           options.extra_gn_args, options.build_dir,
           options.extra_gn_switches, options.extra_ninja_switches)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -7,8 +7,7 @@
# tree. An additional intellectual property rights grant can be found # tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
"""Script for building and testing WebRTC AAR. """Script for building and testing WebRTC AAR."""
"""
import argparse import argparse
import logging import logging
@ -36,110 +35,109 @@ AAR_PROJECT_DIR = os.path.join(CHECKOUT_ROOT, 'examples/aarproject')
def _ParseArgs(): def _ParseArgs():
parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.') parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.')
parser.add_argument('--use-goma', parser.add_argument('--use-goma',
action='store_true', action='store_true',
default=False, default=False,
help='Use goma.') help='Use goma.')
parser.add_argument('--skip-tests', parser.add_argument('--skip-tests',
action='store_true', action='store_true',
default=False, default=False,
help='Skips running the tests.') help='Skips running the tests.')
parser.add_argument( parser.add_argument(
'--build-dir', '--build-dir',
default=None, default=None,
help='Temporary directory to store the build files. If not specified, ' help='Temporary directory to store the build files. If not specified, '
'a new directory will be created.') 'a new directory will be created.')
parser.add_argument('--verbose', parser.add_argument('--verbose',
action='store_true', action='store_true',
default=False, default=False,
help='Debug logging.') help='Debug logging.')
return parser.parse_args() return parser.parse_args()
def _GetCommitHash():
  """Returns the git hash of HEAD in the WebRTC checkout."""
  output = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                   cwd=CHECKOUT_ROOT)
  return output.decode('UTF-8').strip()
def _GetCommitPos():
  """Returns the commit position parsed out of HEAD's commit message.

  Raises:
    Exception: if the commit message carries no commit position.
  """
  message = subprocess.check_output(
      ['git', 'rev-list', '--format=%B', '--max-count=1', 'HEAD'],
      cwd=CHECKOUT_ROOT).decode('UTF-8')
  match = re.search(COMMIT_POSITION_REGEX, message, re.MULTILINE)
  if match is None:
    raise Exception('Commit position not found in the commit message: %s' %
                    message)
  return match.group(1)
def _TestAAR(build_dir):
  """Runs AppRTCMobile tests using the AAR. Returns true if the tests pass."""
  logging.info('Testing library.')

  # Best-effort uninstall of any previously installed AppRTCMobile packages;
  # a failure here just means the app was not installed.
  logging.info('Uninstalling previous AppRTCMobile versions. It is okay for '
               'these commands to fail if AppRTCMobile is not installed.')
  for package in ('org.appspot.apprtc', 'org.appspot.apprtc.test'):
    subprocess.call([ADB_BIN, 'uninstall', package])

  try:
    # Clean first, then run the connected tests against the built AAR.
    subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR)
    subprocess.check_call([
        GRADLEW_BIN, 'connectedDebugAndroidTest',
        '-PaarDir=' + os.path.abspath(build_dir)
    ],
                          cwd=AAR_PROJECT_DIR)
  except subprocess.CalledProcessError:
    logging.exception('Test failure.')
    return False  # Clean or tests failed
  return True  # Tests pass
def BuildAndTestAar(use_goma, skip_tests, build_dir):
  """Builds the AAR for the current checkout and runs its tests.

  Raises:
    Exception: if the AppRTCMobile tests fail (and were not skipped).
  """
  version = '1.0.' + _GetCommitPos()
  commit = _GetCommitHash()
  logging.info('Building and Testing AAR version %s with hash %s', version,
               commit)

  # If build directory is not specified, create a temporary directory.
  owns_build_dir = not build_dir
  if owns_build_dir:
    build_dir = tempfile.mkdtemp()

  try:
    aar_path = os.path.join(build_dir, ARTIFACT_ID + '-' + version + '.aar')
    logging.info('Building at %s', build_dir)
    BuildAar(ARCHS,
             aar_path,
             use_goma=use_goma,
             ext_build_dir=os.path.join(build_dir, 'aar-build'))
    if not (skip_tests or _TestAAR(build_dir)):
      raise Exception('Test failure.')
    logging.info('Test success.')
  finally:
    # Only clean up directories this function created itself.
    if owns_build_dir:
      shutil.rmtree(build_dir, True)
def main():
  """Parses flags, configures logging, then builds and tests the AAR."""
  options = _ParseArgs()
  log_level = logging.DEBUG if options.verbose else logging.INFO
  logging.basicConfig(level=log_level)
  BuildAndTestAar(options.use_goma, options.skip_tests, options.build_dir)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -12,36 +13,34 @@ import re
import sys import sys
def replace_double_quote(line): def _ReplaceDoubleQuote(line):
re_rtc_import = re.compile( re_rtc_import = re.compile(r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)',
r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)', re.DOTALL) re.DOTALL)
match = re_rtc_import.match(line) match = re_rtc_import.match(line)
if not match: if not match:
return line return line
return '%s#import <WebRTC/%sRTC%s.h>%s' % (match.group(1), match.group(3), return '%s#import <WebRTC/%sRTC%s.h>%s' % (match.group(1), match.group(3),
match.group(4), match.group(5)) match.group(4), match.group(5))
def Process(input_file, output_file):
  """Copies input_file to output_file, rewriting RTC imports on each line."""
  # Read/write as bytes so the files are handled as UTF-8 regardless of the
  # platform's default encoding.
  with open(input_file, 'rb') as src:
    lines = src.read().decode('UTF-8').splitlines()
  converted = b''.join(
      _ReplaceDoubleQuote(line).encode('UTF-8') + b"\n" for line in lines)
  with open(output_file, 'wb') as dst:
    dst.write(converted)
def main():
  """Parses --input/--output flags and rewrites the given header file."""
  parser = argparse.ArgumentParser(
      description='Copy headers of framework and replace double-quoted '
      'includes to angle-bracketed respectively.')
  parser.add_argument('--input', help='Input header files to copy.', type=str)
  parser.add_argument('--output', help='Output file.', type=str)
  options = parser.parse_args()
  return Process(options.input, options.output)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -8,28 +9,27 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import unittest import unittest
from copy_framework_header import replace_double_quote from copy_framework_header import _ReplaceDoubleQuote
class TestCopyFramework(unittest.TestCase):
  """Unit tests for copy_framework_header's import rewriting."""

  def testReplaceDoubleQuote(self):
    # (input line, expected rewritten line) pairs covering plain headers,
    # category headers, path-prefixed headers and non-import lines.
    cases = [
        ('#import "RTCMacros.h"', '#import <WebRTC/RTCMacros.h>'),
        ('#import "RTCMacros.h"\n', '#import <WebRTC/RTCMacros.h>\n'),
        ('#import "UIDevice+RTCDevice.h"\n',
         '#import <WebRTC/UIDevice+RTCDevice.h>\n'),
        ('#import "components/video_codec/RTCVideoDecoderFactoryH264.h"\n',
         '#import <WebRTC/RTCVideoDecoderFactoryH264.h>\n'),
        ('@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n',
         '@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n'),
    ]
    for line, expected in cases:
      self.assertEqual(_ReplaceDoubleQuote(line), expected)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

File diff suppressed because it is too large Load diff

View file

@ -1,4 +1,5 @@
#!/usr/bin/env vpython #!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -7,7 +8,6 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
from __future__ import absolute_import
import glob import glob
import os import os
@ -15,16 +15,11 @@ import shutil
import sys import sys
import tempfile import tempfile
import unittest import unittest
import mock
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir) PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir)
sys.path.append(PARENT_DIR) sys.path.append(PARENT_DIR)
# Workaround for the presubmit, plan only to run in py3 now.
# TODO(webrtc:13418) Remove when py2 presubmit is gone.
if sys.version_info >= (3, 3):
from unittest import mock
else:
import mock
import roll_deps import roll_deps
from roll_deps import CalculateChangedDeps, FindAddedDeps, \ from roll_deps import CalculateChangedDeps, FindAddedDeps, \
@ -54,293 +49,284 @@ NO_CHROMIUM_REVISION_UPDATE = ChromiumRevisionUpdate('cafe', 'cafe')
class TestError(Exception):
  """Raised by the fake command objects when expectations are violated."""
class FakeCmd(object): class FakeCmd:
def __init__(self): def __init__(self):
self.expectations = [] self.expectations = []
def AddExpectation(self, *args, **kwargs): def AddExpectation(self, *args, **kwargs):
returns = kwargs.pop('_returns', None) returns = kwargs.pop('_returns', None)
ignores = kwargs.pop('_ignores', []) ignores = kwargs.pop('_ignores', [])
self.expectations.append((args, kwargs, returns, ignores)) self.expectations.append((args, kwargs, returns, ignores))
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
if not self.expectations: if not self.expectations:
raise TestError('Got unexpected\n%s\n%s' % (args, kwargs)) raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0) exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0)
for item in ignores: for item in ignores:
kwargs.pop(item, None) kwargs.pop(item, None)
if args != exp_args or kwargs != exp_kwargs: if args != exp_args or kwargs != exp_kwargs:
message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, message = 'Expected:\n args: %s\n kwargs: %s\n' % (exp_args, exp_kwargs)
exp_kwargs) message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs)
message += 'Got:\n args: %s\n kwargs: %s\n' % (args, kwargs) raise TestError(message)
raise TestError(message) return exp_returns
return exp_returns
class NullCmd:
  """No-op command mock for call sites whose invocations need no checking."""

  def __call__(self, *args, **kwargs):
    # Mimics _RunCommand's return shape: empty stdout and stderr.
    return None, None
class TestRollChromiumRevision(unittest.TestCase): class TestRollChromiumRevision(unittest.TestCase):
def setUp(self): def setUp(self):
self._output_dir = tempfile.mkdtemp() self._output_dir = tempfile.mkdtemp()
test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps') test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps')
for test_file in glob.glob(os.path.join(test_data_dir, '*')): for test_file in glob.glob(os.path.join(test_data_dir, '*')):
shutil.copy(test_file, self._output_dir) shutil.copy(test_file, self._output_dir)
join = lambda f: os.path.join(self._output_dir, f) join = lambda f: os.path.join(self._output_dir, f)
self._webrtc_depsfile = join('DEPS') self._webrtc_depsfile = join('DEPS')
self._new_cr_depsfile = join('DEPS.chromium.new') self._new_cr_depsfile = join('DEPS.chromium.new')
self._webrtc_depsfile_android = join('DEPS.with_android_deps') self._webrtc_depsfile_android = join('DEPS.with_android_deps')
self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps') self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps')
self.fake = FakeCmd() self.fake = FakeCmd()
def tearDown(self): def tearDown(self):
shutil.rmtree(self._output_dir, ignore_errors=True) shutil.rmtree(self._output_dir, ignore_errors=True)
self.assertEqual(self.fake.expectations, []) self.assertEqual(self.fake.expectations, [])
def testVarLookup(self): def testVarLookup(self):
local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}} local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}}
lookup = roll_deps.VarLookup(local_scope) lookup = roll_deps.VarLookup(local_scope)
self.assertEqual(lookup('foo'), 'bar') self.assertEqual(lookup('foo'), 'bar')
def testUpdateDepsFile(self): def testUpdateDepsFile(self):
new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111' new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111'
current_rev = TEST_DATA_VARS['chromium_revision'] current_rev = TEST_DATA_VARS['chromium_revision']
with open(self._new_cr_depsfile_android) as deps_file: with open(self._new_cr_depsfile_android) as deps_file:
new_cr_contents = deps_file.read() new_cr_contents = deps_file.read()
UpdateDepsFile(self._webrtc_depsfile, UpdateDepsFile(self._webrtc_depsfile,
ChromiumRevisionUpdate(current_rev, new_rev), [], ChromiumRevisionUpdate(current_rev, new_rev), [],
new_cr_contents) new_cr_contents)
with open(self._webrtc_depsfile) as deps_file: with open(self._webrtc_depsfile) as deps_file:
deps_contents = deps_file.read() deps_contents = deps_file.read()
self.assertTrue( self.assertTrue(new_rev in deps_contents,
new_rev in deps_contents, 'Failed to find %s in\n%s' % (new_rev, deps_contents))
'Failed to find %s in\n%s' % (new_rev, deps_contents))
def _UpdateDepsSetup(self): def _UpdateDepsSetup(self):
with open(self._webrtc_depsfile_android) as deps_file: with open(self._webrtc_depsfile_android) as deps_file:
webrtc_contents = deps_file.read() webrtc_contents = deps_file.read()
with open(self._new_cr_depsfile_android) as deps_file: with open(self._new_cr_depsfile_android) as deps_file:
new_cr_contents = deps_file.read() new_cr_contents = deps_file.read()
webrtc_deps = ParseDepsDict(webrtc_contents) webrtc_deps = ParseDepsDict(webrtc_contents)
new_cr_deps = ParseDepsDict(new_cr_contents) new_cr_deps = ParseDepsDict(new_cr_contents)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
with mock.patch('roll_deps._RunCommand', NullCmd()): with mock.patch('roll_deps._RunCommand', NullCmd()):
UpdateDepsFile(self._webrtc_depsfile_android, UpdateDepsFile(self._webrtc_depsfile_android, NO_CHROMIUM_REVISION_UPDATE,
NO_CHROMIUM_REVISION_UPDATE, changed_deps, changed_deps, new_cr_contents)
new_cr_contents)
with open(self._webrtc_depsfile_android) as deps_file: with open(self._webrtc_depsfile_android) as deps_file:
updated_contents = deps_file.read() updated_contents = deps_file.read()
return webrtc_contents, updated_contents return webrtc_contents, updated_contents
def testUpdateAndroidGeneratedDeps(self): def testUpdateAndroidGeneratedDeps(self):
_, updated_contents = self._UpdateDepsSetup() _, updated_contents = self._UpdateDepsSetup()
changed = 'third_party/android_deps/libs/android_arch_core_common' changed = 'third_party/android_deps/libs/android_arch_core_common'
changed_version = '1.0.0-cr0' changed_version = '1.0.0-cr0'
self.assertTrue(changed in updated_contents) self.assertTrue(changed in updated_contents)
self.assertTrue(changed_version in updated_contents) self.assertTrue(changed_version in updated_contents)
def testAddAndroidGeneratedDeps(self): def testAddAndroidGeneratedDeps(self):
webrtc_contents, updated_contents = self._UpdateDepsSetup() webrtc_contents, updated_contents = self._UpdateDepsSetup()
added = 'third_party/android_deps/libs/android_arch_lifecycle_common' added = 'third_party/android_deps/libs/android_arch_lifecycle_common'
self.assertFalse(added in webrtc_contents) self.assertFalse(added in webrtc_contents)
self.assertTrue(added in updated_contents) self.assertTrue(added in updated_contents)
def testRemoveAndroidGeneratedDeps(self): def testRemoveAndroidGeneratedDeps(self):
webrtc_contents, updated_contents = self._UpdateDepsSetup() webrtc_contents, updated_contents = self._UpdateDepsSetup()
removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime' removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime'
self.assertTrue(removed in webrtc_contents) self.assertTrue(removed in webrtc_contents)
self.assertFalse(removed in updated_contents) self.assertFalse(removed in updated_contents)
def testParseDepsDict(self): def testParseDepsDict(self):
with open(self._webrtc_depsfile) as deps_file: with open(self._webrtc_depsfile) as deps_file:
deps_contents = deps_file.read() deps_contents = deps_file.read()
local_scope = ParseDepsDict(deps_contents) local_scope = ParseDepsDict(deps_contents)
vars_dict = local_scope['vars'] vars_dict = local_scope['vars']
def AssertVar(variable_name): def AssertVar(variable_name):
self.assertEqual(vars_dict[variable_name], self.assertEqual(vars_dict[variable_name], TEST_DATA_VARS[variable_name])
TEST_DATA_VARS[variable_name])
AssertVar('chromium_git') AssertVar('chromium_git')
AssertVar('chromium_revision') AssertVar('chromium_revision')
self.assertEqual(len(local_scope['deps']), 3) self.assertEqual(len(local_scope['deps']), 3)
self.assertEqual(len(local_scope['deps_os']), 1) self.assertEqual(len(local_scope['deps_os']), 1)
def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self): def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest') entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest')
self.assertEqual(len(entries), 1) self.assertEqual(len(entries), 1)
self.assertEqual(entries[0], DEPS_ENTRIES['src/testing/gtest']) self.assertEqual(entries[0], DEPS_ENTRIES['src/testing/gtest'])
def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self): def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing') entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing')
self.assertEqual(len(entries), 2) self.assertEqual(len(entries), 2)
def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self): def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self):
entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build') entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build')
self.assertEqual(len(entries), 1) self.assertEqual(len(entries), 1)
def testCalculateChangedDeps(self): def testCalculateChangedDeps(self):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile)
with mock.patch('roll_deps._RunCommand', self.fake): with mock.patch('roll_deps._RunCommand', self.fake):
_SetupGitLsRemoteCall( _SetupGitLsRemoteCall(
self.fake, self.fake, 'https://chromium.googlesource.com/chromium/src/build',
'https://chromium.googlesource.com/chromium/src/build', BUILD_NEW_REV)
BUILD_NEW_REV) changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
self.assertEqual(len(changed_deps), 3) self.assertEqual(len(changed_deps), 3)
self.assertEqual(changed_deps[0].path, 'src/build') self.assertEqual(changed_deps[0].path, 'src/build')
self.assertEqual(changed_deps[0].current_rev, BUILD_OLD_REV) self.assertEqual(changed_deps[0].current_rev, BUILD_OLD_REV)
self.assertEqual(changed_deps[0].new_rev, BUILD_NEW_REV) self.assertEqual(changed_deps[0].new_rev, BUILD_NEW_REV)
self.assertEqual(changed_deps[1].path, 'src/buildtools/linux64') self.assertEqual(changed_deps[1].path, 'src/buildtools/linux64')
self.assertEqual(changed_deps[1].package, 'gn/gn/linux-amd64') self.assertEqual(changed_deps[1].package, 'gn/gn/linux-amd64')
self.assertEqual(changed_deps[1].current_version, self.assertEqual(changed_deps[1].current_version,
'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263') 'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263')
self.assertEqual(changed_deps[1].new_version, self.assertEqual(changed_deps[1].new_version, 'git_revision:new-revision')
'git_revision:new-revision')
self.assertEqual(changed_deps[2].path, 'src/third_party/depot_tools') self.assertEqual(changed_deps[2].path, 'src/third_party/depot_tools')
self.assertEqual(changed_deps[2].current_rev, DEPOTTOOLS_OLD_REV) self.assertEqual(changed_deps[2].current_rev, DEPOTTOOLS_OLD_REV)
self.assertEqual(changed_deps[2].new_rev, DEPOTTOOLS_NEW_REV) self.assertEqual(changed_deps[2].new_rev, DEPOTTOOLS_NEW_REV)
def testWithDistinctDeps(self): def testWithDistinctDeps(self):
"""Check CalculateChangedDeps works when deps are added/removed.""" """Check CalculateChangedDeps works when deps are added/removed."""
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
self.assertEqual(len(changed_deps), 1) self.assertEqual(len(changed_deps), 1)
self.assertEqual( self.assertEqual(
changed_deps[0].path, changed_deps[0].path,
'src/third_party/android_deps/libs/android_arch_core_common') 'src/third_party/android_deps/libs/android_arch_core_common')
self.assertEqual( self.assertEqual(
changed_deps[0].package, changed_deps[0].package,
'chromium/third_party/android_deps/libs/android_arch_core_common') 'chromium/third_party/android_deps/libs/android_arch_core_common')
self.assertEqual(changed_deps[0].current_version, 'version:0.9.0') self.assertEqual(changed_deps[0].current_version, 'version:0.9.0')
self.assertEqual(changed_deps[0].new_version, 'version:1.0.0-cr0') self.assertEqual(changed_deps[0].new_version, 'version:1.0.0-cr0')
def testFindAddedDeps(self): def testFindAddedDeps(self):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
added_android_paths, other_paths = FindAddedDeps( added_android_paths, other_paths = FindAddedDeps(webrtc_deps, new_cr_deps)
webrtc_deps, new_cr_deps) self.assertEqual(
self.assertEqual(added_android_paths, [ added_android_paths,
'src/third_party/android_deps/libs/android_arch_lifecycle_common' ['src/third_party/android_deps/libs/android_arch_lifecycle_common'])
]) self.assertEqual(other_paths, [])
self.assertEqual(other_paths, [])
def testFindRemovedDeps(self): def testFindRemovedDeps(self):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
removed_android_paths, other_paths = FindRemovedDeps( removed_android_paths, other_paths = FindRemovedDeps(
webrtc_deps, new_cr_deps) webrtc_deps, new_cr_deps)
self.assertEqual(removed_android_paths, [ self.assertEqual(
'src/third_party/android_deps/libs/android_arch_lifecycle_runtime' removed_android_paths,
]) ['src/third_party/android_deps/libs/android_arch_lifecycle_runtime'])
self.assertEqual(other_paths, []) self.assertEqual(other_paths, [])
def testMissingDepsIsDetected(self): def testMissingDepsIsDetected(self):
"""Check error is reported when deps cannot be automatically removed.""" """Check error is reported when deps cannot be automatically removed."""
# The situation at test is the following: # The situation at test is the following:
# * A WebRTC DEPS entry is missing from Chromium. # * A WebRTC DEPS entry is missing from Chromium.
# * The dependency isn't an android_deps (those are supported). # * The dependency isn't an android_deps (those are supported).
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
_, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps) _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps)
self.assertEqual( self.assertEqual(other_paths,
other_paths, ['src/buildtools/linux64', 'src/third_party/depot_tools'])
['src/buildtools/linux64', 'src/third_party/depot_tools'])
def testExpectedDepsIsNotReportedMissing(self): def testExpectedDepsIsNotReportedMissing(self):
"""Some deps musn't be seen as missing, even if absent from Chromium.""" """Some deps musn't be seen as missing, even if absent from Chromium."""
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
removed_android_paths, other_paths = FindRemovedDeps( removed_android_paths, other_paths = FindRemovedDeps(
webrtc_deps, new_cr_deps) webrtc_deps, new_cr_deps)
self.assertTrue('src/build' not in removed_android_paths) self.assertTrue('src/build' not in removed_android_paths)
self.assertTrue('src/build' not in other_paths) self.assertTrue('src/build' not in other_paths)
def _CommitMessageSetup(self): def _CommitMessageSetup(self):
webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android) webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android) new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps) changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps) added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps)
removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps) removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps)
current_commit_pos = 'cafe' current_commit_pos = 'cafe'
new_commit_pos = 'f00d' new_commit_pos = 'f00d'
commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE, commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE,
current_commit_pos, current_commit_pos, new_commit_pos,
new_commit_pos, changed_deps, changed_deps, added_paths, removed_paths)
added_paths, removed_paths)
return [l.strip() for l in commit_msg.split('\n')] return [l.strip() for l in commit_msg.split('\n')]
def testChangedDepsInCommitMessage(self): def testChangedDepsInCommitMessage(self):
commit_lines = self._CommitMessageSetup() commit_lines = self._CommitMessageSetup()
changed = '* src/third_party/android_deps/libs/' \ changed = '* src/third_party/android_deps/libs/' \
'android_arch_core_common: version:0.9.0..version:1.0.0-cr0' 'android_arch_core_common: version:0.9.0..version:1.0.0-cr0'
self.assertTrue(changed in commit_lines) self.assertTrue(changed in commit_lines)
# Check it is in adequate section. # Check it is in adequate section.
changed_line = commit_lines.index(changed) changed_line = commit_lines.index(changed)
self.assertTrue('Changed' in commit_lines[changed_line - 1]) self.assertTrue('Changed' in commit_lines[changed_line - 1])
def testAddedDepsInCommitMessage(self): def testAddedDepsInCommitMessage(self):
commit_lines = self._CommitMessageSetup() commit_lines = self._CommitMessageSetup()
added = '* src/third_party/android_deps/libs/' \ added = '* src/third_party/android_deps/libs/' \
'android_arch_lifecycle_common' 'android_arch_lifecycle_common'
self.assertTrue(added in commit_lines) self.assertTrue(added in commit_lines)
# Check it is in adequate section. # Check it is in adequate section.
added_line = commit_lines.index(added) added_line = commit_lines.index(added)
self.assertTrue('Added' in commit_lines[added_line - 1]) self.assertTrue('Added' in commit_lines[added_line - 1])
def testRemovedDepsInCommitMessage(self): def testRemovedDepsInCommitMessage(self):
commit_lines = self._CommitMessageSetup() commit_lines = self._CommitMessageSetup()
removed = '* src/third_party/android_deps/libs/' \ removed = '* src/third_party/android_deps/libs/' \
'android_arch_lifecycle_runtime' 'android_arch_lifecycle_runtime'
self.assertTrue(removed in commit_lines) self.assertTrue(removed in commit_lines)
# Check it is in adequate section. # Check it is in adequate section.
removed_line = commit_lines.index(removed) removed_line = commit_lines.index(removed)
self.assertTrue('Removed' in commit_lines[removed_line - 1]) self.assertTrue('Removed' in commit_lines[removed_line - 1])
class TestChooseCQMode(unittest.TestCase):
  """Covers the skip / dry-run / full-submit decision for the CQ."""

  def testSkip(self):
    # Explicit skip request wins regardless of the issue count.
    self.assertEqual(ChooseCQMode(True, 99, 500000, 500100), 0)

  def testDryRun(self):
    # Too many commits between the revisions: dry-run only.
    self.assertEqual(ChooseCQMode(False, 101, 500000, 500100), 1)

  def testSubmit(self):
    # Commit count within the limit: full CQ submit.
    self.assertEqual(ChooseCQMode(False, 100, 500000, 500100), 2)
def _SetupGitLsRemoteCall(cmd_fake, url, revision): def _SetupGitLsRemoteCall(cmd_fake, url, revision):
cmd = ['git', 'ls-remote', url, revision] cmd = ['git', 'ls-remote', url, revision]
cmd_fake.AddExpectation(cmd, _returns=(revision, None)) cmd_fake.AddExpectation(cmd, _returns=(revision, None))
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -16,19 +18,19 @@ WEBRTC_VERSION_RE = re.compile(
if __name__ == '__main__':
  if len(sys.argv) != 2:
    print('Usage: binary_version_test.py <FILE_NAME>')
    sys.exit(1)
  filename = sys.argv[1]
  # `strings` dumps the printable runs of the binary; scan them for the
  # WebRTC version stamp.
  output = subprocess.check_output(['strings', filename])
  for symbol in output.decode('utf-8').splitlines():
    if WEBRTC_VERSION_RE.match(symbol):
      # Record the matched stamp for later inspection and report success.
      with open('webrtc_binary_version_check', 'w') as f:
        f.write(symbol)
      sys.exit(0)
  print('WebRTC source timestamp not found in "%s"' % filename)
  print('Check why "kSourceTimestamp" from call/version.cc is not linked '
        '(or why it has been optimized away by the compiler/linker)')
  sys.exit(1)

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -20,9 +21,8 @@ import shutil
import subprocess import subprocess
import sys import sys
import tempfile import tempfile
#pylint: disable=relative-import from presubmit_checks_lib.build_helpers import (GetClangTidyPath,
from presubmit_checks_lib.build_helpers import GetClangTidyPath, \ GetCompilationCommand)
GetCompilationCommand
# We enable all checkers by default for investigation purpose. # We enable all checkers by default for investigation purpose.
# This includes clang-analyzer-* checks. # This includes clang-analyzer-* checks.
@ -32,66 +32,66 @@ CHECKER_OPTION = '-checks=*'
def Process(filepath, args):
  """Runs clang-tidy over `filepath`, streaming its output.

  A throwaway GN build directory is generated only to recover the file's
  compilation command (keeping the CLI simple and unencumbered); it is
  removed again before returning.
  """
  out_dir = tempfile.mkdtemp('clang_tidy')
  try:
    command = GetCompilationCommand(filepath, [], out_dir)  # Default build.

    # Drop warning and -f flags (e.g. -fcomplete-member-pointers): they
    # aren't needed and cause trouble when clang-tidy doesn't match the
    # most recent clang.
    command = [
        flag for flag in command if not flag.startswith(('-W', '-f'))
    ]

    # clang-tidy wants the source path relative to the build dir, and
    # replaces the compiler executable at the front of the command line.
    rel_path = os.path.relpath(os.path.abspath(filepath), out_dir)
    command[0:1] = ([GetClangTidyPath(), CHECKER_OPTION, rel_path] + args +
                    ['--'])  # '--' separates the clang flags.

    print("Running: %s" % ' '.join(command))
    # Run from the build dir so that relative paths are correct.
    proc = subprocess.Popen(command,
                            cwd=out_dir,
                            stdout=sys.stdout,
                            stderr=sys.stderr)
    proc.communicate()
    return proc.returncode
  finally:
    shutil.rmtree(out_dir, ignore_errors=True)
def ValidateCC(filepath):
  """argparse type-checker: only .cc translation units can be analyzed."""
  if not filepath.endswith('.cc'):
    raise argparse.ArgumentTypeError(
        '%s not supported.\n'
        'For now, we can only analyze translation units (.cc files).' %
        filepath)
  return filepath
def Main():
  """Parses the command line and runs clang-tidy on the given .cc file."""
  parser = argparse.ArgumentParser(
      description="Run clang-tidy on single cc file.\n"
      "Use flags, defines and include paths as in default debug build.\n"
      "WARNING, this is a POC version with rough edges.")
  parser.add_argument('filepath',
                      help='Specifies the path of the .cc file to analyze.',
                      type=ValidateCC)
  parser.add_argument('args',
                      nargs=argparse.REMAINDER,
                      help='Arguments passed to clang-tidy')
  opts = parser.parse_args()
  return Process(opts.filepath, opts.args)


if __name__ == '__main__':
  sys.exit(Main())

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -27,21 +28,20 @@ TESTS = [
def main():
  """Prints the coverage.py invocation for the desktop test suites."""
  coverage_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
                  TESTS + ['-b out/coverage', '-o out/report'] +
                  ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] +
                  ['-c \'out/coverage/%s\'' % t for t in TESTS])

  def WithXvfb(binary):
    # GUI-touching binaries must run under xvfb on headless bots.
    return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary)

  modules_unittests = 'out/coverage/modules_unittests'
  coverage_cmd[coverage_cmd.index(
      '-c \'%s\'' % modules_unittests)] = WithXvfb(modules_unittests)

  print(' '.join(coverage_cmd))
  return 0


if __name__ == '__main__':
  sys.exit(main())

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -46,7 +47,6 @@ if os.path.exists(binary_path):
========== ENDING OF PATCH ========== ========== ENDING OF PATCH ==========
""" """
import sys import sys
DIRECTORY = 'out/coverage' DIRECTORY = 'out/coverage'
@ -77,89 +77,89 @@ XC_TESTS = [
def FormatIossimTest(test_name, is_xctest=False):
  """Returns a coverage.py '-c' flag that runs `test_name` under iossim."""
  bundle_args = ['%s/%s.app' % (DIRECTORY, test_name)]
  if is_xctest:
    # XCTest bundles need the companion module passed as well.
    bundle_args.append('%s/%s_module.xctest' % (DIRECTORY, test_name))
  return '-c \'%s/iossim %s\'' % (DIRECTORY, ' '.join(bundle_args))
def GetGNArgs(is_simulator):
  """Returns the GN args for an iOS coverage build (simulator or device)."""
  return [
      'target_os="ios"',
      'target_cpu="%s"' % ('x64' if is_simulator else 'arm64'),
      'use_clang_coverage=true',
      'is_component_build=false',
      'dcheck_always_on=true',
  ]
def GenerateIOSSimulatorCommand():
  """Prints the gn and coverage.py commands for simulator coverage."""
  gn_args = ' '.join(GetGNArgs(is_simulator=True))
  gn_cmd = ['gn', 'gen', DIRECTORY, '--args=\'%s\'' % gn_args]

  coverage_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
                  ["%s.app" % t for t in XC_TESTS + TESTS] +
                  ['-b %s' % DIRECTORY, '-o out/report'] +
                  ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] +
                  [FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] +
                  [FormatIossimTest(t, is_xctest=False) for t in TESTS])

  for line in (
      'To get code coverage using iOS sim just run following commands:', '',
      ' '.join(gn_cmd), '', ' '.join(coverage_cmd)):
    print(line)
  return 0
def GenerateIOSDeviceCommand():
  """Prints the manual steps needed to collect coverage on a real device."""
  gn_args = ' '.join(GetGNArgs(is_simulator=False))

  report_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
                ['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] +
                ['-o out/report'] +
                ['-p %s/merged.profdata' % DIRECTORY] +
                ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''])

  print('Computing code coverage for real iOS device is a little bit tedious.')
  print('')
  print('You will need:')
  print('')
  print('1. Generate xcode project and open it with Xcode 10+:')
  print(' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args))
  print(' open %s/all.xcworkspace' % DIRECTORY)
  print('')
  print('2. Execute these Run targets manually with Xcode Run button and ')
  print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
  print('\n'.join('- %s' % t for t in TESTS))
  print('')
  print('3. Execute these Test targets manually with Xcode Test button and ')
  print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
  print('\n'.join('- %s' % t for t in XC_TESTS))
  print('')
  print('4. Merge *.profraw files to *.profdata using llvm-profdata tool:')
  print(' build/mac_files/Xcode.app/Contents/Developer/Toolchains/'
        'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge '
        '-o %s/merged.profdata ' % DIRECTORY +
        '-sparse=true %s/*.profraw' % DIRECTORY)
  print('')
  print('5. Generate coverage report:')
  print(' ' + ' '.join(report_cmd))
  return 0
def main():
  """Dispatches to the simulator or device coverage instructions."""
  handlers = {
      'simulator': GenerateIOSSimulatorCommand,
      'device': GenerateIOSDeviceCommand,
  }
  if len(sys.argv) < 2:
    print('Please specify type of coverage:')
    print(' %s simulator' % sys.argv[0])
    print(' %s device' % sys.argv[0])
  elif sys.argv[1] in handlers:
    handlers[sys.argv[1]]()
  else:
    print('Unsupported type of coverage')
  return 0


if __name__ == '__main__':
  sys.exit(main())

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# #
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
# #
@ -8,76 +8,76 @@
# in the file PATENTS. All contributing project authors may # in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import psutil
import sys import sys
import psutil
import numpy import numpy
from matplotlib import pyplot from matplotlib import pyplot
class CpuSnapshot:
  """Accumulates system CPU-usage samples under a human-readable label."""

  def __init__(self, label):
    self.label = label  # Name shown in the plot legend.
    self.samples = []   # One CPU percentage per captured second.

  def Capture(self, sample_count):
    """Blocks until a total of `sample_count` one-second samples exist."""
    remaining = sample_count - len(self.samples)
    print('Capturing %d CPU samples for %s...' % (remaining, self.label))
    while len(self.samples) < sample_count:
      # psutil blocks for the 1.0s interval while measuring.
      self.samples.append(psutil.cpu_percent(1.0, False))

  def Text(self):
    """Returns a one-line statistical summary of the captured samples."""
    return ('%s: avg=%s, median=%s, min=%s, max=%s' %
            (self.label, numpy.average(self.samples),
             numpy.median(self.samples), numpy.min(self.samples),
             numpy.max(self.samples)))

  def Max(self):
    return numpy.max(self.samples)
def GrabCpuSamples(sample_count):
  """Prompts for a label and captures `sample_count` CPU samples under it.

  Returns the populated CpuSnapshot, or None when the user enters an
  empty label (meaning "quit").
  """
  print('Label for snapshot (enter to quit): ')
  # Bug fix: the py2->py3 conversion produced eval(input().strip()),
  # which executes whatever the user types and crashes on a plain word
  # like 'idle'. Python 2's raw_input() maps to plain input() in Python 3.
  label = input().strip()
  if not label:
    return None
  snapshot = CpuSnapshot(label)
  snapshot.Capture(sample_count)
  return snapshot
def main():
  """Interactively captures labelled CPU snapshots, then plots them."""
  print('How many seconds to capture per snapshot (enter for 60)?')
  # Bug fix: this was eval(input().strip()) after the py2->py3
  # conversion. eval() turns '60' into an int (so the length check below
  # raised TypeError) and executes arbitrary user input. Keep the raw
  # string and convert explicitly.
  answer = input().strip()
  if answer and int(answer) > 0:
    sample_count = int(answer)
  else:
    print('Defaulting to 60 samples.')
    sample_count = 60

  snapshots = []
  while True:
    snapshot = GrabCpuSamples(sample_count)
    if snapshot is None:  # Empty label means the user is done.
      break
    snapshots.append(snapshot)

  if not snapshots:
    print('no samples captured')
    return -1

  pyplot.title('CPU usage')
  for snapshot in snapshots:
    pyplot.plot(snapshot.samples, label=snapshot.Text(), linewidth=2)
  pyplot.legend()
  pyplot.show()
  return 0


if __name__ == '__main__':
  sys.exit(main())

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -27,34 +28,34 @@ import subprocess2
def main(directories):
  """Downloads precompiled tools from GCS for each given directory.

  Falls back to the script's own directory when none are given.
  Returns 0 on success, 2 on download failure.
  """
  for target_dir in directories or [SCRIPT_DIR]:
    cmd = [
        sys.executable,
        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH,
                     'download_from_google_storage.py'),
        '--directory',
        '--num_threads=10',
        '--bucket',
        'chrome-webrtc-resources',
        '--auto_platform',
        '--recursive',
        target_dir,
    ]
    print('Downloading precompiled tools...')
    # Perform download similar to how gclient hooks execute.
    try:
      gclient_utils.CheckCallAndFilter(cmd,
                                       cwd=SRC_DIR,
                                       always_show_header=True)
    except (gclient_utils.Error, subprocess2.CalledProcessError) as e:
      print('Error: %s' % str(e))
      return 2
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))

View file

@ -1,4 +1,5 @@
#!/usr/bin/env vpython3 #!/usr/bin/env vpython3
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -26,10 +27,10 @@ If any command line arguments are passed to the script, it is executed as a
command in a subprocess. command in a subprocess.
""" """
# psutil is not installed on non-Linux machines by default.
import psutil # pylint: disable=F0401
import subprocess import subprocess
import sys import sys
# psutil is not installed on non-Linux machines by default.
import psutil # pylint: disable=F0401
WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam') WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam')
WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app') WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app')
@ -81,7 +82,7 @@ def StartWebCam():
def _ForcePythonInterpreter(cmd): def _ForcePythonInterpreter(cmd):
"""Returns the fixed command line to call the right python executable.""" """Returns the fixed command line to call the right python executable."""
out = cmd[:] out = cmd[:]
if out[0] == 'python': if out[0] == 'vpython3':
out[0] = sys.executable out[0] = sys.executable
elif out[0].endswith('.py'): elif out[0].endswith('.py'):
out.insert(0, sys.executable) out.insert(0, sys.executable)
@ -95,8 +96,7 @@ def Main(argv):
if argv: if argv:
return subprocess.call(_ForcePythonInterpreter(argv)) return subprocess.call(_ForcePythonInterpreter(argv))
else: return 0
return 0
if __name__ == '__main__': if __name__ == '__main__':

View file

@ -1,4 +1,4 @@
#!/usr/bin/env/python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
@ -24,7 +24,7 @@ following executable in your out folder:
You will be able to compile the same executable targeting your host machine You will be able to compile the same executable targeting your host machine
by running: by running:
$ python tools_webrtc/executable_host_build.py --executable_name random_exec $ vpython3 tools_webrtc/executable_host_build.py --executable_name random_exec
The generated executable will have the same name as the input executable with The generated executable will have the same name as the input executable with
suffix '_host'. suffix '_host'.
@ -62,40 +62,39 @@ import find_depot_tools
def _ParseArgs(): def _ParseArgs():
desc = 'Generates a GN executable targeting the host machine.' desc = 'Generates a GN executable targeting the host machine.'
parser = argparse.ArgumentParser(description=desc) parser = argparse.ArgumentParser(description=desc)
parser.add_argument('--executable_name', parser.add_argument('--executable_name',
required=True, required=True,
help='Name of the executable to build') help='Name of the executable to build')
args = parser.parse_args() args = parser.parse_args()
return args return args
@contextmanager
def HostBuildDir():
  """Yields a temporary build directory that is removed on exit."""
  build_dir = tempfile.mkdtemp()
  try:
    yield build_dir
  finally:
    # Clean up even when the build inside the context raises.
    shutil.rmtree(build_dir)
def _RunCommand(argv, cwd=SRC_DIR, **kwargs):
  """Runs `argv` in `cwd`, discarding stdout; raises on non-zero exit."""
  with open(os.devnull, 'w') as sink:
    subprocess.check_call(argv, cwd=cwd, stdout=sink, **kwargs)
def DepotToolPath(*args): def DepotToolPath(*args):
return os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, *args) return os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, *args)
if __name__ == '__main__': if __name__ == '__main__':
ARGS = _ParseArgs() ARGS = _ParseArgs()
EXECUTABLE_TO_BUILD = ARGS.executable_name EXECUTABLE_TO_BUILD = ARGS.executable_name
EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host' EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host'
with HostBuildDir() as build_dir: with HostBuildDir() as build_dir:
_RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir]) _RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir])
_RunCommand( _RunCommand([DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
[DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD]) shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD),
shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD), EXECUTABLE_FINAL_NAME)
EXECUTABLE_FINAL_NAME)

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
# #
@ -36,10 +36,11 @@ def main():
def _ForcePythonInterpreter(cmd): def _ForcePythonInterpreter(cmd):
"""Returns the fixed command line to call the right python executable.""" """Returns the fixed command line to call the right python executable."""
out = cmd[:] out = cmd[:]
if out[0] == 'python': if len(out) > 0:
out[0] = sys.executable if out[0] == 'python':
elif out[0].endswith('.py'): out[0] = sys.executable
out.insert(0, sys.executable) elif out[0].endswith('.py'):
out.insert(0, sys.executable)
return out return out

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env vpython3
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,9 +12,6 @@ This file emits the list of reasons why a particular build needs to be clobbered
(or a list of 'landmines'). (or a list of 'landmines').
""" """
from __future__ import absolute_import
from __future__ import print_function
import os import os
import sys import sys
@ -26,46 +24,45 @@ host_os = landmine_utils.host_os # pylint: disable=invalid-name
def print_landmines(): # pylint: disable=invalid-name def print_landmines(): # pylint: disable=invalid-name
""" """
ALL LANDMINES ARE EMITTED FROM HERE. ALL LANDMINES ARE EMITTED FROM HERE.
""" """
# DO NOT add landmines as part of a regular CL. Landmines are a last-effort # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
# bandaid fix if a CL that got landed has a build dependency bug and all # bandaid fix if a CL that got landed has a build dependency bug and all
# bots need to be cleaned up. If you're writing a new CL that causes build # bots need to be cleaned up. If you're writing a new CL that causes build
# dependency problems, fix the dependency problems instead of adding a # dependency problems, fix the dependency problems instead of adding a
# landmine. # landmine.
# See the Chromium version in src/build/get_landmines.py for usage examples. # See the Chromium version in src/build/get_landmines.py for usage examples.
print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)') print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)')
if host_os() == 'win': if host_os() == 'win':
print('Clobber to resolve some issues with corrupt .pdb files on bots.') print('Clobber to resolve some issues with corrupt .pdb files on bots.')
print('Clobber due to corrupt .pdb files (after #14623)') print('Clobber due to corrupt .pdb files (after #14623)')
print( print('Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)') print('Clobber due to Win Clang Debug linking errors in '
print('Clobber due to Win Clang Debug linking errors in ' 'https://codereview.webrtc.org/2786603002')
'https://codereview.webrtc.org/2786603002') print('Clobber due to Win Debug linking errors in '
print('Clobber due to Win Debug linking errors in ' 'https://codereview.webrtc.org/2832063003/')
'https://codereview.webrtc.org/2832063003/') print('Clobber win x86 bots (issues with isolated files).')
print('Clobber win x86 bots (issues with isolated files).') if host_os() == 'mac':
if host_os() == 'mac': print('Clobber due to iOS compile errors (crbug.com/694721)')
print('Clobber due to iOS compile errors (crbug.com/694721)') print('Clobber to unblock https://codereview.webrtc.org/2709573003')
print('Clobber to unblock https://codereview.webrtc.org/2709573003') print('Clobber to fix https://codereview.webrtc.org/2709573003 after '
print('Clobber to fix https://codereview.webrtc.org/2709573003 after ' 'landing')
'landing') print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
print('Clobber to fix https://codereview.webrtc.org/2767383005 before' 'landing (changing rtc_executable -> rtc_test on iOS)')
'landing (changing rtc_executable -> rtc_test on iOS)') print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
print('Clobber to fix https://codereview.webrtc.org/2767383005 before' 'landing (changing rtc_executable -> rtc_test on iOS)')
'landing (changing rtc_executable -> rtc_test on iOS)') print('Another landmine for low_bandwidth_audio_test (webrtc:7430)')
print('Another landmine for low_bandwidth_audio_test (webrtc:7430)') print('Clobber to change neteq_rtpplay type to executable')
print('Clobber to change neteq_rtpplay type to executable') print('Clobber to remove .xctest files.')
print('Clobber to remove .xctest files.') print('Clobber to remove .xctest files (take 2).')
print('Clobber to remove .xctest files (take 2).') print('Switching rtc_executable to rtc_test')
print('Switching rtc_executable to rtc_test')
def main(): def main():
print_landmines() print_landmines()
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
@ -14,9 +14,9 @@ It will run `mb gen` in a temporary directory and it is really useful to
check for different configurations. check for different configurations.
Usage: Usage:
$ python tools_webrtc/gn_check_autofix.py -m some_mater -b some_bot $ vpython3 tools_webrtc/gn_check_autofix.py -m some_mater -b some_bot
or or
$ python tools_webrtc/gn_check_autofix.py -c some_mb_config $ vpython3 tools_webrtc/gn_check_autofix.py -c some_mb_config
""" """
import os import os
@ -38,70 +38,69 @@ TARGET_RE = re.compile(
r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$') r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$')
class TemporaryDirectory(object): class TemporaryDirectory:
def __init__(self): def __init__(self):
self._closed = False self._closed = False
self._name = None self._name = None
self._name = tempfile.mkdtemp() self._name = tempfile.mkdtemp()
def __enter__(self): def __enter__(self):
return self._name return self._name
def __exit__(self, exc, value, _tb): def __exit__(self, exc, value, _tb):
if self._name and not self._closed: if self._name and not self._closed:
shutil.rmtree(self._name) shutil.rmtree(self._name)
self._closed = True self._closed = True
def Run(cmd): def Run(cmd):
print 'Running:', ' '.join(cmd) print('Running:', ' '.join(cmd))
sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return sub.communicate() return sub.communicate()
def FixErrors(filename, missing_deps, deleted_sources): def FixErrors(filename, missing_deps, deleted_sources):
with open(filename) as f: with open(filename) as f:
lines = f.readlines() lines = f.readlines()
fixed_file = '' fixed_file = ''
indentation_level = None indentation_level = None
for line in lines: for line in lines:
match = TARGET_RE.match(line) match = TARGET_RE.match(line)
if match: if match:
target = match.group('target_name') target = match.group('target_name')
if target in missing_deps: if target in missing_deps:
indentation_level = match.group('indentation_level') indentation_level = match.group('indentation_level')
elif indentation_level is not None: elif indentation_level is not None:
match = re.match(indentation_level + '}$', line) match = re.match(indentation_level + '}$', line)
if match: if match:
line = ('deps = [\n' + ''.join(' "' + dep + '",\n' line = ('deps = [\n' + ''.join(' "' + dep + '",\n'
for dep in missing_deps[target]) for dep in missing_deps[target]) +
+ ']\n') + line ']\n') + line
indentation_level = None indentation_level = None
elif line.strip().startswith('deps'): elif line.strip().startswith('deps'):
is_empty_deps = line.strip() == 'deps = []' is_empty_deps = line.strip() == 'deps = []'
line = 'deps = [\n' if is_empty_deps else line line = 'deps = [\n' if is_empty_deps else line
line += ''.join(' "' + dep + '",\n' line += ''.join(' "' + dep + '",\n' for dep in missing_deps[target])
for dep in missing_deps[target]) line += ']\n' if is_empty_deps else ''
line += ']\n' if is_empty_deps else '' indentation_level = None
indentation_level = None
if line.strip() not in deleted_sources: if line.strip() not in deleted_sources:
fixed_file += line fixed_file += line
with open(filename, 'w') as f: with open(filename, 'w') as f:
f.write(fixed_file) f.write(fixed_file)
Run(['gn', 'format', filename]) Run(['gn', 'format', filename])
def FirstNonEmpty(iterable): def FirstNonEmpty(iterable):
"""Return first item which evaluates to True, or fallback to None.""" """Return first item which evaluates to True, or fallback to None."""
return next((x for x in iterable if x), None) return next((x for x in iterable if x), None)
def Rebase(base_path, dependency_path, dependency): def Rebase(base_path, dependency_path, dependency):
"""Adapt paths so they work both in stand-alone WebRTC and Chromium tree. """Adapt paths so they work both in stand-alone WebRTC and Chromium tree.
To cope with varying top-level directory (WebRTC VS Chromium), we use: To cope with varying top-level directory (WebRTC VS Chromium), we use:
* relative paths for WebRTC modules. * relative paths for WebRTC modules.
@ -118,82 +117,81 @@ def Rebase(base_path, dependency_path, dependency):
Full target path (E.g. '../rtc_base/time:timestamp_extrapolator'). Full target path (E.g. '../rtc_base/time:timestamp_extrapolator').
""" """
root = FirstNonEmpty(dependency_path.split('/')) root = FirstNonEmpty(dependency_path.split('/'))
if root in CHROMIUM_DIRS: if root in CHROMIUM_DIRS:
# Chromium paths must remain absolute. E.g. //third_party//abseil-cpp... # Chromium paths must remain absolute. E.g. //third_party//abseil-cpp...
rebased = dependency_path rebased = dependency_path
else: else:
base_path = base_path.split(os.path.sep) base_path = base_path.split(os.path.sep)
dependency_path = dependency_path.split(os.path.sep) dependency_path = dependency_path.split(os.path.sep)
first_difference = None first_difference = None
shortest_length = min(len(dependency_path), len(base_path)) shortest_length = min(len(dependency_path), len(base_path))
for i in range(shortest_length): for i in range(shortest_length):
if dependency_path[i] != base_path[i]: if dependency_path[i] != base_path[i]:
first_difference = i first_difference = i
break break
first_difference = first_difference or shortest_length first_difference = first_difference or shortest_length
base_path = base_path[first_difference:] base_path = base_path[first_difference:]
dependency_path = dependency_path[first_difference:] dependency_path = dependency_path[first_difference:]
rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path) rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path)
return rebased + ':' + dependency return rebased + ':' + dependency
def main(): def main():
deleted_sources = set() deleted_sources = set()
errors_by_file = defaultdict(lambda: defaultdict(set)) errors_by_file = defaultdict(lambda: defaultdict(set))
with TemporaryDirectory() as tmp_dir: with TemporaryDirectory() as tmp_dir:
mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py') mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py')
mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl') mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl')
mb_gen_command = ([ mb_gen_command = ([
mb_script_path, mb_script_path,
'gen', 'gen',
tmp_dir, tmp_dir,
'--config-file', '--config-file',
mb_config_file_path, mb_config_file_path,
] + sys.argv[1:]) ] + sys.argv[1:])
mb_output = Run(mb_gen_command) mb_output = Run(mb_gen_command)
errors = mb_output[0].split('ERROR')[1:] errors = mb_output[0].decode('utf-8').split('ERROR')[1:]
if mb_output[1]: if mb_output[1]:
print mb_output[1] print(mb_output[1])
return 1 return 1
for error in errors: for error in errors:
error = error.splitlines() error = error.splitlines()
target_msg = 'The target:' target_msg = 'The target:'
if target_msg not in error: if target_msg not in error:
target_msg = 'It is not in any dependency of' target_msg = 'It is not in any dependency of'
if target_msg not in error: if target_msg not in error:
print '\n'.join(error) print('\n'.join(error))
continue continue
index = error.index(target_msg) + 1 index = error.index(target_msg) + 1
path, target = error[index].strip().split(':') path, target = error[index].strip().split(':')
if error[index + 1] in ('is including a file from the target:', if error[index + 1] in ('is including a file from the target:',
'The include file is in the target(s):'): 'The include file is in the target(s):'):
dep = error[index + 2].strip() dep = error[index + 2].strip()
dep_path, dep = dep.split(':') dep_path, dep = dep.split(':')
dep = Rebase(path, dep_path, dep) dep = Rebase(path, dep_path, dep)
# Replacing /target:target with /target # Replacing /target:target with /target
dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep) dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep)
path = os.path.join(path[2:], 'BUILD.gn') path = os.path.join(path[2:], 'BUILD.gn')
errors_by_file[path][target].add(dep) errors_by_file[path][target].add(dep)
elif error[index + 1] == 'has a source file:': elif error[index + 1] == 'has a source file:':
deleted_file = '"' + os.path.basename( deleted_file = '"' + os.path.basename(error[index + 2].strip()) + '",'
error[index + 2].strip()) + '",' deleted_sources.add(deleted_file)
deleted_sources.add(deleted_file) else:
else: print('\n'.join(error))
print '\n'.join(error) continue
continue
for path, missing_deps in errors_by_file.items(): for path, missing_deps in list(errors_by_file.items()):
FixErrors(path, missing_deps, deleted_sources) FixErrors(path, missing_deps, deleted_sources)
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -53,7 +53,7 @@ For example:
Will be converted into: Will be converted into:
python gtest-parallel \ vpython3 gtest-parallel \
--shard_index 0 \ --shard_index 0 \
--shard_count 1 \ --shard_count 1 \
--output_dir=SOME_OUTPUT_DIR \ --output_dir=SOME_OUTPUT_DIR \
@ -82,8 +82,8 @@ Args = collections.namedtuple(
['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir']) ['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir'])
def _CatFiles(file_list, output_file): def _CatFiles(file_list, output_file_destination):
with open(output_file, 'w') as output_file: with open(output_file_destination, 'w') as output_file:
for filename in file_list: for filename in file_list:
with open(filename) as input_file: with open(filename) as input_file:
output_file.write(input_file.read()) output_file.write(input_file.read())
@ -100,7 +100,7 @@ def _ParseWorkersOption(workers):
return max(result, 1) # Sanitize when using e.g. '0.5x'. return max(result, 1) # Sanitize when using e.g. '0.5x'.
class ReconstructibleArgumentGroup(object): class ReconstructibleArgumentGroup:
"""An argument group that can be converted back into a command line. """An argument group that can be converted back into a command line.
This acts like ArgumentParser.add_argument_group, but names of arguments added This acts like ArgumentParser.add_argument_group, but names of arguments added
@ -154,7 +154,7 @@ def ParseArgs(argv=None):
parser.add_argument('--store-test-artifacts', action='store_true') parser.add_argument('--store-test-artifacts', action='store_true')
# No-sandbox is a Chromium-specific flag, ignore it. # No-sandbox is a Chromium-specific flag, ignore it.
# TODO(oprypin): Remove (bugs.webrtc.org/8115) # TODO(bugs.webrtc.org/8115): Remove workaround when fixed.
parser.add_argument('--no-sandbox', parser.add_argument('--no-sandbox',
action='store_true', action='store_true',
help=argparse.SUPPRESS) help=argparse.SUPPRESS)
@ -171,7 +171,7 @@ def ParseArgs(argv=None):
} }
args_to_pass = [] args_to_pass = []
for arg in unrecognized_args: for arg in unrecognized_args:
if any(arg.startswith(k) for k in webrtc_flags_to_change.keys()): if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())):
arg_split = arg.split('=') arg_split = arg.split('=')
args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' + args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' +
arg_split[1]) arg_split[1])

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
@ -21,152 +21,147 @@ gtest_parallel_wrapper = __import__('gtest-parallel-wrapper')
@contextmanager @contextmanager
def TemporaryDirectory(): def TemporaryDirectory():
tmp_dir = tempfile.mkdtemp() tmp_dir = tempfile.mkdtemp()
yield tmp_dir yield tmp_dir
os.rmdir(tmp_dir) os.rmdir(tmp_dir)
class GtestParallelWrapperHelpersTest(unittest.TestCase): class GtestParallelWrapperHelpersTest(unittest.TestCase):
def testGetWorkersAsIs(self): def testGetWorkersAsIs(self):
# pylint: disable=protected-access # pylint: disable=protected-access
self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('12'), 12) self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('12'), 12)
def testGetTwiceWorkers(self): def testGetTwiceWorkers(self):
expected = 2 * multiprocessing.cpu_count() expected = 2 * multiprocessing.cpu_count()
# pylint: disable=protected-access # pylint: disable=protected-access
self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), expected)
expected)
def testGetHalfWorkers(self): def testGetHalfWorkers(self):
expected = max(multiprocessing.cpu_count() // 2, 1) expected = max(multiprocessing.cpu_count() // 2, 1)
# pylint: disable=protected-access # pylint: disable=protected-access
self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('0.5x'), self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('0.5x'),
expected) expected)
class GtestParallelWrapperTest(unittest.TestCase): class GtestParallelWrapperTest(unittest.TestCase):
@classmethod @classmethod
def _Expected(cls, gtest_parallel_args): def _Expected(cls, gtest_parallel_args):
return ['--shard_index=0', '--shard_count=1'] + gtest_parallel_args return ['--shard_index=0', '--shard_count=1'] + gtest_parallel_args
def testOverwrite(self): def testOverwrite(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['--timeout=123', 'exec', '--timeout', '124']) ['--timeout=123', 'exec', '--timeout', '124'])
expected = self._Expected(['--timeout=124', 'exec']) expected = self._Expected(['--timeout=124', 'exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testMixing(self): def testMixing(self):
result = gtest_parallel_wrapper.ParseArgs([ result = gtest_parallel_wrapper.ParseArgs(
'--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124' ['--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124'])
]) expected = self._Expected(
expected = self._Expected( ['--timeout=124', 'exec', '--', '--param1', '--param2'])
['--timeout=124', 'exec', '--', '--param1', '--param2']) self.assertEqual(result.gtest_parallel_args, expected)
self.assertEqual(result.gtest_parallel_args, expected)
def testMixingPositional(self): def testMixingPositional(self):
result = gtest_parallel_wrapper.ParseArgs([ result = gtest_parallel_wrapper.ParseArgs([
'--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--foo2',
'--foo2', 'bar2' 'bar2'
]) ])
expected = self._Expected([ expected = self._Expected(
'--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2' ['--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'])
]) self.assertEqual(result.gtest_parallel_args, expected)
self.assertEqual(result.gtest_parallel_args, expected)
def testDoubleDash1(self): def testDoubleDash1(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['--timeout', '123', 'exec', '--', '--timeout', '124']) ['--timeout', '123', 'exec', '--', '--timeout', '124'])
expected = self._Expected( expected = self._Expected(
['--timeout=123', 'exec', '--', '--timeout', '124']) ['--timeout=123', 'exec', '--', '--timeout', '124'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testDoubleDash2(self): def testDoubleDash2(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['--timeout=123', '--', 'exec', '--timeout=124']) ['--timeout=123', '--', 'exec', '--timeout=124'])
expected = self._Expected( expected = self._Expected(['--timeout=123', 'exec', '--', '--timeout=124'])
['--timeout=123', 'exec', '--', '--timeout=124']) self.assertEqual(result.gtest_parallel_args, expected)
self.assertEqual(result.gtest_parallel_args, expected)
def testArtifacts(self): def testArtifacts(self):
with TemporaryDirectory() as tmp_dir: with TemporaryDirectory() as tmp_dir:
output_dir = os.path.join(tmp_dir, 'foo') output_dir = os.path.join(tmp_dir, 'foo')
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['exec', '--store-test-artifacts', '--output_dir', output_dir]) ['exec', '--store-test-artifacts', '--output_dir', output_dir])
exp_artifacts_dir = os.path.join(output_dir, 'test_artifacts') exp_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
exp = self._Expected([ exp = self._Expected([
'--output_dir=' + output_dir, 'exec', '--', '--output_dir=' + output_dir, 'exec', '--',
'--test_artifacts_dir=' + exp_artifacts_dir '--test_artifacts_dir=' + exp_artifacts_dir
]) ])
self.assertEqual(result.gtest_parallel_args, exp) self.assertEqual(result.gtest_parallel_args, exp)
self.assertEqual(result.output_dir, output_dir) self.assertEqual(result.output_dir, output_dir)
self.assertEqual(result.test_artifacts_dir, exp_artifacts_dir) self.assertEqual(result.test_artifacts_dir, exp_artifacts_dir)
def testNoDirsSpecified(self): def testNoDirsSpecified(self):
result = gtest_parallel_wrapper.ParseArgs(['exec']) result = gtest_parallel_wrapper.ParseArgs(['exec'])
self.assertEqual(result.output_dir, None) self.assertEqual(result.output_dir, None)
self.assertEqual(result.test_artifacts_dir, None) self.assertEqual(result.test_artifacts_dir, None)
def testOutputDirSpecified(self): def testOutputDirSpecified(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['exec', '--output_dir', '/tmp/foo']) ['exec', '--output_dir', '/tmp/foo'])
self.assertEqual(result.output_dir, '/tmp/foo') self.assertEqual(result.output_dir, '/tmp/foo')
self.assertEqual(result.test_artifacts_dir, None) self.assertEqual(result.test_artifacts_dir, None)
def testShortArg(self): def testShortArg(self):
result = gtest_parallel_wrapper.ParseArgs(['-d', '/tmp/foo', 'exec']) result = gtest_parallel_wrapper.ParseArgs(['-d', '/tmp/foo', 'exec'])
expected = self._Expected(['--output_dir=/tmp/foo', 'exec']) expected = self._Expected(['--output_dir=/tmp/foo', 'exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
self.assertEqual(result.output_dir, '/tmp/foo') self.assertEqual(result.output_dir, '/tmp/foo')
def testBoolArg(self): def testBoolArg(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(
['--gtest_also_run_disabled_tests', 'exec']) ['--gtest_also_run_disabled_tests', 'exec'])
expected = self._Expected(['--gtest_also_run_disabled_tests', 'exec']) expected = self._Expected(['--gtest_also_run_disabled_tests', 'exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testNoArgs(self): def testNoArgs(self):
result = gtest_parallel_wrapper.ParseArgs(['exec']) result = gtest_parallel_wrapper.ParseArgs(['exec'])
expected = self._Expected(['exec']) expected = self._Expected(['exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testDocExample(self): def testDocExample(self):
with TemporaryDirectory() as tmp_dir: with TemporaryDirectory() as tmp_dir:
output_dir = os.path.join(tmp_dir, 'foo') output_dir = os.path.join(tmp_dir, 'foo')
result = gtest_parallel_wrapper.ParseArgs([ result = gtest_parallel_wrapper.ParseArgs([
'some_test', '--some_flag=some_value', '--another_flag', 'some_test', '--some_flag=some_value', '--another_flag',
'--output_dir=' + output_dir, '--store-test-artifacts', '--output_dir=' + output_dir, '--store-test-artifacts',
'--isolated-script-test-perf-output=SOME_OTHER_DIR', '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar',
'--foo=bar', '--baz' '--baz'
]) ])
expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts') expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
expected = self._Expected([ expected = self._Expected([
'--output_dir=' + output_dir, 'some_test', '--', '--output_dir=' + output_dir, 'some_test', '--',
'--test_artifacts_dir=' + expected_artifacts_dir, '--test_artifacts_dir=' + expected_artifacts_dir,
'--some_flag=some_value', '--another_flag', '--some_flag=some_value', '--another_flag',
'--isolated_script_test_perf_output=SOME_OTHER_DIR', '--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar',
'--foo=bar', '--baz' '--baz'
]) ])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testStandardWorkers(self): def testStandardWorkers(self):
"""Check integer value is passed as-is.""" """Check integer value is passed as-is."""
result = gtest_parallel_wrapper.ParseArgs(['--workers', '17', 'exec']) result = gtest_parallel_wrapper.ParseArgs(['--workers', '17', 'exec'])
expected = self._Expected(['--workers=17', 'exec']) expected = self._Expected(['--workers=17', 'exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testTwoWorkersPerCpuCore(self): def testTwoWorkersPerCpuCore(self):
result = gtest_parallel_wrapper.ParseArgs(['--workers', '2x', 'exec']) result = gtest_parallel_wrapper.ParseArgs(['--workers', '2x', 'exec'])
workers = 2 * multiprocessing.cpu_count() workers = 2 * multiprocessing.cpu_count()
expected = self._Expected(['--workers=%s' % workers, 'exec']) expected = self._Expected(['--workers=%s' % workers, 'exec'])
self.assertEqual(result.gtest_parallel_args, expected) self.assertEqual(result.gtest_parallel_args, expected)
def testUseHalfTheCpuCores(self): def testUseHalfTheCpuCores(self):
result = gtest_parallel_wrapper.ParseArgs( result = gtest_parallel_wrapper.ParseArgs(['--workers', '0.5x', 'exec'])
['--workers', '0.5x', 'exec']) workers = max(multiprocessing.cpu_count() // 2, 1)
workers = max(multiprocessing.cpu_count() // 2, 1) expected = self._Expected(['--workers=%s' % workers, 'exec'])
expected = self._Expected(['--workers=%s' % workers, 'exec']) self.assertEqual(result.gtest_parallel_args, expected)
self.assertEqual(result.gtest_parallel_args, expected)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -51,303 +51,296 @@ from generate_licenses import LicenseBuilder
def _ParseArgs(): def _ParseArgs():
parser = argparse.ArgumentParser(description=__doc__) parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--build_config', parser.add_argument('--build_config',
default='release', default='release',
choices=['debug', 'release'], choices=['debug', 'release'],
help='The build config. Can be "debug" or "release". ' help='The build config. Can be "debug" or "release". '
'Defaults to "release".') 'Defaults to "release".')
parser.add_argument( parser.add_argument('--arch',
'--arch', nargs='+',
nargs='+', default=DEFAULT_ARCHS,
default=DEFAULT_ARCHS, choices=ENABLED_ARCHS,
choices=ENABLED_ARCHS, help='Architectures to build. Defaults to %(default)s.')
help='Architectures to build. Defaults to %(default)s.') parser.add_argument(
parser.add_argument( '-c',
'-c', '--clean',
'--clean', action='store_true',
action='store_true', default=False,
default=False, help='Removes the previously generated build output, if any.')
help='Removes the previously generated build output, if any.') parser.add_argument('-p',
parser.add_argument( '--purify',
'-p', action='store_true',
'--purify', default=False,
action='store_true', help='Purifies the previously generated build output by '
default=False, 'removing the temporary results used when (re)building.')
help='Purifies the previously generated build output by ' parser.add_argument(
'removing the temporary results used when (re)building.') '-o',
parser.add_argument( '--output-dir',
'-o', type=os.path.abspath,
'--output-dir', default=SDK_OUTPUT_DIR,
type=os.path.abspath, help='Specifies a directory to output the build artifacts to. '
default=SDK_OUTPUT_DIR, 'If specified together with -c, deletes the dir.')
help='Specifies a directory to output the build artifacts to. ' parser.add_argument(
'If specified together with -c, deletes the dir.') '-r',
parser.add_argument( '--revision',
'-r', type=int,
'--revision', default=0,
type=int, help='Specifies a revision number to embed if building the framework.')
default=0, parser.add_argument('-e',
help='Specifies a revision number to embed if building the framework.') '--bitcode',
parser.add_argument('-e', action='store_true',
'--bitcode', default=False,
action='store_true', help='Compile with bitcode.')
default=False, parser.add_argument('--verbose',
help='Compile with bitcode.') action='store_true',
parser.add_argument('--verbose', default=False,
action='store_true', help='Debug logging.')
default=False, parser.add_argument('--use-goma',
help='Debug logging.') action='store_true',
parser.add_argument('--use-goma', default=False,
action='store_true', help='Use goma to build.')
default=False, parser.add_argument(
help='Use goma to build.') '--extra-gn-args',
parser.add_argument( default=[],
'--extra-gn-args', nargs='*',
default=[], help='Additional GN args to be used during Ninja generation.')
nargs='*',
help='Additional GN args to be used during Ninja generation.')
return parser.parse_args() return parser.parse_args()
def _RunCommand(cmd): def _RunCommand(cmd):
logging.debug('Running: %r', cmd) logging.debug('Running: %r', cmd)
subprocess.check_call(cmd, cwd=SRC_DIR) subprocess.check_call(cmd, cwd=SRC_DIR)
def _CleanArtifacts(output_dir): def _CleanArtifacts(output_dir):
if os.path.isdir(output_dir): if os.path.isdir(output_dir):
logging.info('Deleting %s', output_dir) logging.info('Deleting %s', output_dir)
shutil.rmtree(output_dir) shutil.rmtree(output_dir)
def _CleanTemporary(output_dir, architectures): def _CleanTemporary(output_dir, architectures):
if os.path.isdir(output_dir): if os.path.isdir(output_dir):
logging.info('Removing temporary build files.') logging.info('Removing temporary build files.')
for arch in architectures: for arch in architectures:
arch_lib_path = os.path.join(output_dir, arch) arch_lib_path = os.path.join(output_dir, arch)
if os.path.isdir(arch_lib_path): if os.path.isdir(arch_lib_path):
shutil.rmtree(arch_lib_path) shutil.rmtree(arch_lib_path)
def _ParseArchitecture(architectures): def _ParseArchitecture(architectures):
result = dict() result = dict()
for arch in architectures: for arch in architectures:
if ":" in arch: if ":" in arch:
target_environment, target_cpu = arch.split(":") target_environment, target_cpu = arch.split(":")
else: else:
logging.warning('The environment for build is not specified.') logging.warning('The environment for build is not specified.')
logging.warning('It is assumed based on cpu type.') logging.warning('It is assumed based on cpu type.')
logging.warning('See crbug.com/1138425 for more details.') logging.warning('See crbug.com/1138425 for more details.')
if arch == "x64": if arch == "x64":
target_environment = "simulator" target_environment = "simulator"
else: else:
target_environment = "device" target_environment = "device"
target_cpu = arch target_cpu = arch
archs = result.get(target_environment) archs = result.get(target_environment)
if archs is None: if archs is None:
result[target_environment] = {target_cpu} result[target_environment] = {target_cpu}
else: else:
archs.add(target_cpu) archs.add(target_cpu)
return result return result
def BuildWebRTC(output_dir, target_environment, target_arch, flavor, def BuildWebRTC(output_dir, target_environment, target_arch, flavor,
gn_target_name, ios_deployment_target, libvpx_build_vp9, gn_target_name, ios_deployment_target, libvpx_build_vp9,
use_bitcode, use_goma, extra_gn_args): use_bitcode, use_goma, extra_gn_args):
gn_args = [ gn_args = [
'target_os="ios"', 'ios_enable_code_signing=false', 'target_os="ios"',
'is_component_build=false', 'rtc_include_tests=false', 'ios_enable_code_signing=false',
] 'is_component_build=false',
'rtc_include_tests=false',
]
# Add flavor option. # Add flavor option.
if flavor == 'debug': if flavor == 'debug':
gn_args.append('is_debug=true') gn_args.append('is_debug=true')
elif flavor == 'release': elif flavor == 'release':
gn_args.append('is_debug=false') gn_args.append('is_debug=false')
else: else:
raise ValueError('Unexpected flavor type: %s' % flavor) raise ValueError('Unexpected flavor type: %s' % flavor)
gn_args.append('target_environment="%s"' % target_environment) gn_args.append('target_environment="%s"' % target_environment)
gn_args.append('target_cpu="%s"' % target_arch) gn_args.append('target_cpu="%s"' % target_arch)
gn_args.append('ios_deployment_target="%s"' % ios_deployment_target) gn_args.append('ios_deployment_target="%s"' % ios_deployment_target)
gn_args.append('rtc_libvpx_build_vp9=' + gn_args.append('rtc_libvpx_build_vp9=' +
('true' if libvpx_build_vp9 else 'false')) ('true' if libvpx_build_vp9 else 'false'))
gn_args.append('enable_ios_bitcode=' + gn_args.append('enable_ios_bitcode=' + ('true' if use_bitcode else 'false'))
('true' if use_bitcode else 'false')) gn_args.append('use_goma=' + ('true' if use_goma else 'false'))
gn_args.append('use_goma=' + ('true' if use_goma else 'false')) gn_args.append('rtc_enable_objc_symbol_export=true')
gn_args.append('rtc_enable_objc_symbol_export=true')
args_string = ' '.join(gn_args + extra_gn_args) args_string = ' '.join(gn_args + extra_gn_args)
logging.info('Building WebRTC with args: %s', args_string) logging.info('Building WebRTC with args: %s', args_string)
cmd = [ cmd = [
sys.executable, sys.executable,
os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
'gen', 'gen',
output_dir, output_dir,
'--args=' + args_string, '--args=' + args_string,
] ]
_RunCommand(cmd) _RunCommand(cmd)
logging.info('Building target: %s', gn_target_name) logging.info('Building target: %s', gn_target_name)
cmd = [ cmd = [
os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'),
'-C', '-C',
output_dir, output_dir,
gn_target_name, gn_target_name,
] ]
if use_goma: if use_goma:
cmd.extend(['-j', '200']) cmd.extend(['-j', '200'])
_RunCommand(cmd) _RunCommand(cmd)
def main(): def main():
args = _ParseArgs() args = _ParseArgs()
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
if args.clean: if args.clean:
_CleanArtifacts(args.output_dir) _CleanArtifacts(args.output_dir)
return 0 return 0
# architectures is typed as Dict[str, Set[str]], # architectures is typed as Dict[str, Set[str]],
# where key is for the environment (device or simulator) # where key is for the environment (device or simulator)
# and value is for the cpu type. # and value is for the cpu type.
architectures = _ParseArchitecture(args.arch) architectures = _ParseArchitecture(args.arch)
gn_args = args.extra_gn_args gn_args = args.extra_gn_args
if args.purify: if args.purify:
_CleanTemporary(args.output_dir, architectures.keys()) _CleanTemporary(args.output_dir, list(architectures.keys()))
return 0 return 0
gn_target_name = 'framework_objc' gn_target_name = 'framework_objc'
if not args.bitcode: if not args.bitcode:
gn_args.append('enable_dsyms=true') gn_args.append('enable_dsyms=true')
gn_args.append('enable_stripping=true') gn_args.append('enable_stripping=true')
# Build all architectures. # Build all architectures.
framework_paths = [] framework_paths = []
all_lib_paths = [] all_lib_paths = []
for (environment, archs) in architectures.items(): for (environment, archs) in list(architectures.items()):
framework_path = os.path.join(args.output_dir, environment) framework_path = os.path.join(args.output_dir, environment)
framework_paths.append(framework_path) framework_paths.append(framework_path)
lib_paths = [] lib_paths = []
for arch in archs: for arch in archs:
lib_path = os.path.join(framework_path, arch + '_libs') lib_path = os.path.join(framework_path, arch + '_libs')
lib_paths.append(lib_path) lib_paths.append(lib_path)
BuildWebRTC(lib_path, environment, arch, args.build_config, BuildWebRTC(lib_path, environment, arch, args.build_config,
gn_target_name, IOS_DEPLOYMENT_TARGET[environment], gn_target_name, IOS_DEPLOYMENT_TARGET[environment],
LIBVPX_BUILD_VP9, args.bitcode, args.use_goma, gn_args) LIBVPX_BUILD_VP9, args.bitcode, args.use_goma, gn_args)
all_lib_paths.extend(lib_paths) all_lib_paths.extend(lib_paths)
# Combine the slices.
dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC')
# Dylibs will be combined, all other files are the same across archs.
shutil.rmtree(
os.path.join(framework_path, SDK_FRAMEWORK_NAME),
ignore_errors=True)
shutil.copytree(
os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
os.path.join(framework_path, SDK_FRAMEWORK_NAME),
symlinks=True)
logging.info('Merging framework slices for %s.', environment)
dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths]
out_dylib_path = os.path.join(framework_path, dylib_path)
if os.path.islink(out_dylib_path):
out_dylib_path = os.path.join(os.path.dirname(out_dylib_path),
os.readlink(out_dylib_path))
try:
os.remove(out_dylib_path)
except OSError:
pass
cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path]
_RunCommand(cmd)
# Merge the dSYM slices.
lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME)
if os.path.isdir(lib_dsym_dir_path):
shutil.rmtree(
os.path.join(framework_path, SDK_DSYM_NAME),
ignore_errors=True)
shutil.copytree(
lib_dsym_dir_path, os.path.join(framework_path, SDK_DSYM_NAME))
logging.info('Merging dSYM slices.')
dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources',
'DWARF', 'WebRTC')
lib_dsym_paths = [
os.path.join(path, dsym_path) for path in lib_paths
]
out_dsym_path = os.path.join(framework_path, dsym_path)
try:
os.remove(out_dsym_path)
except OSError:
pass
cmd = ['lipo'
] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
_RunCommand(cmd)
# Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)...
resources_dir = os.path.join(framework_path, SDK_FRAMEWORK_NAME,
'Resources')
if not os.path.exists(resources_dir):
# ...then fall back to iOS-style WebRTC.framework/
resources_dir = os.path.dirname(resources_dir)
# Modify the version number.
# Format should be <Branch cut MXX>.<Hotfix #>.<Rev #>.
# e.g. 55.0.14986 means
# branch cut 55, no hotfixes, and revision 14986.
infoplist_path = os.path.join(resources_dir, 'Info.plist')
cmd = [
'PlistBuddy', '-c', 'Print :CFBundleShortVersionString',
infoplist_path
]
major_minor = subprocess.check_output(cmd).decode('utf-8').strip()
version_number = '%s.%s' % (major_minor, args.revision)
logging.info('Substituting revision number: %s', version_number)
cmd = [
'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number,
infoplist_path
]
_RunCommand(cmd)
_RunCommand(['plutil', '-convert', 'binary1', infoplist_path])
xcframework_dir = os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)
if os.path.isdir(xcframework_dir):
shutil.rmtree(xcframework_dir)
logging.info('Creating xcframework.')
cmd = ['xcodebuild', '-create-xcframework', '-output', xcframework_dir]
# Apparently, xcodebuild needs absolute paths for input arguments
for framework_path in framework_paths:
cmd += [
'-framework',
os.path.abspath(os.path.join(framework_path, SDK_FRAMEWORK_NAME)),
]
dsym_full_path = os.path.join(framework_path, SDK_DSYM_NAME)
if os.path.exists(dsym_full_path):
cmd += ['-debug-symbols', os.path.abspath(dsym_full_path)]
# Combine the slices.
dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC')
# Dylibs will be combined, all other files are the same across archs.
shutil.rmtree(os.path.join(framework_path, SDK_FRAMEWORK_NAME),
ignore_errors=True)
shutil.copytree(os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
os.path.join(framework_path, SDK_FRAMEWORK_NAME),
symlinks=True)
logging.info('Merging framework slices for %s.', environment)
dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths]
out_dylib_path = os.path.join(framework_path, dylib_path)
if os.path.islink(out_dylib_path):
out_dylib_path = os.path.join(os.path.dirname(out_dylib_path),
os.readlink(out_dylib_path))
try:
os.remove(out_dylib_path)
except OSError:
pass
cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path]
_RunCommand(cmd) _RunCommand(cmd)
# Generate the license file. # Merge the dSYM slices.
logging.info('Generate license file.') lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME)
gn_target_full_name = '//sdk:' + gn_target_name if os.path.isdir(lib_dsym_dir_path):
builder = LicenseBuilder(all_lib_paths, [gn_target_full_name]) shutil.rmtree(os.path.join(framework_path, SDK_DSYM_NAME),
builder.GenerateLicenseText( ignore_errors=True)
os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)) shutil.copytree(lib_dsym_dir_path,
os.path.join(framework_path, SDK_DSYM_NAME))
logging.info('Merging dSYM slices.')
dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', 'DWARF',
'WebRTC')
lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths]
out_dsym_path = os.path.join(framework_path, dsym_path)
try:
os.remove(out_dsym_path)
except OSError:
pass
cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
_RunCommand(cmd)
logging.info('Done.') # Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)...
return 0 resources_dir = os.path.join(framework_path, SDK_FRAMEWORK_NAME,
'Resources')
if not os.path.exists(resources_dir):
# ...then fall back to iOS-style WebRTC.framework/
resources_dir = os.path.dirname(resources_dir)
# Modify the version number.
# Format should be <Branch cut MXX>.<Hotfix #>.<Rev #>.
# e.g. 55.0.14986 means
# branch cut 55, no hotfixes, and revision 14986.
infoplist_path = os.path.join(resources_dir, 'Info.plist')
cmd = [
'PlistBuddy', '-c', 'Print :CFBundleShortVersionString',
infoplist_path
]
major_minor = subprocess.check_output(cmd).decode('utf-8').strip()
version_number = '%s.%s' % (major_minor, args.revision)
logging.info('Substituting revision number: %s', version_number)
cmd = [
'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number,
infoplist_path
]
_RunCommand(cmd)
_RunCommand(['plutil', '-convert', 'binary1', infoplist_path])
xcframework_dir = os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)
if os.path.isdir(xcframework_dir):
shutil.rmtree(xcframework_dir)
logging.info('Creating xcframework.')
cmd = ['xcodebuild', '-create-xcframework', '-output', xcframework_dir]
# Apparently, xcodebuild needs absolute paths for input arguments
for framework_path in framework_paths:
cmd += [
'-framework',
os.path.abspath(os.path.join(framework_path, SDK_FRAMEWORK_NAME)),
]
dsym_full_path = os.path.join(framework_path, SDK_DSYM_NAME)
if os.path.exists(dsym_full_path):
cmd += ['-debug-symbols', os.path.abspath(dsym_full_path)]
_RunCommand(cmd)
# Generate the license file.
logging.info('Generate license file.')
gn_target_full_name = '//sdk:' + gn_target_name
builder = LicenseBuilder(all_lib_paths, [gn_target_full_name])
builder.GenerateLicenseText(
os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME))
logging.info('Done.')
return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,22 +13,22 @@ import sys
def GenerateModulemap(): def GenerateModulemap():
parser = argparse.ArgumentParser(description='Generate modulemap') parser = argparse.ArgumentParser(description='Generate modulemap')
parser.add_argument("-o", "--out", type=str, help="Output file.") parser.add_argument("-o", "--out", type=str, help="Output file.")
parser.add_argument("-n", "--name", type=str, help="Name of binary.") parser.add_argument("-n", "--name", type=str, help="Name of binary.")
args = parser.parse_args() args = parser.parse_args()
with open(args.out, "w") as outfile: with open(args.out, "w") as outfile:
module_template = 'framework module %s {\n' \ module_template = 'framework module %s {\n' \
' umbrella header "%s.h"\n' \ ' umbrella header "%s.h"\n' \
'\n' \ '\n' \
' export *\n' \ ' export *\n' \
' module * { export * }\n' \ ' module * { export * }\n' \
'}\n' % (args.name, args.name) '}\n' % (args.name, args.name)
outfile.write(module_template) outfile.write(module_template)
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(GenerateModulemap()) sys.exit(GenerateModulemap())

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -14,20 +16,20 @@ import textwrap
def GenerateUmbrellaHeader(): def GenerateUmbrellaHeader():
parser = argparse.ArgumentParser(description='Generate umbrella header') parser = argparse.ArgumentParser(description='Generate umbrella header')
parser.add_argument("-o", "--out", type=str, help="Output file.") parser.add_argument("-o", "--out", type=str, help="Output file.")
parser.add_argument("-s", parser.add_argument("-s",
"--sources", "--sources",
default=[], default=[],
type=str, type=str,
nargs='+', nargs='+',
help="Headers to include.") help="Headers to include.")
args = parser.parse_args() args = parser.parse_args()
with open(args.out, "w") as outfile: with open(args.out, "w") as outfile:
outfile.write( outfile.write(
textwrap.dedent("""\ textwrap.dedent("""\
/* /*
* Copyright %d The WebRTC project authors. All Rights Reserved. * Copyright %d The WebRTC project authors. All Rights Reserved.
* *
@ -38,11 +40,11 @@ def GenerateUmbrellaHeader():
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/\n\n""" % datetime.datetime.now().year)) */\n\n""" % datetime.datetime.now().year))
for s in args.sources: for s in args.sources:
outfile.write("#import <WebRTC/{}>\n".format(os.path.basename(s))) outfile.write("#import <WebRTC/{}>\n".format(os.path.basename(s)))
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(GenerateUmbrellaHeader()) sys.exit(GenerateUmbrellaHeader())

View file

@ -1,4 +1,4 @@
#!/usr/bin/python #!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved. # Copyright 2016 The WebRTC project authors. All Rights Reserved.
# #
@ -10,18 +10,19 @@
"""Script for merging generated iOS libraries.""" """Script for merging generated iOS libraries."""
import sys import sys
import argparse import argparse
import os import os
import re import re
import subprocess import subprocess
from six.moves import range
# Valid arch subdir names. # Valid arch subdir names.
VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs'] VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs']
def MergeLibs(lib_base_dir): def MergeLibs(lib_base_dir):
"""Merges generated iOS libraries for different archs. """Merges generated iOS libraries for different archs.
Uses libtool to generate FAT archive files for each generated library. Uses libtool to generate FAT archive files for each generated library.
@ -32,96 +33,94 @@ def MergeLibs(lib_base_dir):
Returns: Returns:
Exit code of libtool. Exit code of libtool.
""" """
output_dir_name = 'fat_libs' output_dir_name = 'fat_libs'
archs = [arch for arch in os.listdir(lib_base_dir) if arch in VALID_ARCHS] archs = [arch for arch in os.listdir(lib_base_dir) if arch in VALID_ARCHS]
# For each arch, find (library name, libary path) for arch. We will merge # For each arch, find (library name, libary path) for arch. We will merge
# all libraries with the same name. # all libraries with the same name.
libs = {} libs = {}
for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]: for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]:
if not os.path.exists(lib_dir): if not os.path.exists(lib_dir):
continue continue
for dirpath, _, filenames in os.walk(lib_dir): for dirpath, _, filenames in os.walk(lib_dir):
for filename in filenames: for filename in filenames:
if not filename.endswith('.a'): if not filename.endswith('.a'):
continue continue
entry = libs.get(filename, []) entry = libs.get(filename, [])
entry.append(os.path.join(dirpath, filename)) entry.append(os.path.join(dirpath, filename))
libs[filename] = entry libs[filename] = entry
orphaned_libs = {} orphaned_libs = {}
valid_libs = {} valid_libs = {}
for library, paths in libs.items(): for library, paths in list(libs.items()):
if len(paths) < len(archs): if len(paths) < len(archs):
orphaned_libs[library] = paths orphaned_libs[library] = paths
else: else:
valid_libs[library] = paths valid_libs[library] = paths
for library, paths in orphaned_libs.items(): for library, paths in list(orphaned_libs.items()):
components = library[:-2].split('_')[:-1] components = library[:-2].split('_')[:-1]
found = False found = False
# Find directly matching parent libs by stripping suffix. # Find directly matching parent libs by stripping suffix.
while components and not found: while components and not found:
parent_library = '_'.join(components) + '.a' parent_library = '_'.join(components) + '.a'
if parent_library in valid_libs: if parent_library in valid_libs:
valid_libs[parent_library].extend(paths) valid_libs[parent_library].extend(paths)
found = True found = True
break break
components = components[:-1] components = components[:-1]
# Find next best match by finding parent libs with the same prefix. # Find next best match by finding parent libs with the same prefix.
if not found: if not found:
base_prefix = library[:-2].split('_')[0] base_prefix = library[:-2].split('_')[0]
for valid_lib, valid_paths in valid_libs.items(): for valid_lib, valid_paths in list(valid_libs.items()):
if valid_lib[:len(base_prefix)] == base_prefix: if valid_lib[:len(base_prefix)] == base_prefix:
valid_paths.extend(paths) valid_paths.extend(paths)
found = True found = True
break break
assert found assert found
# Create output directory. # Create output directory.
output_dir_path = os.path.join(lib_base_dir, output_dir_name) output_dir_path = os.path.join(lib_base_dir, output_dir_name)
if not os.path.exists(output_dir_path): if not os.path.exists(output_dir_path):
os.mkdir(output_dir_path) os.mkdir(output_dir_path)
# Use this so libtool merged binaries are always the same. # Use this so libtool merged binaries are always the same.
env = os.environ.copy() env = os.environ.copy()
env['ZERO_AR_DATE'] = '1' env['ZERO_AR_DATE'] = '1'
# Ignore certain errors. # Ignore certain errors.
libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$') libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$')
# Merge libraries using libtool. # Merge libraries using libtool.
libtool_returncode = 0 libtool_returncode = 0
for library, paths in valid_libs.items(): for library, paths in list(valid_libs.items()):
cmd_list = [ cmd_list = [
'libtool', '-static', '-v', '-o', 'libtool', '-static', '-v', '-o',
os.path.join(output_dir_path, library) os.path.join(output_dir_path, library)
] + paths ] + paths
libtoolout = subprocess.Popen(cmd_list, libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
stderr=subprocess.PIPE, _, err = libtoolout.communicate()
env=env) for line in err.splitlines():
_, err = libtoolout.communicate() if not libtool_re.match(line):
for line in err.splitlines(): print(line, file=sys.stderr)
if not libtool_re.match(line): # Unconditionally touch the output .a file on the command line if present
print >> sys.stderr, line # and the command succeeded. A bit hacky.
# Unconditionally touch the output .a file on the command line if present libtool_returncode = libtoolout.returncode
# and the command succeeded. A bit hacky. if not libtool_returncode:
libtool_returncode = libtoolout.returncode for i in range(len(cmd_list) - 1):
if not libtool_returncode: if cmd_list[i] == '-o' and cmd_list[i + 1].endswith('.a'):
for i in range(len(cmd_list) - 1): os.utime(cmd_list[i + 1], None)
if cmd_list[i] == '-o' and cmd_list[i + 1].endswith('.a'): break
os.utime(cmd_list[i + 1], None) return libtool_returncode
break
return libtool_returncode
def Main(): def main():
parser_description = 'Merge WebRTC libraries.' parser_description = 'Merge WebRTC libraries.'
parser = argparse.ArgumentParser(description=parser_description) parser = argparse.ArgumentParser(description=parser_description)
parser.add_argument('lib_base_dir', parser.add_argument('lib_base_dir',
help='Directory with built libraries. ', help='Directory with built libraries. ',
type=str) type=str)
args = parser.parse_args() args = parser.parse_args()
lib_base_dir = args.lib_base_dir lib_base_dir = args.lib_base_dir
MergeLibs(lib_base_dir) MergeLibs(lib_base_dir)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(Main()) sys.exit(main())

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python3 #!/usr/bin/env vpython3
# Copyright 2016 The WebRTC project authors. All Rights Reserved. # Copyright 2016 The WebRTC project authors. All Rights Reserved.
# #
@ -13,7 +13,8 @@ Licenses are taken from dependent libraries which are determined by
GN desc command `gn desc` on all targets specified via `--target` argument. GN desc command `gn desc` on all targets specified via `--target` argument.
One can see all dependencies by invoking this command: One can see all dependencies by invoking this command:
$ gn.py desc --all --format=json <out_directory> <target> | python -m json.tool $ gn.py desc --all --format=json <out_directory> <target> | \
vpython3 -m json.tool
(see "deps" subarray) (see "deps" subarray)
Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary. Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
@ -21,18 +22,13 @@ Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
""" """
import sys import sys
import argparse import argparse
import json import json
import logging import logging
import os import os
import re import re
import subprocess import subprocess
try: from html import escape
# python 3.2+
from html import escape
except ImportError:
from cgi import escape
# Third_party library to licences mapping. Keys are names of the libraries # Third_party library to licences mapping. Keys are names of the libraries
# (right after the `third_party/` prefix) # (right after the `third_party/` prefix)
@ -107,11 +103,11 @@ LIB_REGEX_TO_LICENSES_DICT = {
def FindSrcDirPath(): def FindSrcDirPath():
"""Returns the abs path to the src/ dir of the project.""" """Returns the abs path to the src/ dir of the project."""
src_dir = os.path.dirname(os.path.abspath(__file__)) src_dir = os.path.dirname(os.path.abspath(__file__))
while os.path.basename(src_dir) != 'src': while os.path.basename(src_dir) != 'src':
src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
return src_dir return src_dir
SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0])) SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0]))
@ -124,29 +120,29 @@ THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'
THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$' THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$'
class LicenseBuilder(object): class LicenseBuilder:
def __init__(self, def __init__(self,
buildfile_dirs, buildfile_dirs,
targets, targets,
lib_to_licenses_dict=None, lib_to_licenses_dict=None,
lib_regex_to_licenses_dict=None): lib_regex_to_licenses_dict=None):
if lib_to_licenses_dict is None: if lib_to_licenses_dict is None:
lib_to_licenses_dict = LIB_TO_LICENSES_DICT lib_to_licenses_dict = LIB_TO_LICENSES_DICT
if lib_regex_to_licenses_dict is None: if lib_regex_to_licenses_dict is None:
lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT
self.buildfile_dirs = buildfile_dirs self.buildfile_dirs = buildfile_dirs
self.targets = targets self.targets = targets
self.lib_to_licenses_dict = lib_to_licenses_dict self.lib_to_licenses_dict = lib_to_licenses_dict
self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict
self.common_licenses_dict = self.lib_to_licenses_dict.copy() self.common_licenses_dict = self.lib_to_licenses_dict.copy()
self.common_licenses_dict.update(self.lib_regex_to_licenses_dict) self.common_licenses_dict.update(self.lib_regex_to_licenses_dict)
@staticmethod @staticmethod
def _ParseLibraryName(dep): def _ParseLibraryName(dep):
"""Returns library name after third_party """Returns library name after third_party
Input one of: Input one of:
//a/b/third_party/libname:c //a/b/third_party/libname:c
@ -155,11 +151,11 @@ class LicenseBuilder(object):
Outputs libname or None if this is not a third_party dependency. Outputs libname or None if this is not a third_party dependency.
""" """
groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep) groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep)
return groups.group(1) if groups else None return groups.group(1) if groups else None
def _ParseLibrary(self, dep): def _ParseLibrary(self, dep):
"""Returns library simple or regex name that matches `dep` after third_party """Returns library simple or regex name that matches `dep` after third_party
This method matches `dep` dependency against simple names in This method matches `dep` dependency against simple names in
LIB_TO_LICENSES_DICT and regular expression names in LIB_TO_LICENSES_DICT and regular expression names in
@ -167,109 +163,104 @@ class LicenseBuilder(object):
Outputs matched dict key or None if this is not a third_party dependency. Outputs matched dict key or None if this is not a third_party dependency.
""" """
libname = LicenseBuilder._ParseLibraryName(dep) libname = LicenseBuilder._ParseLibraryName(dep)
for lib_regex in self.lib_regex_to_licenses_dict: for lib_regex in self.lib_regex_to_licenses_dict:
if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep): if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep):
return lib_regex return lib_regex
return libname return libname
@staticmethod @staticmethod
def _RunGN(buildfile_dir, target): def _RunGN(buildfile_dir, target):
cmd = [ cmd = [
sys.executable, sys.executable,
os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'), os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
'desc', 'desc',
'--all', '--all',
'--format=json', '--format=json',
os.path.abspath(buildfile_dir), os.path.abspath(buildfile_dir),
target, target,
] ]
logging.debug('Running: %r', cmd) logging.debug('Running: %r', cmd)
output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8') output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8')
logging.debug('Output: %s', output_json) logging.debug('Output: %s', output_json)
return output_json return output_json
def _GetThirdPartyLibraries(self, buildfile_dir, target): def _GetThirdPartyLibraries(self, buildfile_dir, target):
output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target)) output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
libraries = set() libraries = set()
for described_target in output.values(): for described_target in list(output.values()):
third_party_libs = (self._ParseLibrary(dep) third_party_libs = (self._ParseLibrary(dep)
for dep in described_target['deps']) for dep in described_target['deps'])
libraries |= set(lib for lib in third_party_libs if lib) libraries |= set(lib for lib in third_party_libs if lib)
return libraries return libraries
def GenerateLicenseText(self, output_dir): def GenerateLicenseText(self, output_dir):
# Get a list of third_party libs from gn. For fat libraries we must consider # Get a list of third_party libs from gn. For fat libraries we must consider
# all architectures, hence the multiple buildfile directories. # all architectures, hence the multiple buildfile directories.
third_party_libs = set() third_party_libs = set()
for buildfile in self.buildfile_dirs: for buildfile in self.buildfile_dirs:
for target in self.targets: for target in self.targets:
third_party_libs |= self._GetThirdPartyLibraries( third_party_libs |= self._GetThirdPartyLibraries(buildfile, target)
buildfile, target) assert len(third_party_libs) > 0
assert len(third_party_libs) > 0
missing_licenses = third_party_libs - set( missing_licenses = third_party_libs - set(self.common_licenses_dict.keys())
self.common_licenses_dict.keys()) if missing_licenses:
if missing_licenses: error_msg = 'Missing licenses for following third_party targets: %s' % \
error_msg = 'Missing licenses for following third_party targets: %s' % \ ', '.join(sorted(missing_licenses))
', '.join(sorted(missing_licenses)) logging.error(error_msg)
logging.error(error_msg) raise Exception(error_msg)
raise Exception(error_msg)
# Put webrtc at the front of the list. # Put webrtc at the front of the list.
license_libs = sorted(third_party_libs) license_libs = sorted(third_party_libs)
license_libs.insert(0, 'webrtc') license_libs.insert(0, 'webrtc')
logging.info('List of licenses: %s', ', '.join(license_libs)) logging.info('List of licenses: %s', ', '.join(license_libs))
# Generate markdown. # Generate markdown.
output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+')
'w+') for license_lib in license_libs:
for license_lib in license_libs: if len(self.common_licenses_dict[license_lib]) == 0:
if len(self.common_licenses_dict[license_lib]) == 0: logging.info('Skipping compile time or internal dependency: %s',
logging.info( license_lib)
'Skipping compile time or internal dependency: %s', continue # Compile time dependency
license_lib)
continue # Compile time dependency
output_license_file.write('# %s\n' % license_lib) output_license_file.write('# %s\n' % license_lib)
output_license_file.write('```\n') output_license_file.write('```\n')
for path in self.common_licenses_dict[license_lib]: for path in self.common_licenses_dict[license_lib]:
license_path = os.path.join(WEBRTC_ROOT, path) license_path = os.path.join(WEBRTC_ROOT, path)
with open(license_path, 'r') as license_file: with open(license_path, 'r') as license_file:
license_text = escape(license_file.read(), quote=True) license_text = escape(license_file.read(), quote=True)
output_license_file.write(license_text) output_license_file.write(license_text)
output_license_file.write('\n') output_license_file.write('\n')
output_license_file.write('```\n\n') output_license_file.write('```\n\n')
output_license_file.close() output_license_file.close()
def main(): def main():
parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md') parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md')
parser.add_argument('--verbose', parser.add_argument('--verbose',
action='store_true', action='store_true',
default=False, default=False,
help='Debug logging.') help='Debug logging.')
parser.add_argument('--target', parser.add_argument('--target',
required=True, required=True,
action='append', action='append',
default=[], default=[],
help='Name of the GN target to generate a license for') help='Name of the GN target to generate a license for')
parser.add_argument('output_dir', parser.add_argument('output_dir', help='Directory to output LICENSE.md to.')
help='Directory to output LICENSE.md to.') parser.add_argument('buildfile_dirs',
parser.add_argument('buildfile_dirs', nargs='+',
nargs='+', help='Directories containing gn generated ninja files')
help='Directories containing gn generated ninja files') args = parser.parse_args()
args = parser.parse_args()
logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO) logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
builder = LicenseBuilder(args.buildfile_dirs, args.target) builder = LicenseBuilder(args.buildfile_dirs, args.target)
builder.GenerateLicenseText(args.output_dir) builder.GenerateLicenseText(args.output_dir)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,5 +1,6 @@
#!/usr/bin/env vpython #!/usr/bin/env vpython3
# pylint: disable=relative-import,protected-access,unused-argument
# pylint: disable=protected-access,unused-argument
# Copyright 2017 The WebRTC project authors. All Rights Reserved. # Copyright 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -10,20 +11,15 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import unittest import unittest
try: from mock import patch
# python 3.3+
from unittest.mock import patch
except ImportError:
# From site-package
from mock import patch
from generate_licenses import LicenseBuilder from generate_licenses import LicenseBuilder
class TestLicenseBuilder(unittest.TestCase): class TestLicenseBuilder(unittest.TestCase):
@staticmethod @staticmethod
def _FakeRunGN(buildfile_dir, target): def _FakeRunGN(buildfile_dir, target):
return """ return """
{ {
"target1": { "target1": {
"deps": [ "deps": [
@ -36,93 +32,89 @@ class TestLicenseBuilder(unittest.TestCase):
} }
""" """
def testParseLibraryName(self): def testParseLibraryName(self):
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'), LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'),
'libname1') 'libname1')
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName( LicenseBuilder._ParseLibraryName('//a/b/third_party/libname2:c(d)'),
'//a/b/third_party/libname2:c(d)'), 'libname2') 'libname2')
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName( LicenseBuilder._ParseLibraryName('//a/b/third_party/libname3/c:d(e)'),
'//a/b/third_party/libname3/c:d(e)'), 'libname3') 'libname3')
self.assertEqual( self.assertEqual(
LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None) LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None)
def testParseLibrarySimpleMatch(self): def testParseLibrarySimpleMatch(self):
builder = LicenseBuilder([], [], {}, {}) builder = LicenseBuilder([], [], {}, {})
self.assertEqual(builder._ParseLibrary('//a/b/third_party/libname:c'), self.assertEqual(builder._ParseLibrary('//a/b/third_party/libname:c'),
'libname') 'libname')
def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self): def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self):
lib_dict = { lib_dict = {
'libname:foo.*': ['path/to/LICENSE'], 'libname:foo.*': ['path/to/LICENSE'],
} }
builder = LicenseBuilder([], [], lib_dict, {}) builder = LicenseBuilder([], [], lib_dict, {})
self.assertEqual( self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname:bar_java'), builder._ParseLibrary('//a/b/third_party/libname:bar_java'), 'libname')
'libname')
def testParseLibraryRegExMatch(self): def testParseLibraryRegExMatch(self):
lib_regex_dict = { lib_regex_dict = {
'libname:foo.*': ['path/to/LICENSE'], 'libname:foo.*': ['path/to/LICENSE'],
} }
builder = LicenseBuilder([], [], {}, lib_regex_dict) builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual( self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'), builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'),
'libname:foo.*') 'libname:foo.*')
def testParseLibraryRegExMatchWithSubDirectory(self): def testParseLibraryRegExMatchWithSubDirectory(self):
lib_regex_dict = { lib_regex_dict = {
'libname/foo:bar.*': ['path/to/LICENSE'], 'libname/foo:bar.*': ['path/to/LICENSE'],
} }
builder = LicenseBuilder([], [], {}, lib_regex_dict) builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual( self.assertEqual(
builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'), builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'),
'libname/foo:bar.*') 'libname/foo:bar.*')
def testParseLibraryRegExMatchWithStarInside(self): def testParseLibraryRegExMatchWithStarInside(self):
lib_regex_dict = { lib_regex_dict = {
'libname/foo.*bar.*': ['path/to/LICENSE'], 'libname/foo.*bar.*': ['path/to/LICENSE'],
} }
builder = LicenseBuilder([], [], {}, lib_regex_dict) builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual( self.assertEqual(
builder._ParseLibrary( builder._ParseLibrary('//a/b/third_party/libname/fooHAHA:bar_java'),
'//a/b/third_party/libname/fooHAHA:bar_java'), 'libname/foo.*bar.*')
'libname/foo.*bar.*')
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGetThirdPartyLibrariesWithoutRegex(self): def testGetThirdPartyLibrariesWithoutRegex(self):
builder = LicenseBuilder([], [], {}, {}) builder = LicenseBuilder([], [], {}, {})
self.assertEqual( self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
builder._GetThirdPartyLibraries('out/arm', 'target1'), set(['libname1', 'libname2', 'libname3']))
set(['libname1', 'libname2', 'libname3']))
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGetThirdPartyLibrariesWithRegex(self): def testGetThirdPartyLibrariesWithRegex(self):
lib_regex_dict = { lib_regex_dict = {
'libname2:c.*': ['path/to/LICENSE'], 'libname2:c.*': ['path/to/LICENSE'],
} }
builder = LicenseBuilder([], [], {}, lib_regex_dict) builder = LicenseBuilder([], [], {}, lib_regex_dict)
self.assertEqual( self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
builder._GetThirdPartyLibraries('out/arm', 'target1'), set(['libname1', 'libname2:c.*', 'libname3']))
set(['libname1', 'libname2:c.*', 'libname3']))
@patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN) @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
def testGenerateLicenseTextFailIfUnknownLibrary(self): def testGenerateLicenseTextFailIfUnknownLibrary(self):
lib_dict = { lib_dict = {
'simple_library': ['path/to/LICENSE'], 'simple_library': ['path/to/LICENSE'],
} }
builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {}) builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {})
with self.assertRaises(Exception) as context: with self.assertRaises(Exception) as context:
builder.GenerateLicenseText('dummy/dir') builder.GenerateLicenseText('dummy/dir')
self.assertEqual( self.assertEqual(
context.exception.args[0], context.exception.args[0],
'Missing licenses for following third_party targets: ' 'Missing licenses for following third_party targets: '
'libname1, libname2, libname3') 'libname1, libname2, libname3')
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -7,6 +9,10 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True
def _CommonChecks(input_api, output_api): def _CommonChecks(input_api, output_api):
results = [] results = []
@ -27,15 +33,16 @@ def _CommonChecks(input_api, output_api):
results.extend(input_api.RunTests(pylint_checks)) results.extend(input_api.RunTests(pylint_checks))
# Run the MB unittests. # Run the MB unittests.
results.extend(input_api.canned_checks.RunUnitTestsInDirectory( results.extend(
input_api, input_api.canned_checks.RunUnitTestsInDirectory(input_api,
output_api, output_api,
'.', '.',
[ r'^.+_unittest\.py$'], [r'^.+_unittest\.py$'],
skip_shebang_check=True)) skip_shebang_check=False,
run_on_python2=False))
# Validate the format of the mb_config.pyl file. # Validate the format of the mb_config.pyl file.
cmd = [input_api.python_executable, 'mb.py', 'validate'] cmd = [input_api.python3_executable, 'mb.py', 'validate']
kwargs = {'cwd': input_api.PresubmitLocalPath()} kwargs = {'cwd': input_api.PresubmitLocalPath()}
results.extend(input_api.RunTests([ results.extend(input_api.RunTests([
input_api.Command(name='mb_validate', input_api.Command(name='mb_validate',

View file

@ -3,4 +3,4 @@ setlocal
:: This is required with cygwin only. :: This is required with cygwin only.
PATH=%~dp0;%PATH% PATH=%~dp0;%PATH%
set PYTHONDONTWRITEBYTECODE=1 set PYTHONDONTWRITEBYTECODE=1
call python "%~dp0mb.py" %* call vpython3 "%~dp0mb.py" %*

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -13,8 +14,6 @@ MB is a wrapper script for GN that can be used to generate build files
for sets of canned configurations and analyze them. for sets of canned configurations and analyze them.
""" """
from __future__ import print_function
import argparse import argparse
import ast import ast
import errno import errno
@ -28,10 +27,7 @@ import sys
import subprocess import subprocess
import tempfile import tempfile
import traceback import traceback
try: from urllib.request import urlopen
from urllib2 import urlopen # for Python2
except ImportError:
from urllib.request import urlopen # for Python3
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR)) SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
@ -280,7 +276,7 @@ class MetaBuildWrapper(object):
def CmdExport(self): def CmdExport(self):
self.ReadConfigFile() self.ReadConfigFile()
obj = {} obj = {}
for builder_group, builders in self.builder_groups.items(): for builder_group, builders in list(self.builder_groups.items()):
obj[builder_group] = {} obj[builder_group] = {}
for builder in builders: for builder in builders:
config = self.builder_groups[builder_group][builder] config = self.builder_groups[builder_group][builder]
@ -290,7 +286,7 @@ class MetaBuildWrapper(object):
if isinstance(config, dict): if isinstance(config, dict):
args = { args = {
k: self.FlattenConfig(v)['gn_args'] k: self.FlattenConfig(v)['gn_args']
for k, v in config.items() for k, v in list(config.items())
} }
elif config.startswith('//'): elif config.startswith('//'):
args = config args = config
@ -476,15 +472,15 @@ class MetaBuildWrapper(object):
# Build a list of all of the configs referenced by builders. # Build a list of all of the configs referenced by builders.
all_configs = {} all_configs = {}
for builder_group in self.builder_groups: for builder_group in self.builder_groups:
for config in self.builder_groups[builder_group].values(): for config in list(self.builder_groups[builder_group].values()):
if isinstance(config, dict): if isinstance(config, dict):
for c in config.values(): for c in list(config.values()):
all_configs[c] = builder_group all_configs[c] = builder_group
else: else:
all_configs[config] = builder_group all_configs[config] = builder_group
# Check that every referenced args file or config actually exists. # Check that every referenced args file or config actually exists.
for config, loc in all_configs.items(): for config, loc in list(all_configs.items()):
if config.startswith('//'): if config.startswith('//'):
if not self.Exists(self.ToAbsPath(config)): if not self.Exists(self.ToAbsPath(config)):
errs.append('Unknown args file "%s" referenced from "%s".' % errs.append('Unknown args file "%s" referenced from "%s".' %
@ -500,7 +496,7 @@ class MetaBuildWrapper(object):
# Figure out the whole list of mixins, and check that every mixin # Figure out the whole list of mixins, and check that every mixin
# listed by a config or another mixin actually exists. # listed by a config or another mixin actually exists.
referenced_mixins = set() referenced_mixins = set()
for config, mixins in self.configs.items(): for config, mixins in list(self.configs.items()):
for mixin in mixins: for mixin in mixins:
if not mixin in self.mixins: if not mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by config "%s".' % errs.append('Unknown mixin "%s" referenced by config "%s".' %
@ -1172,7 +1168,7 @@ class MetaBuildWrapper(object):
self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var]))) self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
if cmd[0] == self.executable: if cmd[0] == self.executable:
cmd = ['python'] + cmd[1:] cmd = ['vpython3'] + cmd[1:]
self.Print(*[shell_quoter(arg) for arg in cmd]) self.Print(*[shell_quoter(arg) for arg in cmd])
def PrintJSON(self, obj): def PrintJSON(self, obj):

View file

@ -1,4 +1,5 @@
#!/usr/bin/python #!/usr/bin/env vpython3
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,10 +12,7 @@
import ast import ast
import json import json
try: from io import StringIO # for Python3
from StringIO import StringIO # for Python2
except ImportError:
from io import StringIO # for Python3
import os import os
import re import re
import sys import sys
@ -35,14 +33,14 @@ class FakeMBW(mb.MetaBuildWrapper):
self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\' self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\'
'gn_isolate_map.pyl') 'gn_isolate_map.pyl')
self.platform = 'win32' self.platform = 'win32'
self.executable = 'c:\\python\\python.exe' self.executable = 'c:\\python\\vpython3.exe'
self.sep = '\\' self.sep = '\\'
self.cwd = 'c:\\fake_src\\out\\Default' self.cwd = 'c:\\fake_src\\out\\Default'
else: else:
self.src_dir = '/fake_src' self.src_dir = '/fake_src'
self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl' self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl'
self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl' self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl'
self.executable = '/usr/bin/python' self.executable = '/usr/bin/vpython3'
self.platform = 'linux2' self.platform = 'linux2'
self.sep = '/' self.sep = '/'
self.cwd = '/fake_src/out/Default' self.cwd = '/fake_src/out/Default'
@ -197,7 +195,7 @@ class UnitTest(unittest.TestCase):
mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'), mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'),
'is_debug = false\n') 'is_debug = false\n')
if files: if files:
for path, contents in files.items(): for path, contents in list(files.items()):
mbw.files[path] = contents mbw.files[path] = contents
return mbw return mbw
@ -846,8 +844,8 @@ class UnitTest(unittest.TestCase):
'/fake_src/out/Default/base_unittests.archive.json': '/fake_src/out/Default/base_unittests.archive.json':
("{\"base_unittests\":\"fake_hash\"}"), ("{\"base_unittests\":\"fake_hash\"}"),
'/fake_src/third_party/depot_tools/cipd_manifest.txt': '/fake_src/third_party/depot_tools/cipd_manifest.txt':
("# vpython\n" ("# vpython3\n"
"/some/vpython/pkg git_revision:deadbeef\n"), "/some/vpython3/pkg git_revision:deadbeef\n"),
} }
task_json = json.dumps({'tasks': [{'task_id': '00000'}]}) task_json = json.dumps({'tasks': [{'task_id': '00000'}]})
collect_json = json.dumps({'00000': {'results': {}}}) collect_json = json.dumps({'00000': {'results': {}}})

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -9,28 +10,27 @@
"""Configuration class for network emulation.""" """Configuration class for network emulation."""
class ConnectionConfig(object): class ConnectionConfig:
"""Configuration containing the characteristics of a network connection.""" """Configuration containing the characteristics of a network connection."""
def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms, def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms,
packet_loss_percent, queue_slots): packet_loss_percent, queue_slots):
self.num = num self.num = num
self.name = name self.name = name
self.receive_bw_kbps = receive_bw_kbps self.receive_bw_kbps = receive_bw_kbps
self.send_bw_kbps = send_bw_kbps self.send_bw_kbps = send_bw_kbps
self.delay_ms = delay_ms self.delay_ms = delay_ms
self.packet_loss_percent = packet_loss_percent self.packet_loss_percent = packet_loss_percent
self.queue_slots = queue_slots self.queue_slots = queue_slots
def __str__(self): def __str__(self):
"""String representing the configuration. """String representing the configuration.
Returns: Returns:
A string formatted and padded like this example: A string formatted and padded like this example:
12 Name 375 kbps 375 kbps 10 145 ms 0.1 % 12 Name 375 kbps 375 kbps 10 145 ms 0.1 %
""" """
left_aligned_name = self.name.ljust(24, ' ') left_aligned_name = self.name.ljust(24, ' ')
return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % ( return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % (
self.num, left_aligned_name, self.receive_bw_kbps, self.num, left_aligned_name, self.receive_bw_kbps, self.send_bw_kbps,
self.send_bw_kbps, self.queue_slots, self.delay_ms, self.queue_slots, self.delay_ms, self.packet_loss_percent)
self.packet_loss_percent)

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -46,170 +47,163 @@ _DEFAULT_PRESET = _PRESETS_DICT[_DEFAULT_PRESET_ID]
class NonStrippingEpilogOptionParser(optparse.OptionParser): class NonStrippingEpilogOptionParser(optparse.OptionParser):
"""Custom parser to let us show the epilog without weird line breaking.""" """Custom parser to let us show the epilog without weird line breaking."""
def format_epilog(self, formatter): def format_epilog(self, formatter):
return self.epilog return self.epilog
def _GetExternalIp(): def _GetExternalIp():
"""Finds out the machine's external IP by connecting to google.com.""" """Finds out the machine's external IP by connecting to google.com."""
external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
external_socket.connect(('google.com', 80)) external_socket.connect(('google.com', 80))
return external_socket.getsockname()[0] return external_socket.getsockname()[0]
def _ParseArgs(): def _ParseArgs():
"""Define and parse the command-line arguments.""" """Define and parse the command-line arguments."""
presets_string = '\n'.join(str(p) for p in _PRESETS) presets_string = '\n'.join(str(p) for p in _PRESETS)
parser = NonStrippingEpilogOptionParser(epilog=( parser = NonStrippingEpilogOptionParser(epilog=(
'\nAvailable presets:\n' '\nAvailable presets:\n'
' Bandwidth (kbps) Packet\n' ' Bandwidth (kbps) Packet\n'
'ID Name Receive Send Queue Delay loss \n' 'ID Name Receive Send Queue Delay loss \n'
'-- ---- --------- -------- ----- ------- ------\n' '-- ---- --------- -------- ----- ------- ------\n'
'%s\n' % presets_string)) '%s\n' % presets_string))
parser.add_option('-p', parser.add_option('-p',
'--preset', '--preset',
type='int', type='int',
default=_DEFAULT_PRESET_ID, default=_DEFAULT_PRESET_ID,
help=('ConnectionConfig configuration, specified by ID. ' help=('ConnectionConfig configuration, specified by ID. '
'Default: %default')) 'Default: %default'))
parser.add_option( parser.add_option('-r',
'-r', '--receive-bw',
'--receive-bw', type='int',
type='int', default=_DEFAULT_PRESET.receive_bw_kbps,
default=_DEFAULT_PRESET.receive_bw_kbps, help=('Receive bandwidth in kilobit/s. Default: %default'))
help=('Receive bandwidth in kilobit/s. Default: %default')) parser.add_option('-s',
parser.add_option('-s', '--send-bw',
'--send-bw', type='int',
type='int', default=_DEFAULT_PRESET.send_bw_kbps,
default=_DEFAULT_PRESET.send_bw_kbps, help=('Send bandwidth in kilobit/s. Default: %default'))
help=('Send bandwidth in kilobit/s. Default: %default')) parser.add_option('-d',
parser.add_option('-d', '--delay',
'--delay', type='int',
type='int', default=_DEFAULT_PRESET.delay_ms,
default=_DEFAULT_PRESET.delay_ms, help=('Delay in ms. Default: %default'))
help=('Delay in ms. Default: %default')) parser.add_option('-l',
parser.add_option('-l', '--packet-loss',
'--packet-loss', type='float',
type='float', default=_DEFAULT_PRESET.packet_loss_percent,
default=_DEFAULT_PRESET.packet_loss_percent, help=('Packet loss in %. Default: %default'))
help=('Packet loss in %. Default: %default')) parser.add_option('-q',
parser.add_option( '--queue',
'-q', type='int',
'--queue', default=_DEFAULT_PRESET.queue_slots,
type='int', help=('Queue size as number of slots. Default: %default'))
default=_DEFAULT_PRESET.queue_slots, parser.add_option('--port-range',
help=('Queue size as number of slots. Default: %default')) default='%s,%s' % _DEFAULT_PORT_RANGE,
parser.add_option( help=('Range of ports for constrained network. Specify as '
'--port-range', 'two comma separated integers. Default: %default'))
default='%s,%s' % _DEFAULT_PORT_RANGE, parser.add_option('--target-ip',
help=('Range of ports for constrained network. Specify as ' default=None,
'two comma separated integers. Default: %default')) help=('The interface IP address to apply the rules for. '
parser.add_option( 'Default: the external facing interface IP address.'))
'--target-ip', parser.add_option('-v',
default=None, '--verbose',
help=('The interface IP address to apply the rules for. ' action='store_true',
'Default: the external facing interface IP address.')) default=False,
parser.add_option('-v', help=('Turn on verbose output. Will print all \'ipfw\' '
'--verbose', 'commands that are executed.'))
action='store_true',
default=False,
help=('Turn on verbose output. Will print all \'ipfw\' '
'commands that are executed.'))
options = parser.parse_args()[0] options = parser.parse_args()[0]
# Find preset by ID, if specified. # Find preset by ID, if specified.
if options.preset and not _PRESETS_DICT.has_key(options.preset): if options.preset and options.preset not in _PRESETS_DICT:
parser.error('Invalid preset: %s' % options.preset) parser.error('Invalid preset: %s' % options.preset)
# Simple validation of the IP address, if supplied. # Simple validation of the IP address, if supplied.
if options.target_ip: if options.target_ip:
try:
socket.inet_aton(options.target_ip)
except socket.error:
parser.error('Invalid IP address specified: %s' %
options.target_ip)
# Convert port range into the desired tuple format.
try: try:
if isinstance(options.port_range, str): socket.inet_aton(options.target_ip)
options.port_range = tuple( except socket.error:
int(port) for port in options.port_range.split(',')) parser.error('Invalid IP address specified: %s' % options.target_ip)
if len(options.port_range) != 2:
parser.error(
'Invalid port range specified, please specify two '
'integers separated by a comma.')
except ValueError:
parser.error('Invalid port range specified.')
_InitLogging(options.verbose) # Convert port range into the desired tuple format.
return options try:
if isinstance(options.port_range, str):
options.port_range = tuple(
int(port) for port in options.port_range.split(','))
if len(options.port_range) != 2:
parser.error('Invalid port range specified, please specify two '
'integers separated by a comma.')
except ValueError:
parser.error('Invalid port range specified.')
_InitLogging(options.verbose)
return options
def _InitLogging(verbose): def _InitLogging(verbose):
"""Setup logging.""" """Setup logging."""
log_level = _DEFAULT_LOG_LEVEL log_level = _DEFAULT_LOG_LEVEL
if verbose: if verbose:
log_level = logging.DEBUG log_level = logging.DEBUG
logging.basicConfig(level=log_level, format='%(message)s') logging.basicConfig(level=log_level, format='%(message)s')
def main(): def main():
options = _ParseArgs() options = _ParseArgs()
# Build a configuration object. Override any preset configuration settings if # Build a configuration object. Override any preset configuration settings if
# a value of a setting was also given as a flag. # a value of a setting was also given as a flag.
connection_config = _PRESETS_DICT[options.preset] connection_config = _PRESETS_DICT[options.preset]
if options.receive_bw is not _DEFAULT_PRESET.receive_bw_kbps: if options.receive_bw is not _DEFAULT_PRESET.receive_bw_kbps:
connection_config.receive_bw_kbps = options.receive_bw connection_config.receive_bw_kbps = options.receive_bw
if options.send_bw is not _DEFAULT_PRESET.send_bw_kbps: if options.send_bw is not _DEFAULT_PRESET.send_bw_kbps:
connection_config.send_bw_kbps = options.send_bw connection_config.send_bw_kbps = options.send_bw
if options.delay is not _DEFAULT_PRESET.delay_ms: if options.delay is not _DEFAULT_PRESET.delay_ms:
connection_config.delay_ms = options.delay connection_config.delay_ms = options.delay
if options.packet_loss is not _DEFAULT_PRESET.packet_loss_percent: if options.packet_loss is not _DEFAULT_PRESET.packet_loss_percent:
connection_config.packet_loss_percent = options.packet_loss connection_config.packet_loss_percent = options.packet_loss
if options.queue is not _DEFAULT_PRESET.queue_slots: if options.queue is not _DEFAULT_PRESET.queue_slots:
connection_config.queue_slots = options.queue connection_config.queue_slots = options.queue
emulator = network_emulator.NetworkEmulator(connection_config, emulator = network_emulator.NetworkEmulator(connection_config,
options.port_range) options.port_range)
try: try:
emulator.CheckPermissions() emulator.CheckPermissions()
except network_emulator.NetworkEmulatorError as e: except network_emulator.NetworkEmulatorError as e:
logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error) logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error)
return -1 return -1
if not options.target_ip: if not options.target_ip:
external_ip = _GetExternalIp() external_ip = _GetExternalIp()
else: else:
external_ip = options.target_ip external_ip = options.target_ip
logging.info('Constraining traffic to/from IP: %s', external_ip) logging.info('Constraining traffic to/from IP: %s', external_ip)
try: try:
emulator.Emulate(external_ip) emulator.Emulate(external_ip)
logging.info( logging.info(
'Started network emulation with the following configuration:\n' 'Started network emulation with the following configuration:\n'
' Receive bandwidth: %s kbps (%s kB/s)\n' ' Receive bandwidth: %s kbps (%s kB/s)\n'
' Send bandwidth : %s kbps (%s kB/s)\n' ' Send bandwidth : %s kbps (%s kB/s)\n'
' Delay : %s ms\n' ' Delay : %s ms\n'
' Packet loss : %s %%\n' ' Packet loss : %s %%\n'
' Queue slots : %s', connection_config.receive_bw_kbps, ' Queue slots : %s', connection_config.receive_bw_kbps,
connection_config.receive_bw_kbps / 8, connection_config.receive_bw_kbps / 8, connection_config.send_bw_kbps,
connection_config.send_bw_kbps, connection_config.send_bw_kbps / 8, connection_config.send_bw_kbps / 8, connection_config.delay_ms,
connection_config.delay_ms, connection_config.packet_loss_percent, connection_config.packet_loss_percent, connection_config.queue_slots)
connection_config.queue_slots) logging.info('Affected traffic: IP traffic on ports %s-%s',
logging.info('Affected traffic: IP traffic on ports %s-%s', options.port_range[0], options.port_range[1])
options.port_range[0], options.port_range[1]) input('Press Enter to abort Network Emulation...')
raw_input('Press Enter to abort Network Emulation...') logging.info('Flushing all Dummynet rules...')
logging.info('Flushing all Dummynet rules...') network_emulator.Cleanup()
network_emulator.Cleanup() logging.info('Completed Network Emulation.')
logging.info('Completed Network Emulation.') return 0
return 0 except network_emulator.NetworkEmulatorError as e:
except network_emulator.NetworkEmulatorError as e: logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error)
logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error) return -2
return -2
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -16,7 +17,7 @@ import sys
class NetworkEmulatorError(BaseException): class NetworkEmulatorError(BaseException):
"""Exception raised for errors in the network emulator. """Exception raised for errors in the network emulator.
Attributes: Attributes:
fail_msg: User defined error message. fail_msg: User defined error message.
@ -26,88 +27,83 @@ class NetworkEmulatorError(BaseException):
stderr: Error output of running the command. stderr: Error output of running the command.
""" """
def __init__(self, def __init__(self,
fail_msg, fail_msg,
cmd=None, cmd=None,
returncode=None, returncode=None,
output=None, output=None,
error=None): error=None):
BaseException.__init__(self, fail_msg) BaseException.__init__(self, fail_msg)
self.fail_msg = fail_msg self.fail_msg = fail_msg
self.cmd = cmd self.cmd = cmd
self.returncode = returncode self.returncode = returncode
self.output = output self.output = output
self.error = error self.error = error
class NetworkEmulator(object): class NetworkEmulator:
"""A network emulator that can constrain the network using Dummynet.""" """A network emulator that can constrain the network using Dummynet."""
def __init__(self, connection_config, port_range): def __init__(self, connection_config, port_range):
"""Constructor. """Constructor.
Args: Args:
connection_config: A config.ConnectionConfig object containing the connection_config: A config.ConnectionConfig object containing the
characteristics for the connection to be emulation. characteristics for the connection to be emulation.
port_range: Tuple containing two integers defining the port range. port_range: Tuple containing two integers defining the port range.
""" """
self._pipe_counter = 0 self._pipe_counter = 0
self._rule_counter = 0 self._rule_counter = 0
self._port_range = port_range self._port_range = port_range
self._connection_config = connection_config self._connection_config = connection_config
def Emulate(self, target_ip): def Emulate(self, target_ip):
"""Starts a network emulation by setting up Dummynet rules. """Starts a network emulation by setting up Dummynet rules.
Args: Args:
target_ip: The IP address of the interface that shall be that have the target_ip: The IP address of the interface that shall be that have the
network constraints applied to it. network constraints applied to it.
""" """
receive_pipe_id = self._CreateDummynetPipe( receive_pipe_id = self._CreateDummynetPipe(
self._connection_config.receive_bw_kbps, self._connection_config.receive_bw_kbps,
self._connection_config.delay_ms, self._connection_config.delay_ms,
self._connection_config.packet_loss_percent, self._connection_config.packet_loss_percent,
self._connection_config.queue_slots) self._connection_config.queue_slots)
logging.debug('Created receive pipe: %s', receive_pipe_id) logging.debug('Created receive pipe: %s', receive_pipe_id)
send_pipe_id = self._CreateDummynetPipe( send_pipe_id = self._CreateDummynetPipe(
self._connection_config.send_bw_kbps, self._connection_config.send_bw_kbps, self._connection_config.delay_ms,
self._connection_config.delay_ms, self._connection_config.packet_loss_percent,
self._connection_config.packet_loss_percent, self._connection_config.queue_slots)
self._connection_config.queue_slots) logging.debug('Created send pipe: %s', send_pipe_id)
logging.debug('Created send pipe: %s', send_pipe_id)
# Adding the rules will start the emulation. # Adding the rules will start the emulation.
incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any', incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any',
target_ip, target_ip, self._port_range)
self._port_range) logging.debug('Created incoming rule: %s', incoming_rule_id)
logging.debug('Created incoming rule: %s', incoming_rule_id) outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, 'any',
outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, self._port_range)
'any', self._port_range) logging.debug('Created outgoing rule: %s', outgoing_rule_id)
logging.debug('Created outgoing rule: %s', outgoing_rule_id)
@staticmethod @staticmethod
def CheckPermissions(): def CheckPermissions():
"""Checks if permissions are available to run Dummynet commands. """Checks if permissions are available to run Dummynet commands.
Raises: Raises:
NetworkEmulatorError: If permissions to run Dummynet commands are not NetworkEmulatorError: If permissions to run Dummynet commands are not
available. available.
""" """
try: try:
if os.getuid() != 0: if os.getuid() != 0:
raise NetworkEmulatorError( raise NetworkEmulatorError('You must run this script with sudo.')
'You must run this script with sudo.') except AttributeError as permission_error:
except AttributeError:
# AttributeError will be raised on Windows. # AttributeError will be raised on Windows.
if ctypes.windll.shell32.IsUserAnAdmin() == 0: if ctypes.windll.shell32.IsUserAnAdmin() == 0:
raise NetworkEmulatorError( raise NetworkEmulatorError('You must run this script with administrator'
'You must run this script with administrator' ' privileges.') from permission_error
' privileges.')
def _CreateDummynetRule(self, pipe_id, from_address, to_address, def _CreateDummynetRule(self, pipe_id, from_address, to_address, port_range):
port_range): """Creates a network emulation rule and returns its ID.
"""Creates a network emulation rule and returns its ID.
Args: Args:
pipe_id: integer ID of the pipe. pipe_id: integer ID of the pipe.
@ -121,22 +117,20 @@ class NetworkEmulator(object):
The ID of the rule, starting at 100. The rule ID increments with 100 for The ID of the rule, starting at 100. The rule ID increments with 100 for
each rule being added. each rule being added.
""" """
self._rule_counter += 100 self._rule_counter += 100
add_part = [ add_part = [
'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', 'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', from_address,
from_address, 'to', to_address 'to', to_address
] ]
_RunIpfwCommand(add_part + _RunIpfwCommand(add_part + ['src-port', '%s-%s' % port_range],
['src-port', '%s-%s' % port_range], 'Failed to add Dummynet src-port rule.')
'Failed to add Dummynet src-port rule.') _RunIpfwCommand(add_part + ['dst-port', '%s-%s' % port_range],
_RunIpfwCommand(add_part + 'Failed to add Dummynet dst-port rule.')
['dst-port', '%s-%s' % port_range], return self._rule_counter
'Failed to add Dummynet dst-port rule.')
return self._rule_counter
def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, packet_loss_percent,
packet_loss_percent, queue_slots): queue_slots):
"""Creates a Dummynet pipe and return its ID. """Creates a Dummynet pipe and return its ID.
Args: Args:
bandwidth_kbps: Bandwidth. bandwidth_kbps: Bandwidth.
@ -146,34 +140,33 @@ class NetworkEmulator(object):
Returns: Returns:
The ID of the pipe, starting at 1. The ID of the pipe, starting at 1.
""" """
self._pipe_counter += 1 self._pipe_counter += 1
cmd = [ cmd = [
'pipe', self._pipe_counter, 'config', 'bw', 'pipe', self._pipe_counter, 'config', 'bw',
str(bandwidth_kbps / 8) + 'KByte/s', 'delay', str(bandwidth_kbps / 8) + 'KByte/s', 'delay',
'%sms' % delay_ms, 'plr', (packet_loss_percent / 100.0), 'queue', '%sms' % delay_ms, 'plr', (packet_loss_percent / 100.0), 'queue',
queue_slots queue_slots
] ]
error_message = 'Failed to create Dummynet pipe. ' error_message = 'Failed to create Dummynet pipe. '
if sys.platform.startswith('linux'): if sys.platform.startswith('linux'):
error_message += ( error_message += ('Make sure you have loaded the ipfw_mod.ko module to '
'Make sure you have loaded the ipfw_mod.ko module to ' 'your kernel (sudo insmod /path/to/ipfw_mod.ko).')
'your kernel (sudo insmod /path/to/ipfw_mod.ko).') _RunIpfwCommand(cmd, error_message)
_RunIpfwCommand(cmd, error_message) return self._pipe_counter
return self._pipe_counter
def Cleanup(): def Cleanup():
"""Stops the network emulation by flushing all Dummynet rules. """Stops the network emulation by flushing all Dummynet rules.
Notice that this will flush any rules that may have been created previously Notice that this will flush any rules that may have been created previously
before starting the emulation. before starting the emulation.
""" """
_RunIpfwCommand(['-f', 'flush'], 'Failed to flush Dummynet rules!') _RunIpfwCommand(['-f', 'flush'], 'Failed to flush Dummynet rules!')
_RunIpfwCommand(['-f', 'pipe', 'flush'], 'Failed to flush Dummynet pipes!') _RunIpfwCommand(['-f', 'pipe', 'flush'], 'Failed to flush Dummynet pipes!')
def _RunIpfwCommand(command, fail_msg=None): def _RunIpfwCommand(command, fail_msg=None):
"""Executes a command and prefixes the appropriate command for """Executes a command and prefixes the appropriate command for
Windows or Linux/UNIX. Windows or Linux/UNIX.
Args: Args:
@ -184,19 +177,19 @@ def _RunIpfwCommand(command, fail_msg=None):
NetworkEmulatorError: If command fails a message is set by the fail_msg NetworkEmulatorError: If command fails a message is set by the fail_msg
parameter. parameter.
""" """
if sys.platform == 'win32': if sys.platform == 'win32':
ipfw_command = ['ipfw.exe'] ipfw_command = ['ipfw.exe']
else: else:
ipfw_command = ['sudo', '-n', 'ipfw'] ipfw_command = ['sudo', '-n', 'ipfw']
cmd_list = ipfw_command[:] + [str(x) for x in command] cmd_list = ipfw_command[:] + [str(x) for x in command]
cmd_string = ' '.join(cmd_list) cmd_string = ' '.join(cmd_list)
logging.debug('Running command: %s', cmd_string) logging.debug('Running command: %s', cmd_string)
process = subprocess.Popen(cmd_list, process = subprocess.Popen(cmd_list,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE) stderr=subprocess.PIPE)
output, error = process.communicate() output, error = process.communicate()
if process.returncode != 0: if process.returncode != 0:
raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, output,
output, error) error)
return output.strip() return output.strip()

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -8,12 +9,13 @@
# be found in the AUTHORS file in the root of the source tree. # be found in the AUTHORS file in the root of the source tree.
import datetime import datetime
import httplib2
import json import json
import subprocess import subprocess
import time import time
import zlib import zlib
import httplib2
from tracing.value import histogram from tracing.value import histogram
from tracing.value import histogram_set from tracing.value import histogram_set
from tracing.value.diagnostics import generic_set from tracing.value.diagnostics import generic_set
@ -21,52 +23,51 @@ from tracing.value.diagnostics import reserved_infos
def _GenerateOauthToken(): def _GenerateOauthToken():
args = ['luci-auth', 'token'] args = ['luci-auth', 'token']
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if p.wait() == 0: if p.wait() == 0:
output = p.stdout.read() output = p.stdout.read()
return output.strip() return output.strip()
else: raise RuntimeError(
raise RuntimeError( 'Error generating authentication token.\nStdout: %s\nStderr:%s' %
'Error generating authentication token.\nStdout: %s\nStderr:%s' % (p.stdout.read(), p.stderr.read()))
(p.stdout.read(), p.stderr.read()))
def _CreateHeaders(oauth_token): def _CreateHeaders(oauth_token):
return {'Authorization': 'Bearer %s' % oauth_token} return {'Authorization': 'Bearer %s' % oauth_token}
def _SendHistogramSet(url, histograms): def _SendHistogramSet(url, histograms):
"""Make a HTTP POST with the given JSON to the Performance Dashboard. """Make a HTTP POST with the given JSON to the Performance Dashboard.
Args: Args:
url: URL of Performance Dashboard instance, e.g. url: URL of Performance Dashboard instance, e.g.
"https://chromeperf.appspot.com". "https://chromeperf.appspot.com".
histograms: a histogram set object that contains the data to be sent. histograms: a histogram set object that contains the data to be sent.
""" """
headers = _CreateHeaders(_GenerateOauthToken()) headers = _CreateHeaders(_GenerateOauthToken())
serialized = json.dumps(_ApplyHacks(histograms.AsDicts()), indent=4) serialized = json.dumps(_ApplyHacks(histograms.AsDicts()), indent=4)
if url.startswith('http://localhost'): if url.startswith('http://localhost'):
# The catapult server turns off compression in developer mode. # The catapult server turns off compression in developer mode.
data = serialized data = serialized
else: else:
data = zlib.compress(serialized) data = zlib.compress(serialized)
print 'Sending %d bytes to %s.' % (len(data), url + '/add_histograms') print('Sending %d bytes to %s.' % (len(data), url + '/add_histograms'))
http = httplib2.Http() http = httplib2.Http()
response, content = http.request(url + '/add_histograms', response, content = http.request(url + '/add_histograms',
method='POST', method='POST',
body=data, body=data,
headers=headers) headers=headers)
return response, content return response, content
def _WaitForUploadConfirmation(url, upload_token, wait_timeout, def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
wait_polling_period): wait_polling_period):
"""Make a HTTP GET requests to the Performance Dashboard untill upload """Make a HTTP GET requests to the Performance Dashboard untill upload
status is known or the time is out. status is known or the time is out.
Args: Args:
@ -79,42 +80,43 @@ def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
wait_polling_period: (datetime.timedelta) Performance Dashboard will be wait_polling_period: (datetime.timedelta) Performance Dashboard will be
polled every wait_polling_period amount of time. polled every wait_polling_period amount of time.
""" """
assert wait_polling_period <= wait_timeout assert wait_polling_period <= wait_timeout
headers = _CreateHeaders(_GenerateOauthToken()) headers = _CreateHeaders(_GenerateOauthToken())
http = httplib2.Http() http = httplib2.Http()
oauth_refreshed = False oauth_refreshed = False
response = None response = None
resp_json = None resp_json = None
current_time = datetime.datetime.now()
end_time = current_time + wait_timeout
next_poll_time = current_time + wait_polling_period
while datetime.datetime.now() < end_time:
current_time = datetime.datetime.now() current_time = datetime.datetime.now()
end_time = current_time + wait_timeout if next_poll_time > current_time:
next_poll_time = current_time + wait_polling_period time.sleep((next_poll_time - current_time).total_seconds())
while datetime.datetime.now() < end_time: next_poll_time = datetime.datetime.now() + wait_polling_period
current_time = datetime.datetime.now()
if next_poll_time > current_time:
time.sleep((next_poll_time - current_time).total_seconds())
next_poll_time = datetime.datetime.now() + wait_polling_period
response, content = http.request(url + '/uploads/' + upload_token, response, content = http.request(url + '/uploads/' + upload_token,
method='GET', headers=headers) method='GET',
headers=headers)
print 'Upload state polled. Response: %r.' % content print('Upload state polled. Response: %r.' % content)
if not oauth_refreshed and response.status == 403: if not oauth_refreshed and response.status == 403:
print 'Oauth token refreshed. Continue polling.' print('Oauth token refreshed. Continue polling.')
headers = _CreateHeaders(_GenerateOauthToken()) headers = _CreateHeaders(_GenerateOauthToken())
oauth_refreshed = True oauth_refreshed = True
continue continue
if response.status != 200: if response.status != 200:
break break
resp_json = json.loads(content) resp_json = json.loads(content)
if resp_json['state'] == 'COMPLETED' or resp_json['state'] == 'FAILED': if resp_json['state'] == 'COMPLETED' or resp_json['state'] == 'FAILED':
break break
return response, resp_json return response, resp_json
# Because of an issues on the Dashboard side few measurements over a large set # Because of an issues on the Dashboard side few measurements over a large set
@ -124,7 +126,7 @@ def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
def _CheckFullUploadInfo(url, upload_token, def _CheckFullUploadInfo(url, upload_token,
min_measurements_amount=50, min_measurements_amount=50,
max_failed_measurements_percent=0.03): max_failed_measurements_percent=0.03):
"""Make a HTTP GET requests to the Performance Dashboard to get full info """Make a HTTP GET requests to the Performance Dashboard to get full info
about upload (including measurements). Checks if upload is correct despite about upload (including measurements). Checks if upload is correct despite
not having status "COMPLETED". not having status "COMPLETED".
@ -138,125 +140,123 @@ def _CheckFullUploadInfo(url, upload_token,
max_failed_measurements_percent: maximal percent of failured measurements max_failed_measurements_percent: maximal percent of failured measurements
to tolerate. to tolerate.
""" """
headers = _CreateHeaders(_GenerateOauthToken()) headers = _CreateHeaders(_GenerateOauthToken())
http = httplib2.Http() http = httplib2.Http()
response, content = http.request(url + '/uploads/' + upload_token + response, content = http.request(url + '/uploads/' + upload_token +
'?additional_info=measurements', '?additional_info=measurements',
method='GET', headers=headers) method='GET',
headers=headers)
if response.status != 200:
print 'Failed to reach the dashboard to get full upload info.'
return False
resp_json = json.loads(content)
print 'Full upload info: %s.' % json.dumps(resp_json, indent=4)
if 'measurements' in resp_json:
measurements_cnt = len(resp_json['measurements'])
not_completed_state_cnt = len([
m for m in resp_json['measurements']
if m['state'] != 'COMPLETED'
])
if (measurements_cnt >= min_measurements_amount and
(not_completed_state_cnt / (measurements_cnt * 1.0) <=
max_failed_measurements_percent)):
print('Not all measurements were confirmed to upload. '
'Measurements count: %d, failed to upload or timed out: %d' %
(measurements_cnt, not_completed_state_cnt))
return True
if response.status != 200:
print('Failed to reach the dashboard to get full upload info.')
return False return False
resp_json = json.loads(content)
print('Full upload info: %s.' % json.dumps(resp_json, indent=4))
if 'measurements' in resp_json:
measurements_cnt = len(resp_json['measurements'])
not_completed_state_cnt = len(
[m for m in resp_json['measurements'] if m['state'] != 'COMPLETED'])
if (measurements_cnt >= min_measurements_amount
and (not_completed_state_cnt /
(measurements_cnt * 1.0) <= max_failed_measurements_percent)):
print(('Not all measurements were confirmed to upload. '
'Measurements count: %d, failed to upload or timed out: %d' %
(measurements_cnt, not_completed_state_cnt)))
return True
return False
# TODO(https://crbug.com/1029452): HACKHACK # TODO(https://crbug.com/1029452): HACKHACK
# Remove once we have doubles in the proto and handle -infinity correctly. # Remove once we have doubles in the proto and handle -infinity correctly.
def _ApplyHacks(dicts): def _ApplyHacks(dicts):
def _NoInf(value): def _NoInf(value):
if value == float('inf'): if value == float('inf'):
return histogram.JS_MAX_VALUE return histogram.JS_MAX_VALUE
if value == float('-inf'): if value == float('-inf'):
return -histogram.JS_MAX_VALUE return -histogram.JS_MAX_VALUE
return value return value
for d in dicts: for d in dicts:
if 'running' in d: if 'running' in d:
d['running'] = [_NoInf(value) for value in d['running']] d['running'] = [_NoInf(value) for value in d['running']]
if 'sampleValues' in d: if 'sampleValues' in d:
d['sampleValues'] = [_NoInf(value) for value in d['sampleValues']] d['sampleValues'] = [_NoInf(value) for value in d['sampleValues']]
return dicts return dicts
def _LoadHistogramSetFromProto(options): def _LoadHistogramSetFromProto(options):
hs = histogram_set.HistogramSet() hs = histogram_set.HistogramSet()
with options.input_results_file as f: with options.input_results_file as f:
hs.ImportProto(f.read()) hs.ImportProto(f.read())
return hs return hs
def _AddBuildInfo(histograms, options): def _AddBuildInfo(histograms, options):
common_diagnostics = { common_diagnostics = {
reserved_infos.MASTERS: options.perf_dashboard_machine_group, reserved_infos.MASTERS: options.perf_dashboard_machine_group,
reserved_infos.BOTS: options.bot, reserved_infos.BOTS: options.bot,
reserved_infos.POINT_ID: options.commit_position, reserved_infos.POINT_ID: options.commit_position,
reserved_infos.BENCHMARKS: options.test_suite, reserved_infos.BENCHMARKS: options.test_suite,
reserved_infos.WEBRTC_REVISIONS: str(options.webrtc_git_hash), reserved_infos.WEBRTC_REVISIONS: str(options.webrtc_git_hash),
reserved_infos.BUILD_URLS: options.build_page_url, reserved_infos.BUILD_URLS: options.build_page_url,
} }
for k, v in common_diagnostics.items(): for k, v in list(common_diagnostics.items()):
histograms.AddSharedDiagnosticToAllHistograms( histograms.AddSharedDiagnosticToAllHistograms(k.name,
k.name, generic_set.GenericSet([v])) generic_set.GenericSet([v]))
def _DumpOutput(histograms, output_file): def _DumpOutput(histograms, output_file):
with output_file: with output_file:
json.dump(_ApplyHacks(histograms.AsDicts()), output_file, indent=4) json.dump(_ApplyHacks(histograms.AsDicts()), output_file, indent=4)
def UploadToDashboard(options): def UploadToDashboard(options):
histograms = _LoadHistogramSetFromProto(options) histograms = _LoadHistogramSetFromProto(options)
_AddBuildInfo(histograms, options) _AddBuildInfo(histograms, options)
if options.output_json_file: if options.output_json_file:
_DumpOutput(histograms, options.output_json_file) _DumpOutput(histograms, options.output_json_file)
response, content = _SendHistogramSet(options.dashboard_url, histograms) response, content = _SendHistogramSet(options.dashboard_url, histograms)
if response.status != 200: if response.status != 200:
print('Upload failed with %d: %s\n\n%s' % (response.status, print(('Upload failed with %d: %s\n\n%s' %
response.reason, content)) (response.status, response.reason, content)))
return 1
upload_token = json.loads(content).get('token')
if not options.wait_for_upload or not upload_token:
print('Received 200 from dashboard. ',
'Not waiting for the upload status confirmation.')
return 0
response, resp_json = _WaitForUploadConfirmation(
options.dashboard_url,
upload_token,
datetime.timedelta(seconds=options.wait_timeout_sec),
datetime.timedelta(seconds=options.wait_polling_period_sec))
if ((resp_json and resp_json['state'] == 'COMPLETED') or
_CheckFullUploadInfo(options.dashboard_url, upload_token)):
print 'Upload completed.'
return 0
if response.status != 200:
print('Upload status poll failed with %d: %s' % (response.status,
response.reason))
return 1
if resp_json['state'] == 'FAILED':
print 'Upload failed.'
return 1
print('Upload wasn\'t completed in a given time: %d seconds.' %
options.wait_timeout_sec)
return 1 return 1
upload_token = json.loads(content).get('token')
if not options.wait_for_upload or not upload_token:
print(('Received 200 from dashboard. ',
'Not waiting for the upload status confirmation.'))
return 0
response, resp_json = _WaitForUploadConfirmation(
options.dashboard_url, upload_token,
datetime.timedelta(seconds=options.wait_timeout_sec),
datetime.timedelta(seconds=options.wait_polling_period_sec))
if ((resp_json and resp_json['state'] == 'COMPLETED')
or _CheckFullUploadInfo(options.dashboard_url, upload_token)):
print('Upload completed.')
return 0
if response.status != 200:
print(('Upload status poll failed with %d: %s' %
(response.status, response.reason)))
return 1
if resp_json['state'] == 'FAILED':
print('Upload failed.')
return 1
print(('Upload wasn\'t completed in a given time: %d seconds.' %
options.wait_timeout_sec))
return 1

View file

@ -1,4 +1,5 @@
#!/usr/bin/env vpython #!/usr/bin/env vpython3
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -28,95 +29,110 @@ import google.protobuf # pylint: disable=unused-import
def _CreateParser(): def _CreateParser():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--perf-dashboard-machine-group', required=True, parser.add_argument('--perf-dashboard-machine-group',
help='The "master" the bots are grouped under. This ' required=True,
'string is the group in the the perf dashboard path ' help='The "master" the bots are grouped under. This '
'group/bot/perf_id/metric/subtest.') 'string is the group in the the perf dashboard path '
parser.add_argument('--bot', required=True, 'group/bot/perf_id/metric/subtest.')
help='The bot running the test (e.g. ' parser.add_argument('--bot',
'webrtc-win-large-tests).') required=True,
parser.add_argument('--test-suite', required=True, help='The bot running the test (e.g. '
help='The key for the test in the dashboard (i.e. what ' 'webrtc-win-large-tests).')
'you select in the top-level test suite selector in ' parser.add_argument('--test-suite',
'the dashboard') required=True,
parser.add_argument('--webrtc-git-hash', required=True, help='The key for the test in the dashboard (i.e. what '
help='webrtc.googlesource.com commit hash.') 'you select in the top-level test suite selector in '
parser.add_argument('--commit-position', type=int, required=True, 'the dashboard')
help='Commit pos corresponding to the git hash.') parser.add_argument('--webrtc-git-hash',
parser.add_argument('--build-page-url', required=True, required=True,
help='URL to the build page for this build.') help='webrtc.googlesource.com commit hash.')
parser.add_argument('--dashboard-url', required=True, parser.add_argument('--commit-position',
help='Which dashboard to use.') type=int,
parser.add_argument('--input-results-file', type=argparse.FileType(), required=True,
required=True, help='Commit pos corresponding to the git hash.')
help='A HistogramSet proto file with output from ' parser.add_argument('--build-page-url',
'WebRTC tests.') required=True,
parser.add_argument('--output-json-file', type=argparse.FileType('w'), help='URL to the build page for this build.')
help='Where to write the output (for debugging).') parser.add_argument('--dashboard-url',
parser.add_argument('--outdir', required=True, required=True,
help='Path to the local out/ dir (usually out/Default)') help='Which dashboard to use.')
parser.add_argument('--wait-for-upload', action='store_true', parser.add_argument('--input-results-file',
help='If specified, script will wait untill Chrome ' type=argparse.FileType(),
'perf dashboard confirms that the data was succesfully ' required=True,
'proccessed and uploaded') help='A HistogramSet proto file with output from '
parser.add_argument('--wait-timeout-sec', type=int, default=1200, 'WebRTC tests.')
help='Used only if wait-for-upload is True. Maximum ' parser.add_argument('--output-json-file',
'amount of time in seconds that the script will wait ' type=argparse.FileType('w'),
'for the confirmation.') help='Where to write the output (for debugging).')
parser.add_argument('--wait-polling-period-sec', type=int, default=120, parser.add_argument('--outdir',
help='Used only if wait-for-upload is True. Status ' required=True,
'will be requested from the Dashboard every ' help='Path to the local out/ dir (usually out/Default)')
'wait-polling-period-sec seconds.') parser.add_argument('--wait-for-upload',
return parser action='store_true',
help='If specified, script will wait untill Chrome '
'perf dashboard confirms that the data was succesfully '
'proccessed and uploaded')
parser.add_argument('--wait-timeout-sec',
type=int,
default=1200,
help='Used only if wait-for-upload is True. Maximum '
'amount of time in seconds that the script will wait '
'for the confirmation.')
parser.add_argument('--wait-polling-period-sec',
type=int,
default=120,
help='Used only if wait-for-upload is True. Status '
'will be requested from the Dashboard every '
'wait-polling-period-sec seconds.')
return parser
def _ConfigurePythonPath(options): def _ConfigurePythonPath(options):
# We just yank the python scripts we require into the PYTHONPATH. You could # We just yank the python scripts we require into the PYTHONPATH. You could
# also imagine a solution where we use for instance # also imagine a solution where we use for instance
# protobuf:py_proto_runtime to copy catapult and protobuf code to out/. # protobuf:py_proto_runtime to copy catapult and protobuf code to out/.
# This is the convention in Chromium and WebRTC python scripts. We do need # This is the convention in Chromium and WebRTC python scripts. We do need
# to build histogram_pb2 however, so that's why we add out/ to sys.path # to build histogram_pb2 however, so that's why we add out/ to sys.path
# below. # below.
# #
# It would be better if there was an equivalent to py_binary in GN, but # It would be better if there was an equivalent to py_binary in GN, but
# there's not. # there's not.
script_dir = os.path.dirname(os.path.realpath(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
checkout_root = os.path.abspath( checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir,
os.path.join(script_dir, os.pardir, os.pardir)) os.pardir))
sys.path.insert( sys.path.insert(
0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing')) 0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing'))
sys.path.insert( sys.path.insert(
0, os.path.join(checkout_root, 'third_party', 'protobuf', 'python')) 0, os.path.join(checkout_root, 'third_party', 'protobuf', 'python'))
# The webrtc_dashboard_upload gn rule will build the protobuf stub for # The webrtc_dashboard_upload gn rule will build the protobuf stub for
# python, so put it in the path for this script before we attempt to import # python, so put it in the path for this script before we attempt to import
# it. # it.
histogram_proto_path = os.path.join(options.outdir, 'pyproto', 'tracing', histogram_proto_path = os.path.join(options.outdir, 'pyproto', 'tracing',
'tracing', 'proto') 'tracing', 'proto')
sys.path.insert(0, histogram_proto_path) sys.path.insert(0, histogram_proto_path)
# Fail early in case the proto hasn't been built. # Fail early in case the proto hasn't been built.
from tracing.proto import histogram_proto from tracing.proto import histogram_proto
if not histogram_proto.HAS_PROTO: if not histogram_proto.HAS_PROTO:
raise ImportError( raise ImportError('Could not find histogram_pb2. You need to build the '
'Could not find histogram_pb2. You need to build the ' 'webrtc_dashboard_upload target before invoking this '
'webrtc_dashboard_upload target before invoking this ' 'script. Expected to find '
'script. Expected to find ' 'histogram_pb2.py in %s.' % histogram_proto_path)
'histogram_pb2.py in %s.' % histogram_proto_path)
def main(args): def main(args):
parser = _CreateParser() parser = _CreateParser()
options = parser.parse_args(args) options = parser.parse_args(args)
_ConfigurePythonPath(options) _ConfigurePythonPath(options)
import catapult_uploader import catapult_uploader
return catapult_uploader.UploadToDashboard(options) return catapult_uploader.UploadToDashboard(options)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -18,11 +20,11 @@ import tempfile
def FindSrcDirPath(): def FindSrcDirPath():
"""Returns the abs path to the src/ dir of the project.""" """Returns the abs path to the src/ dir of the project."""
src_dir = os.path.dirname(os.path.abspath(__file__)) src_dir = os.path.dirname(os.path.abspath(__file__))
while os.path.basename(src_dir) != 'src': while os.path.basename(src_dir) != 'src':
src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
return src_dir return src_dir
SRC_DIR = FindSrcDirPath() SRC_DIR = FindSrcDirPath()
@ -31,16 +33,16 @@ import find_depot_tools
def RunGnCommand(args, root_dir=None): def RunGnCommand(args, root_dir=None):
"""Runs `gn` with provided args and return error if any.""" """Runs `gn` with provided args and return error if any."""
try: try:
command = [ command = [
sys.executable, sys.executable,
os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py') os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py')
] + args ] + args
subprocess.check_output(command, cwd=root_dir) subprocess.check_output(command, cwd=root_dir)
except subprocess.CalledProcessError as err: except subprocess.CalledProcessError as err:
return err.output return err.output
return None return None
# GN_ERROR_RE matches the summary of an error output by `gn check`. # GN_ERROR_RE matches the summary of an error output by `gn check`.
@ -50,49 +52,49 @@ GN_ERROR_RE = re.compile(r'^ERROR .+(?:\n.*[^_\n].*$)+', re.MULTILINE)
def RunGnCheck(root_dir=None): def RunGnCheck(root_dir=None):
"""Runs `gn gen --check` with default args to detect mismatches between """Runs `gn gen --check` with default args to detect mismatches between
#includes and dependencies in the BUILD.gn files, as well as general build #includes and dependencies in the BUILD.gn files, as well as general build
errors. errors.
Returns a list of error summary strings. Returns a list of error summary strings.
""" """
out_dir = tempfile.mkdtemp('gn') out_dir = tempfile.mkdtemp('gn')
try: try:
error = RunGnCommand(['gen', '--check', out_dir], root_dir) error = RunGnCommand(['gen', '--check', out_dir], root_dir)
finally: finally:
shutil.rmtree(out_dir, ignore_errors=True) shutil.rmtree(out_dir, ignore_errors=True)
return GN_ERROR_RE.findall(error) if error else [] return GN_ERROR_RE.findall(error.decode('utf-8')) if error else []
def RunNinjaCommand(args, root_dir=None): def RunNinjaCommand(args, root_dir=None):
"""Runs ninja quietly. Any failure (e.g. clang not found) is """Runs ninja quietly. Any failure (e.g. clang not found) is
silently discarded, since this is unlikely an error in submitted CL.""" silently discarded, since this is unlikely an error in submitted CL."""
command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args
p = subprocess.Popen(command, p = subprocess.Popen(command,
cwd=root_dir, cwd=root_dir,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE) stderr=subprocess.PIPE)
out, _ = p.communicate() out, _ = p.communicate()
return out return out
def GetClangTidyPath(): def GetClangTidyPath():
"""POC/WIP! Use the one we have, even it doesn't match clang's version.""" """POC/WIP! Use the one we have, even it doesn't match clang's version."""
tidy = ('third_party/android_ndk/toolchains/' tidy = ('third_party/android_ndk/toolchains/'
'llvm/prebuilt/linux-x86_64/bin/clang-tidy') 'llvm/prebuilt/linux-x86_64/bin/clang-tidy')
return os.path.join(SRC_DIR, tidy) return os.path.join(SRC_DIR, tidy)
def GetCompilationDb(root_dir=None): def GetCompilationDb(root_dir=None):
"""Run ninja compdb tool to get proper flags, defines and include paths.""" """Run ninja compdb tool to get proper flags, defines and include paths."""
# The compdb tool expect a rule. # The compdb tool expect a rule.
commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir)) commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir))
# Turns 'file' field into a key. # Turns 'file' field into a key.
return {v['file']: v for v in commands} return {v['file']: v for v in commands}
def GetCompilationCommand(filepath, gn_args, work_dir): def GetCompilationCommand(filepath, gn_args, work_dir):
"""Get the whole command used to compile one cc file. """Get the whole command used to compile one cc file.
Typically, clang++ with flags, defines and include paths. Typically, clang++ with flags, defines and include paths.
Args: Args:
@ -103,30 +105,30 @@ def GetCompilationCommand(filepath, gn_args, work_dir):
Returns: Returns:
Command as a list, ready to be consumed by subprocess.Popen. Command as a list, ready to be consumed by subprocess.Popen.
""" """
gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir]) gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir])
if gn_errors: if gn_errors:
raise (RuntimeError('FYI, cannot complete check due to gn error:\n%s\n' raise RuntimeError('FYI, cannot complete check due to gn error:\n%s\n'
'Please open a bug.' % gn_errors)) 'Please open a bug.' % gn_errors)
# Needed for single file compilation. # Needed for single file compilation.
commands = GetCompilationDb(work_dir) commands = GetCompilationDb(work_dir)
# Path as referenced by ninja. # Path as referenced by ninja.
rel_path = os.path.relpath(os.path.abspath(filepath), work_dir) rel_path = os.path.relpath(os.path.abspath(filepath), work_dir)
# Gather defines, include path and flags (such as -std=c++11). # Gather defines, include path and flags (such as -std=c++11).
try: try:
compilation_entry = commands[rel_path] compilation_entry = commands[rel_path]
except KeyError: except KeyError as not_found:
raise ValueError('%s: Not found in compilation database.\n' raise ValueError('%s: Not found in compilation database.\n'
'Please check the path.' % filepath) 'Please check the path.' % filepath) from not_found
command = compilation_entry['command'].split() command = compilation_entry['command'].split()
# Remove troublesome flags. May trigger an error otherwise. # Remove troublesome flags. May trigger an error otherwise.
if '-MMD' in command: if '-MMD' in command:
command.remove('-MMD') command.remove('-MMD')
if '-MF' in command: if '-MF' in command:
index = command.index('-MF') index = command.index('-MF')
del command[index:index + 2] # Remove filename as well. del command[index:index + 2] # Remove filename as well.
return command return command

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -12,7 +12,6 @@ import re
import os import os
import unittest import unittest
#pylint: disable=relative-import
import build_helpers import build_helpers
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
@ -26,7 +25,7 @@ class GnCheckTest(unittest.TestCase):
expected_error = re.compile('ERROR Dependency cycle') expected_error = re.compile('ERROR Dependency cycle')
gn_output = build_helpers.RunGnCheck(test_dir) gn_output = build_helpers.RunGnCheck(test_dir)
self.assertEqual(1, len(gn_output)) self.assertEqual(1, len(gn_output))
self.assertRegexpMatches(gn_output[0], expected_error) self.assertRegex(gn_output[0], expected_error)
if __name__ == '__main__': if __name__ == '__main__':

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -9,7 +10,6 @@
import os import os
import re import re
import string
# TARGET_RE matches a GN target, and extracts the target name and the contents. # TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile( TARGET_RE = re.compile(
@ -26,27 +26,27 @@ SOURCE_FILE_RE = re.compile(r'.*\"(?P<source_file>.*)\"')
class NoBuildGnFoundError(Exception): class NoBuildGnFoundError(Exception):
pass pass
class WrongFileTypeError(Exception): class WrongFileTypeError(Exception):
pass pass
def _ReadFile(file_path): def _ReadFile(file_path):
"""Returns the content of file_path in a string. """Returns the content of file_path in a string.
Args: Args:
file_path: the path of the file to read. file_path: the path of the file to read.
Returns: Returns:
A string with the content of the file. A string with the content of the file.
""" """
with open(file_path) as f: with open(file_path) as f:
return f.read() return f.read()
def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path): def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path):
"""Returns the BUILD.gn file responsible for file_path. """Returns the BUILD.gn file responsible for file_path.
Args: Args:
file_path: the absolute path to the .h file to check. file_path: the absolute path to the .h file to check.
@ -58,23 +58,21 @@ def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path):
A string with the absolute path to the BUILD.gn file responsible to include A string with the absolute path to the BUILD.gn file responsible to include
file_path in a target. file_path in a target.
""" """
if not file_path.endswith('.h'): if not file_path.endswith('.h'):
raise WrongFileTypeError( raise WrongFileTypeError(
'File {} is not an header file (.h)'.format(file_path)) 'File {} is not an header file (.h)'.format(file_path))
candidate_dir = os.path.dirname(file_path) candidate_dir = os.path.dirname(file_path)
while candidate_dir.startswith(root_dir_path): while candidate_dir.startswith(root_dir_path):
candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn') candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn')
if file_exists_check(candidate_build_gn_path): if file_exists_check(candidate_build_gn_path):
return candidate_build_gn_path return candidate_build_gn_path
else: candidate_dir = os.path.abspath(os.path.join(candidate_dir, os.pardir))
candidate_dir = os.path.abspath( raise NoBuildGnFoundError(
os.path.join(candidate_dir, os.pardir)) 'No BUILD.gn file found for file: `{}`'.format(file_path))
raise NoBuildGnFoundError(
'No BUILD.gn file found for file: `{}`'.format(file_path))
def IsHeaderInBuildGn(header_path, build_gn_path): def IsHeaderInBuildGn(header_path, build_gn_path):
"""Returns True if the header is listed in the BUILD.gn file. """Returns True if the header is listed in the BUILD.gn file.
Args: Args:
header_path: the absolute path to the header to check. header_path: the absolute path to the header to check.
@ -85,15 +83,15 @@ def IsHeaderInBuildGn(header_path, build_gn_path):
at least one GN target in the BUILD.gn file specified by at least one GN target in the BUILD.gn file specified by
the argument build_gn_path. the argument build_gn_path.
""" """
target_abs_path = os.path.dirname(build_gn_path) target_abs_path = os.path.dirname(build_gn_path)
build_gn_content = _ReadFile(build_gn_path) build_gn_content = _ReadFile(build_gn_path)
headers_in_build_gn = GetHeadersInBuildGnFileSources( headers_in_build_gn = GetHeadersInBuildGnFileSources(build_gn_content,
build_gn_content, target_abs_path) target_abs_path)
return header_path in headers_in_build_gn return header_path in headers_in_build_gn
def GetHeadersInBuildGnFileSources(file_content, target_abs_path): def GetHeadersInBuildGnFileSources(file_content, target_abs_path):
"""Returns a set with all the .h files in the file_content. """Returns a set with all the .h files in the file_content.
Args: Args:
file_content: a string with the content of the BUILD.gn file. file_content: a string with the content of the BUILD.gn file.
@ -104,15 +102,15 @@ def GetHeadersInBuildGnFileSources(file_content, target_abs_path):
A set with all the headers (.h file) in the file_content. A set with all the headers (.h file) in the file_content.
The set contains absolute paths. The set contains absolute paths.
""" """
headers_in_sources = set([]) headers_in_sources = set([])
for target_match in TARGET_RE.finditer(file_content): for target_match in TARGET_RE.finditer(file_content):
target_contents = target_match.group('target_contents') target_contents = target_match.group('target_contents')
for sources_match in SOURCES_RE.finditer(target_contents): for sources_match in SOURCES_RE.finditer(target_contents):
sources = sources_match.group('sources') sources = sources_match.group('sources')
for source_file_match in SOURCE_FILE_RE.finditer(sources): for source_file_match in SOURCE_FILE_RE.finditer(sources):
source_file = source_file_match.group('source_file') source_file = source_file_match.group('source_file')
if source_file.endswith('.h'): if source_file.endswith('.h'):
source_file_tokens = string.split(source_file, '/') source_file_tokens = source_file.split('/')
headers_in_sources.add( headers_in_sources.add(
os.path.join(target_abs_path, *source_file_tokens)) os.path.join(target_abs_path, *source_file_tokens))
return headers_in_sources return headers_in_sources

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -11,72 +12,67 @@ import os
import sys import sys
import unittest import unittest
#pylint: disable=relative-import
import check_orphan_headers import check_orphan_headers
def _GetRootBasedOnPlatform(): def _GetRootBasedOnPlatform():
if sys.platform.startswith('win'): if sys.platform.startswith('win'):
return 'C:\\' return 'C:\\'
else: return '/'
return '/'
def _GetPath(*path_chunks): def _GetPath(*path_chunks):
return os.path.join(_GetRootBasedOnPlatform(), *path_chunks) return os.path.join(_GetRootBasedOnPlatform(), *path_chunks)
class GetBuildGnPathFromFilePathTest(unittest.TestCase): class GetBuildGnPathFromFilePathTest(unittest.TestCase):
def testGetBuildGnFromSameDirectory(self): def testGetBuildGnFromSameDirectory(self):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base', expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base',
'BUILD.gn') 'BUILD.gn')
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', 'base',
'base', 'BUILD.gn') 'BUILD.gn')
src_dir_path = _GetPath('home', 'projects', 'webrtc') src_dir_path = _GetPath('home', 'projects', 'webrtc')
self.assertEqual( self.assertEqual(
expected_build_path, expected_build_path,
check_orphan_headers.GetBuildGnPathFromFilePath( check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
file_path, file_exists, src_dir_path)) src_dir_path))
def testGetBuildPathFromParentDirectory(self): def testGetBuildPathFromParentDirectory(self):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
expected_build_path = _GetPath('home', 'projects', 'webrtc', expected_build_path = _GetPath('home', 'projects', 'webrtc', 'BUILD.gn')
'BUILD.gn') file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', 'BUILD.gn')
'BUILD.gn') src_dir_path = _GetPath('home', 'projects', 'webrtc')
src_dir_path = _GetPath('home', 'projects', 'webrtc') self.assertEqual(
self.assertEqual( expected_build_path,
expected_build_path, check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
check_orphan_headers.GetBuildGnPathFromFilePath( src_dir_path))
file_path, file_exists, src_dir_path))
def testExceptionIfNoBuildGnFilesAreFound(self): def testExceptionIfNoBuildGnFilesAreFound(self):
with self.assertRaises(check_orphan_headers.NoBuildGnFoundError): with self.assertRaises(check_orphan_headers.NoBuildGnFoundError):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h') file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
file_exists = lambda p: False file_exists = lambda p: False
src_dir_path = _GetPath('home', 'projects', 'webrtc') src_dir_path = _GetPath('home', 'projects', 'webrtc')
check_orphan_headers.GetBuildGnPathFromFilePath( check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
file_path, file_exists, src_dir_path) src_dir_path)
def testExceptionIfFilePathIsNotAnHeader(self): def testExceptionIfFilePathIsNotAnHeader(self):
with self.assertRaises(check_orphan_headers.WrongFileTypeError): with self.assertRaises(check_orphan_headers.WrongFileTypeError):
file_path = _GetPath('home', 'projects', 'webrtc', 'base', file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.cc')
'foo.cc') file_exists = lambda p: False
file_exists = lambda p: False src_dir_path = _GetPath('home', 'projects', 'webrtc')
src_dir_path = _GetPath('home', 'projects', 'webrtc') check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
check_orphan_headers.GetBuildGnPathFromFilePath( src_dir_path)
file_path, file_exists, src_dir_path)
class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase): class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase):
def testEmptyFileReturnsEmptySet(self): def testEmptyFileReturnsEmptySet(self):
self.assertEqual( self.assertEqual(
set([]), set([]),
check_orphan_headers.GetHeadersInBuildGnFileSources('', '/a/b')) check_orphan_headers.GetHeadersInBuildGnFileSources('', '/a/b'))
def testReturnsSetOfHeadersFromFileContent(self): def testReturnsSetOfHeadersFromFileContent(self):
file_content = """ file_content = """
# Some comments # Some comments
if (is_android) { if (is_android) {
import("//a/b/c.gni") import("//a/b/c.gni")
@ -101,17 +97,17 @@ class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase):
sources = ["baz/foo.h"] sources = ["baz/foo.h"]
} }
""" """
target_abs_path = _GetPath('a', 'b') target_abs_path = _GetPath('a', 'b')
self.assertEqual( self.assertEqual(
set([ set([
_GetPath('a', 'b', 'foo.h'), _GetPath('a', 'b', 'foo.h'),
_GetPath('a', 'b', 'bar.h'), _GetPath('a', 'b', 'bar.h'),
_GetPath('a', 'b', 'public_foo.h'), _GetPath('a', 'b', 'public_foo.h'),
_GetPath('a', 'b', 'baz', 'foo.h'), _GetPath('a', 'b', 'baz', 'foo.h'),
]), ]),
check_orphan_headers.GetHeadersInBuildGnFileSources( check_orphan_headers.GetHeadersInBuildGnFileSources(
file_content, target_abs_path)) file_content, target_abs_path))
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -33,104 +33,101 @@ class PackageBoundaryViolation(
collections.namedtuple( collections.namedtuple(
'PackageBoundaryViolation', 'PackageBoundaryViolation',
'build_file_path target_name source_file subpackage')): 'build_file_path target_name source_file subpackage')):
def __str__(self): def __str__(self):
return ERROR_MESSAGE.format(**self._asdict()) return ERROR_MESSAGE.format(**self._asdict())
def _BuildSubpackagesPattern(packages, query): def _BuildSubpackagesPattern(packages, query):
"""Returns a regular expression that matches source files inside subpackages """Returns a regular expression that matches source files inside subpackages
of the given query.""" of the given query."""
query += os.path.sep query += os.path.sep
length = len(query) length = len(query)
pattern = r'\s*"(?P<source_file>(?P<subpackage>' pattern = r'\s*"(?P<source_file>(?P<subpackage>'
pattern += '|'.join( pattern += '|'.join(
re.escape(package[length:].replace(os.path.sep, '/')) re.escape(package[length:].replace(os.path.sep, '/'))
for package in packages if package.startswith(query)) for package in packages if package.startswith(query))
pattern += r')/[\w\./]*)"' pattern += r')/[\w\./]*)"'
return re.compile(pattern) return re.compile(pattern)
def _ReadFileAndPrependLines(file_path): def _ReadFileAndPrependLines(file_path):
"""Reads the contents of a file.""" """Reads the contents of a file."""
with open(file_path) as f: with open(file_path) as f:
return "".join(f.readlines()) return "".join(f.readlines())
def _CheckBuildFile(build_file_path, packages): def _CheckBuildFile(build_file_path, packages):
"""Iterates over all the targets of the given BUILD.gn file, and verifies that """Iterates over all the targets of the given BUILD.gn file, and verifies that
the source files referenced by it don't belong to any of it's subpackages. the source files referenced by it don't belong to any of it's subpackages.
Returns an iterator over PackageBoundaryViolations for this package. Returns an iterator over PackageBoundaryViolations for this package.
""" """
package = os.path.dirname(build_file_path) package = os.path.dirname(build_file_path)
subpackages_re = _BuildSubpackagesPattern(packages, package) subpackages_re = _BuildSubpackagesPattern(packages, package)
build_file_contents = _ReadFileAndPrependLines(build_file_path) build_file_contents = _ReadFileAndPrependLines(build_file_path)
for target_match in TARGET_RE.finditer(build_file_contents): for target_match in TARGET_RE.finditer(build_file_contents):
target_name = target_match.group('target_name') target_name = target_match.group('target_name')
target_contents = target_match.group('target_contents') target_contents = target_match.group('target_contents')
for sources_match in SOURCES_RE.finditer(target_contents): for sources_match in SOURCES_RE.finditer(target_contents):
sources = sources_match.group('sources') sources = sources_match.group('sources')
for subpackages_match in subpackages_re.finditer(sources): for subpackages_match in subpackages_re.finditer(sources):
subpackage = subpackages_match.group('subpackage') subpackage = subpackages_match.group('subpackage')
source_file = subpackages_match.group('source_file') source_file = subpackages_match.group('source_file')
if subpackage: if subpackage:
yield PackageBoundaryViolation(build_file_path, yield PackageBoundaryViolation(build_file_path, target_name,
target_name, source_file, source_file, subpackage)
subpackage)
def CheckPackageBoundaries(root_dir, build_files=None): def CheckPackageBoundaries(root_dir, build_files=None):
packages = [ packages = [
root for root, _, files in os.walk(root_dir) if 'BUILD.gn' in files root for root, _, files in os.walk(root_dir) if 'BUILD.gn' in files
] ]
if build_files is not None: if build_files is not None:
for build_file_path in build_files:
assert build_file_path.startswith(root_dir)
else:
build_files = [
os.path.join(package, 'BUILD.gn') for package in packages
]
messages = []
for build_file_path in build_files: for build_file_path in build_files:
messages.extend(_CheckBuildFile(build_file_path, packages)) assert build_file_path.startswith(root_dir)
return messages else:
build_files = [os.path.join(package, 'BUILD.gn') for package in packages]
messages = []
for build_file_path in build_files:
messages.extend(_CheckBuildFile(build_file_path, packages))
return messages
def main(argv): def main(argv):
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Script that checks package boundary violations in GN ' description='Script that checks package boundary violations in GN '
'build files.') 'build files.')
parser.add_argument('root_dir', parser.add_argument('root_dir',
metavar='ROOT_DIR', metavar='ROOT_DIR',
help='The root directory that contains all BUILD.gn ' help='The root directory that contains all BUILD.gn '
'files to be processed.') 'files to be processed.')
parser.add_argument('build_files', parser.add_argument('build_files',
metavar='BUILD_FILE', metavar='BUILD_FILE',
nargs='*', nargs='*',
help='A list of BUILD.gn files to be processed. If no ' help='A list of BUILD.gn files to be processed. If no '
'files are given, all BUILD.gn files under ROOT_DIR ' 'files are given, all BUILD.gn files under ROOT_DIR '
'will be processed.') 'will be processed.')
parser.add_argument('--max_messages', parser.add_argument('--max_messages',
type=int, type=int,
default=None, default=None,
help='If set, the maximum number of violations to be ' help='If set, the maximum number of violations to be '
'displayed.') 'displayed.')
args = parser.parse_args(argv) args = parser.parse_args(argv)
messages = CheckPackageBoundaries(args.root_dir, args.build_files) messages = CheckPackageBoundaries(args.root_dir, args.build_files)
messages = messages[:args.max_messages] messages = messages[:args.max_messages]
for i, message in enumerate(messages): for i, message in enumerate(messages):
if i > 0: if i > 0:
print print()
print message print(message)
return bool(messages) return bool(messages)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
@ -12,8 +12,7 @@ import ast
import os import os
import unittest import unittest
#pylint: disable=relative-import import check_package_boundaries
from check_package_boundaries import CheckPackageBoundaries
MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.' MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.'
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
@ -21,54 +20,52 @@ TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
def ReadPylFile(file_path): def ReadPylFile(file_path):
with open(file_path) as f: with open(file_path) as f:
return ast.literal_eval(f.read()) return ast.literal_eval(f.read())
class UnitTest(unittest.TestCase): class UnitTest(unittest.TestCase):
def _RunTest(self, test_dir, check_all_build_files=False): def _RunTest(self, test_dir, check_all_build_files=False):
build_files = [os.path.join(test_dir, 'BUILD.gn')] build_files = [os.path.join(test_dir, 'BUILD.gn')]
if check_all_build_files: if check_all_build_files:
build_files = None build_files = None
messages = [] messages = []
for violation in CheckPackageBoundaries(test_dir, build_files): for violation in check_package_boundaries.CheckPackageBoundaries(
build_file_path = os.path.relpath(violation.build_file_path, test_dir, build_files):
test_dir) build_file_path = os.path.relpath(violation.build_file_path, test_dir)
build_file_path = build_file_path.replace(os.path.sep, '/') build_file_path = build_file_path.replace(os.path.sep, '/')
messages.append( messages.append(violation._replace(build_file_path=build_file_path))
violation._replace(build_file_path=build_file_path))
expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl')) expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl'))
self.assertListEqual(sorted(expected_messages), sorted(messages)) self.assertListEqual(sorted(expected_messages), sorted(messages))
def testNoErrors(self): def testNoErrors(self):
self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors')) self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors'))
def testMultipleErrorsSingleTarget(self): def testMultipleErrorsSingleTarget(self):
self._RunTest( self._RunTest(os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
def testMultipleErrorsMultipleTargets(self): def testMultipleErrorsMultipleTargets(self):
self._RunTest( self._RunTest(os.path.join(TESTDATA_DIR,
os.path.join(TESTDATA_DIR, 'multiple_errors_multiple_targets')) 'multiple_errors_multiple_targets'))
def testCommonPrefix(self): def testCommonPrefix(self):
self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix')) self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix'))
def testAllBuildFiles(self): def testAllBuildFiles(self):
self._RunTest(os.path.join(TESTDATA_DIR, 'all_build_files'), True) self._RunTest(os.path.join(TESTDATA_DIR, 'all_build_files'), True)
def testSanitizeFilename(self): def testSanitizeFilename(self):
# The `dangerous_filename` test case contains a directory with '++' in its # The `dangerous_filename` test case contains a directory with '++' in its
# name. If it's not properly escaped, a regex error would be raised. # name. If it's not properly escaped, a regex error would be raised.
self._RunTest(os.path.join(TESTDATA_DIR, 'dangerous_filename'), True) self._RunTest(os.path.join(TESTDATA_DIR, 'dangerous_filename'), True)
def testRelativeFilename(self): def testRelativeFilename(self):
test_dir = os.path.join(TESTDATA_DIR, 'all_build_files') test_dir = os.path.join(TESTDATA_DIR, 'all_build_files')
with self.assertRaises(AssertionError): with self.assertRaises(AssertionError):
CheckPackageBoundaries(test_dir, ["BUILD.gn"]) check_package_boundaries.CheckPackageBoundaries(test_dir, ["BUILD.gn"])
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -9,7 +9,7 @@ by WebRTC follow this instructions:
2. Launch the script: 2. Launch the script:
``` ```
$ python tools_webrtc/sslroots/generate_sslroots.py roots.pem $ vpython3 tools_webrtc/sslroots/generate_sslroots.py roots.pem
``` ```
3. Step 2 should have generated an ssl_roots.h file right next to roots.pem. 3. Step 2 should have generated an ssl_roots.h file right next to roots.pem.

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# -*- coding:utf-8 -*- # -*- coding:utf-8 -*-
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
# #
@ -17,7 +19,7 @@ Arguments:
generated file size. generated file size.
""" """
import commands import subprocess
from optparse import OptionParser from optparse import OptionParser
import os import os
import re import re
@ -39,180 +41,174 @@ _VERBOSE = 'verbose'
def main(): def main():
"""The main entrypoint.""" """The main entrypoint."""
parser = OptionParser('usage %prog FILE') parser = OptionParser('usage %prog FILE')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true') parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
parser.add_option('-f', parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true')
'--full_cert', options, args = parser.parse_args()
dest='full_cert', if len(args) < 1:
action='store_true') parser.error('No crt file specified.')
options, args = parser.parse_args() return
if len(args) < 1: root_dir = _SplitCrt(args[0], options)
parser.error('No crt file specified.') _GenCFiles(root_dir, options)
return _Cleanup(root_dir)
root_dir = _SplitCrt(args[0], options)
_GenCFiles(root_dir, options)
_Cleanup(root_dir)
def _SplitCrt(source_file, options): def _SplitCrt(source_file, options):
sub_file_blocks = [] sub_file_blocks = []
label_name = '' label_name = ''
root_dir = os.path.dirname(os.path.abspath(source_file)) + '/' root_dir = os.path.dirname(os.path.abspath(source_file)) + '/'
_PrintOutput(root_dir, options) _PrintOutput(root_dir, options)
f = open(source_file) f = open(source_file)
for line in f: for line in f:
if line.startswith('# Label: '): if line.startswith('# Label: '):
sub_file_blocks.append(line) sub_file_blocks.append(line)
label = re.search(r'\".*\"', line) label = re.search(r'\".*\"', line)
temp_label = label.group(0) temp_label = label.group(0)
end = len(temp_label) - 1 end = len(temp_label) - 1
label_name = _SafeName(temp_label[1:end]) label_name = _SafeName(temp_label[1:end])
elif line.startswith('-----END CERTIFICATE-----'): elif line.startswith('-----END CERTIFICATE-----'):
sub_file_blocks.append(line) sub_file_blocks.append(line)
new_file_name = root_dir + _PREFIX + label_name + _EXTENSION new_file_name = root_dir + _PREFIX + label_name + _EXTENSION
_PrintOutput('Generating: ' + new_file_name, options) _PrintOutput('Generating: ' + new_file_name, options)
new_file = open(new_file_name, 'w') new_file = open(new_file_name, 'w')
for out_line in sub_file_blocks: for out_line in sub_file_blocks:
new_file.write(out_line) new_file.write(out_line)
new_file.close() new_file.close()
sub_file_blocks = [] sub_file_blocks = []
else: else:
sub_file_blocks.append(line) sub_file_blocks.append(line)
f.close() f.close()
return root_dir return root_dir
def _GenCFiles(root_dir, options): def _GenCFiles(root_dir, options):
output_header_file = open(root_dir + _GENERATED_FILE, 'w') output_header_file = open(root_dir + _GENERATED_FILE, 'w')
output_header_file.write(_CreateOutputHeader()) output_header_file.write(_CreateOutputHeader())
if options.full_cert: if options.full_cert:
subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE, subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE,
_CHAR_TYPE, options) _CHAR_TYPE, options)
public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE, public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE,
_CHAR_TYPE, options) _CHAR_TYPE, options)
certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE, certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE,
_CHAR_TYPE, options) _CHAR_TYPE, options)
certificate_size_list = _CreateArraySectionHeader( certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE,
_CERTIFICATE_SIZE_VARIABLE, _INT_TYPE, options) _INT_TYPE, options)
for _, _, files in os.walk(root_dir): for _, _, files in os.walk(root_dir):
for current_file in files: for current_file in files:
if current_file.startswith(_PREFIX): if current_file.startswith(_PREFIX):
prefix_length = len(_PREFIX) prefix_length = len(_PREFIX)
length = len(current_file) - len(_EXTENSION) length = len(current_file) - len(_EXTENSION)
label = current_file[prefix_length:length] label = current_file[prefix_length:length]
filtered_output, cert_size = _CreateCertSection( filtered_output, cert_size = _CreateCertSection(root_dir, current_file,
root_dir, current_file, label, options) label, options)
output_header_file.write(filtered_output + '\n\n\n') output_header_file.write(filtered_output + '\n\n\n')
if options.full_cert: if options.full_cert:
subject_name_list += _AddLabelToArray( subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY)
label, _SUBJECT_NAME_ARRAY) public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY)
public_key_list += _AddLabelToArray( certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
label, _PUBLIC_KEY_ARRAY) certificate_size_list += (' %s,\n') % (cert_size)
certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
certificate_size_list += (' %s,\n') % (cert_size)
if options.full_cert: if options.full_cert:
subject_name_list += _CreateArraySectionFooter() subject_name_list += _CreateArraySectionFooter()
output_header_file.write(subject_name_list) output_header_file.write(subject_name_list)
public_key_list += _CreateArraySectionFooter() public_key_list += _CreateArraySectionFooter()
output_header_file.write(public_key_list) output_header_file.write(public_key_list)
certificate_list += _CreateArraySectionFooter() certificate_list += _CreateArraySectionFooter()
output_header_file.write(certificate_list) output_header_file.write(certificate_list)
certificate_size_list += _CreateArraySectionFooter() certificate_size_list += _CreateArraySectionFooter()
output_header_file.write(certificate_size_list) output_header_file.write(certificate_size_list)
output_header_file.write(_CreateOutputFooter()) output_header_file.write(_CreateOutputFooter())
output_header_file.close() output_header_file.close()
def _Cleanup(root_dir): def _Cleanup(root_dir):
for f in os.listdir(root_dir): for f in os.listdir(root_dir):
if f.startswith(_PREFIX): if f.startswith(_PREFIX):
os.remove(root_dir + f) os.remove(root_dir + f)
def _CreateCertSection(root_dir, source_file, label, options): def _CreateCertSection(root_dir, source_file, label, options):
command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file) command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file)
_PrintOutput(command, options) _PrintOutput(command, options)
output = commands.getstatusoutput(command)[1] output = subprocess.getstatusoutput(command)[1]
renamed_output = output.replace('unsigned char XXX_', renamed_output = output.replace('unsigned char XXX_',
'const unsigned char ' + label + '_') 'const unsigned char ' + label + '_')
filtered_output = '' filtered_output = ''
cert_block = '^const unsigned char.*?};$' cert_block = '^const unsigned char.*?};$'
prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL) prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL)
if not options.full_cert: if not options.full_cert:
filtered_output = prog.sub('', renamed_output, count=2) filtered_output = prog.sub('', renamed_output, count=2)
else: else:
filtered_output = renamed_output filtered_output = renamed_output
cert_size_block = r'\d\d\d+' cert_size_block = r'\d\d\d+'
prog2 = re.compile(cert_size_block, re.MULTILINE | re.VERBOSE) prog2 = re.compile(cert_size_block, re.MULTILINE | re.VERBOSE)
result = prog2.findall(renamed_output) result = prog2.findall(renamed_output)
cert_size = result[len(result) - 1] cert_size = result[len(result) - 1]
return filtered_output, cert_size return filtered_output, cert_size
def _CreateOutputHeader(): def _CreateOutputHeader():
output = ( output = ('/*\n'
'/*\n' ' * Copyright 2004 The WebRTC Project Authors. All rights '
' * Copyright 2004 The WebRTC Project Authors. All rights ' 'reserved.\n'
'reserved.\n' ' *\n'
' *\n' ' * Use of this source code is governed by a BSD-style license\n'
' * Use of this source code is governed by a BSD-style license\n' ' * that can be found in the LICENSE file in the root of the '
' * that can be found in the LICENSE file in the root of the ' 'source\n'
'source\n' ' * tree. An additional intellectual property rights grant can be '
' * tree. An additional intellectual property rights grant can be ' 'found\n'
'found\n' ' * in the file PATENTS. All contributing project authors may\n'
' * in the file PATENTS. All contributing project authors may\n' ' * be found in the AUTHORS file in the root of the source tree.\n'
' * be found in the AUTHORS file in the root of the source tree.\n' ' */\n\n'
' */\n\n' '#ifndef RTC_BASE_SSL_ROOTS_H_\n'
'#ifndef RTC_BASE_SSL_ROOTS_H_\n' '#define RTC_BASE_SSL_ROOTS_H_\n\n'
'#define RTC_BASE_SSL_ROOTS_H_\n\n' '// This file is the root certificates in C form that are needed to'
'// This file is the root certificates in C form that are needed to' ' connect to\n// Google.\n\n'
' connect to\n// Google.\n\n' '// It was generated with the following command line:\n'
'// It was generated with the following command line:\n' '// > vpython3 tools_webrtc/sslroots/generate_sslroots.py'
'// > python tools_webrtc/sslroots/generate_sslroots.py' '\n// https://pki.goog/roots.pem\n\n'
'\n// https://pki.goog/roots.pem\n\n' '// clang-format off\n'
'// clang-format off\n' '// Don\'t bother formatting generated code,\n'
'// Don\'t bother formatting generated code,\n' '// also it would breaks subject/issuer lines.\n\n')
'// also it would breaks subject/issuer lines.\n\n') return output
return output
def _CreateOutputFooter(): def _CreateOutputFooter():
output = ('// clang-format on\n\n' '#endif // RTC_BASE_SSL_ROOTS_H_\n') output = ('// clang-format on\n\n#endif // RTC_BASE_SSL_ROOTS_H_\n')
return output return output
def _CreateArraySectionHeader(type_name, type_type, options): def _CreateArraySectionHeader(type_name, type_type, options):
output = ('const %s kSSLCert%sList[] = {\n') % (type_type, type_name) output = ('const %s kSSLCert%sList[] = {\n') % (type_type, type_name)
_PrintOutput(output, options) _PrintOutput(output, options)
return output return output
def _AddLabelToArray(label, type_name): def _AddLabelToArray(label, type_name):
return ' %s_%s,\n' % (label, type_name) return ' %s_%s,\n' % (label, type_name)
def _CreateArraySectionFooter(): def _CreateArraySectionFooter():
return '};\n\n' return '};\n\n'
def _SafeName(original_file_name): def _SafeName(original_file_name):
bad_chars = ' -./\\()áéíőú' bad_chars = ' -./\\()áéíőú'
replacement_chars = '' replacement_chars = ''
for _ in bad_chars: for _ in bad_chars:
replacement_chars += '_' replacement_chars += '_'
translation_table = string.maketrans(bad_chars, replacement_chars) translation_table = string.maketrans(bad_chars, replacement_chars)
return original_file_name.translate(translation_table) return original_file_name.translate(translation_table)
def _PrintOutput(output, options): def _PrintOutput(output, options):
if options.verbose: if options.verbose:
print output print(output)
if __name__ == '__main__': if __name__ == '__main__':
main() main()

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env vpython3
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -19,11 +20,11 @@ import sys
def FindSrcDirPath(): def FindSrcDirPath():
"""Returns the abs path to the src/ dir of the project.""" """Returns the abs path to the src/ dir of the project."""
src_dir = os.path.dirname(os.path.abspath(__file__)) src_dir = os.path.dirname(os.path.abspath(__file__))
while os.path.basename(src_dir) != 'src': while os.path.basename(src_dir) != 'src':
src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
return src_dir return src_dir
UPDATE_BRANCH_NAME = 'webrtc_version_update' UPDATE_BRANCH_NAME = 'webrtc_version_update'
@ -33,140 +34,132 @@ NOTIFY_EMAIL = 'mbonadei@webrtc.org'
def _RemovePreviousUpdateBranch(): def _RemovePreviousUpdateBranch():
active_branch, branches = _GetBranches() active_branch, branches = _GetBranches()
if active_branch == UPDATE_BRANCH_NAME: if active_branch == UPDATE_BRANCH_NAME:
active_branch = 'master' active_branch = 'master'
if UPDATE_BRANCH_NAME in branches: if UPDATE_BRANCH_NAME in branches:
logging.info('Removing previous update branch (%s)', logging.info('Removing previous update branch (%s)', UPDATE_BRANCH_NAME)
UPDATE_BRANCH_NAME) subprocess.check_call(['git', 'checkout', active_branch])
subprocess.check_call(['git', 'checkout', active_branch]) subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME])
subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME]) logging.info('No branch to remove')
logging.info('No branch to remove')
def _GetLastAuthor(): def _GetLastAuthor():
"""Returns a string with the author of the last commit.""" """Returns a string with the author of the last commit."""
author = subprocess.check_output(['git', 'log', author = subprocess.check_output(
'-1', ['git', 'log', '-1', '--pretty=format:"%an"']).splitlines()
'--pretty=format:"%an"']).splitlines() return author
return author
def _GetBranches(): def _GetBranches():
"""Returns a tuple (active, branches). """Returns a tuple (active, branches).
'active' is a string with name of the currently active branch, while 'active' is a string with name of the currently active branch, while
'branches' is the list of all branches. 'branches' is the list of all branches.
""" """
lines = subprocess.check_output(['git', 'branch']).splitlines() lines = subprocess.check_output(['git', 'branch']).splitlines()
branches = [] branches = []
active = '' active = ''
for line in lines: for line in lines:
if '*' in line: if '*' in line:
# The assumption is that the first char will always be the '*'. # The assumption is that the first char will always be the '*'.
active = line[1:].strip() active = line[1:].strip()
branches.append(active) branches.append(active)
else: else:
branch = line.strip() branch = line.strip()
if branch: if branch:
branches.append(branch) branches.append(branch)
return active, branches return active, branches
def _CreateUpdateBranch(): def _CreateUpdateBranch():
logging.info('Creating update branch: %s', UPDATE_BRANCH_NAME) logging.info('Creating update branch: %s', UPDATE_BRANCH_NAME)
subprocess.check_call(['git', 'checkout', '-b', UPDATE_BRANCH_NAME]) subprocess.check_call(['git', 'checkout', '-b', UPDATE_BRANCH_NAME])
def _UpdateWebRTCVersion(filename): def _UpdateWebRTCVersion(filename):
with open(filename) as f: with open(filename) as f:
content = f.read() content = f.read()
d = datetime.datetime.utcnow() d = datetime.datetime.utcnow()
# pylint: disable=line-too-long # pylint: disable=line-too-long
new_content = re.sub( new_content = re.sub(
r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}', r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}',
r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' % (d.year, r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' %
d.month, (d.year, d.month, d.day, d.hour, d.minute, d.second),
d.day, content,
d.hour, flags=re.MULTILINE)
d.minute, # pylint: enable=line-too-long
d.second), with open(filename, 'w') as f:
content, f.write(new_content)
flags=re.MULTILINE)
# pylint: enable=line-too-long
with open(filename, 'w') as f:
f.write(new_content)
def _IsTreeClean(): def _IsTreeClean():
stdout = subprocess.check_output(['git', 'status', '--porcelain']) stdout = subprocess.check_output(['git', 'status', '--porcelain'])
if len(stdout) == 0: if len(stdout) == 0:
return True return True
return False return False
def _LocalCommit(): def _LocalCommit():
logging.info('Committing changes locally.') logging.info('Committing changes locally.')
d = datetime.datetime.utcnow() d = datetime.datetime.utcnow()
git_author = subprocess.check_output(['git', 'config', commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).'
'user.email']).strip() '\n\nBug: None')
commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).' commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, d.second)
'\n\nBug: None') subprocess.check_call(['git', 'add', '--update', '.'])
commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, subprocess.check_call(['git', 'commit', '-m', commit_msg])
d.second)
subprocess.check_call(['git', 'add', '--update', '.'])
subprocess.check_call(['git', 'commit', '-m', commit_msg])
def _UploadCL(commit_queue_mode): def _UploadCL(commit_queue_mode):
"""Upload the committed changes as a changelist to Gerrit. """Upload the committed changes as a changelist to Gerrit.
commit_queue_mode: commit_queue_mode:
- 2: Submit to commit queue. - 2: Submit to commit queue.
- 1: Run trybots but do not submit to CQ. - 1: Run trybots but do not submit to CQ.
- 0: Skip CQ, upload only. - 0: Skip CQ, upload only.
""" """
cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks', cmd = [
'--bypass-watchlist'] 'git', 'cl', 'upload', '--force', '--bypass-hooks', '--bypass-watchlist'
if commit_queue_mode >= 2: ]
logging.info('Sending the CL to the CQ...') if commit_queue_mode >= 2:
cmd.extend(['-o', 'label=Bot-Commit+1']) logging.info('Sending the CL to the CQ...')
cmd.extend(['-o', 'label=Commit-Queue+2']) cmd.extend(['-o', 'label=Bot-Commit+1'])
cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL]) cmd.extend(['-o', 'label=Commit-Queue+2'])
elif commit_queue_mode >= 1: cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL])
logging.info('Starting CQ dry run...') elif commit_queue_mode >= 1:
cmd.extend(['-o', 'label=Commit-Queue+1']) logging.info('Starting CQ dry run...')
subprocess.check_call(cmd) cmd.extend(['-o', 'label=Commit-Queue+1'])
subprocess.check_call(cmd)
def main(): def main():
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)
p = argparse.ArgumentParser() p = argparse.ArgumentParser()
p.add_argument('--clean', p.add_argument('--clean',
action='store_true', action='store_true',
default=False, default=False,
help='Removes any previous local update branch.') help='Removes any previous local update branch.')
opts = p.parse_args() opts = p.parse_args()
if opts.clean: if opts.clean:
_RemovePreviousUpdateBranch() _RemovePreviousUpdateBranch()
if _GetLastAuthor() == 'webrtc-version-updater': if _GetLastAuthor() == 'webrtc-version-updater':
logging.info('Last commit is a version change, skipping CL.') logging.info('Last commit is a version change, skipping CL.')
return 0
version_filename = os.path.join(CHECKOUT_SRC_DIR, 'call', 'version.cc')
_CreateUpdateBranch()
_UpdateWebRTCVersion(version_filename)
if _IsTreeClean():
logging.info('No WebRTC version change detected, skipping CL.')
else:
_LocalCommit()
logging.info('Uploading CL...')
_UploadCL(2)
return 0 return 0
version_filename = os.path.join(CHECKOUT_SRC_DIR, 'call', 'version.cc')
_CreateUpdateBranch()
_UpdateWebRTCVersion(version_filename)
if _IsTreeClean():
logging.info('No WebRTC version change detected, skipping CL.')
else:
_LocalCommit()
logging.info('Uploading CL...')
_UploadCL(2)
return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main()) sys.exit(main())

View file

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
# #
# Use of this source code is governed by a BSD-style license # Use of this source code is governed by a BSD-style license
@ -75,12 +77,8 @@ _EXTENSION_FLAGS = {
} }
def PathExists(*args):
return os.path.exists(os.path.join(*args))
def FindWebrtcSrcFromFilename(filename): def FindWebrtcSrcFromFilename(filename):
"""Searches for the root of the WebRTC checkout. """Searches for the root of the WebRTC checkout.
Simply checks parent directories until it finds .gclient and src/. Simply checks parent directories until it finds .gclient and src/.
@ -90,20 +88,20 @@ def FindWebrtcSrcFromFilename(filename):
Returns: Returns:
(String) Path of 'src/', or None if unable to find. (String) Path of 'src/', or None if unable to find.
""" """
curdir = os.path.normpath(os.path.dirname(filename)) curdir = os.path.normpath(os.path.dirname(filename))
while not (os.path.basename(curdir) == 'src' while not (os.path.basename(curdir) == 'src'
and PathExists(curdir, 'DEPS') and and os.path.exists(os.path.join(curdir, 'DEPS')) and
(PathExists(curdir, '..', '.gclient') (os.path.exists(os.path.join(curdir, '..', '.gclient'))
or PathExists(curdir, '.git'))): or os.path.exists(os.path.join(curdir, '.git')))):
nextdir = os.path.normpath(os.path.join(curdir, '..')) nextdir = os.path.normpath(os.path.join(curdir, '..'))
if nextdir == curdir: if nextdir == curdir:
return None return None
curdir = nextdir curdir = nextdir
return curdir return curdir
def GetDefaultSourceFile(webrtc_root, filename): def GetDefaultSourceFile(webrtc_root, filename):
"""Returns the default source file to use as an alternative to `filename`. """Returns the default source file to use as an alternative to `filename`.
Compile flags used to build the default source file is assumed to be a Compile flags used to build the default source file is assumed to be a
close-enough approximation for building `filename`. close-enough approximation for building `filename`.
@ -115,13 +113,13 @@ def GetDefaultSourceFile(webrtc_root, filename):
Returns: Returns:
(String) Absolute path to substitute source file. (String) Absolute path to substitute source file.
""" """
if 'test.' in filename: if 'test.' in filename:
return os.path.join(webrtc_root, 'base', 'logging_unittest.cc') return os.path.join(webrtc_root, 'base', 'logging_unittest.cc')
return os.path.join(webrtc_root, 'base', 'logging.cc') return os.path.join(webrtc_root, 'base', 'logging.cc')
def GetNinjaBuildOutputsForSourceFile(out_dir, filename): def GetNinjaBuildOutputsForSourceFile(out_dir, filename):
"""Returns a list of build outputs for filename. """Returns a list of build outputs for filename.
The list is generated by invoking 'ninja -t query' tool to retrieve a list of The list is generated by invoking 'ninja -t query' tool to retrieve a list of
inputs and outputs of `filename`. This list is then filtered to only include inputs and outputs of `filename`. This list is then filtered to only include
@ -135,35 +133,35 @@ def GetNinjaBuildOutputsForSourceFile(out_dir, filename):
(List of Strings) List of target names. Will return [] if `filename` doesn't (List of Strings) List of target names. Will return [] if `filename` doesn't
yield any .o or .obj outputs. yield any .o or .obj outputs.
""" """
# Ninja needs the path to the source file relative to the output build # Ninja needs the path to the source file relative to the output build
# directory. # directory.
rel_filename = os.path.relpath(filename, out_dir) rel_filename = os.path.relpath(filename, out_dir)
p = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename], p = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename],
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
universal_newlines=True) universal_newlines=True)
stdout, _ = p.communicate() stdout, _ = p.communicate()
if p.returncode != 0: if p.returncode != 0:
return [] return []
# The output looks like: # The output looks like:
# ../../relative/path/to/source.cc: # ../../relative/path/to/source.cc:
# outputs: # outputs:
# obj/reative/path/to/target.source.o # obj/reative/path/to/target.source.o
# obj/some/other/target2.source.o # obj/some/other/target2.source.o
# another/target.txt # another/target.txt
# #
outputs_text = stdout.partition('\n outputs:\n')[2] outputs_text = stdout.partition('\n outputs:\n')[2]
output_lines = [line.strip() for line in outputs_text.split('\n')] output_lines = [line.strip() for line in outputs_text.split('\n')]
return [ return [
target for target in output_lines target for target in output_lines
if target and (target.endswith('.o') or target.endswith('.obj')) if target and (target.endswith('.o') or target.endswith('.obj'))
] ]
def GetClangCommandLineForNinjaOutput(out_dir, build_target): def GetClangCommandLineForNinjaOutput(out_dir, build_target):
"""Returns the Clang command line for building `build_target` """Returns the Clang command line for building `build_target`
Asks ninja for the list of commands used to build `filename` and returns the Asks ninja for the list of commands used to build `filename` and returns the
final Clang invocation. final Clang invocation.
@ -176,25 +174,25 @@ def GetClangCommandLineForNinjaOutput(out_dir, build_target):
(String or None) Clang command line or None if a Clang command line couldn't (String or None) Clang command line or None if a Clang command line couldn't
be determined. be determined.
""" """
p = subprocess.Popen( p = subprocess.Popen(
['ninja', '-v', '-C', out_dir, '-t', 'commands', build_target], ['ninja', '-v', '-C', out_dir, '-t', 'commands', build_target],
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
universal_newlines=True) universal_newlines=True)
stdout, _ = p.communicate() stdout, _ = p.communicate()
if p.returncode != 0: if p.returncode != 0:
return None
# Ninja will return multiple build steps for all dependencies up to
# `build_target`. The build step we want is the last Clang invocation, which
# is expected to be the one that outputs `build_target`.
for line in reversed(stdout.split('\n')):
if 'clang' in line:
return line
return None return None
# Ninja will return multiple build steps for all dependencies up to
# `build_target`. The build step we want is the last Clang invocation, which
# is expected to be the one that outputs `build_target`.
for line in reversed(stdout.split('\n')):
if 'clang' in line:
return line
return None
def GetClangCommandLineFromNinjaForSource(out_dir, filename): def GetClangCommandLineFromNinjaForSource(out_dir, filename):
"""Returns a Clang command line used to build `filename`. """Returns a Clang command line used to build `filename`.
The same source file could be built multiple times using different tool The same source file could be built multiple times using different tool
chains. In such cases, this command returns the first Clang invocation. We chains. In such cases, this command returns the first Clang invocation. We
@ -210,17 +208,17 @@ def GetClangCommandLineFromNinjaForSource(out_dir, filename):
(String or None): Command line for Clang invocation using `filename` as a (String or None): Command line for Clang invocation using `filename` as a
source. Returns None if no such command line could be found. source. Returns None if no such command line could be found.
""" """
build_targets = GetNinjaBuildOutputsForSourceFile(out_dir, filename) build_targets = GetNinjaBuildOutputsForSourceFile(out_dir, filename)
for build_target in build_targets: for build_target in build_targets:
command_line = GetClangCommandLineForNinjaOutput(out_dir, build_target) command_line = GetClangCommandLineForNinjaOutput(out_dir, build_target)
if command_line: if command_line:
return command_line return command_line
return None return None
def GetClangOptionsFromCommandLine(clang_commandline, out_dir, def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
additional_flags): additional_flags):
"""Extracts relevant command line options from `clang_commandline` """Extracts relevant command line options from `clang_commandline`
Args: Args:
clang_commandline: (String) Full Clang invocation. clang_commandline: (String) Full Clang invocation.
@ -232,48 +230,46 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
(List of Strings) The list of command line flags for this source file. Can (List of Strings) The list of command line flags for this source file. Can
be empty. be empty.
""" """
clang_flags = [] + additional_flags clang_flags = [] + additional_flags
# Parse flags that are important for YCM's purposes. # Parse flags that are important for YCM's purposes.
clang_tokens = shlex.split(clang_commandline) clang_tokens = shlex.split(clang_commandline)
for flag_index, flag in enumerate(clang_tokens): for flag_index, flag in enumerate(clang_tokens):
if flag.startswith('-I'): if flag.startswith('-I'):
# Relative paths need to be resolved, because they're relative to # Relative paths need to be resolved, because they're relative to
# the output dir, not the source. # the output dir, not the source.
if flag[2] == '/': if flag[2] == '/':
clang_flags.append(flag) clang_flags.append(flag)
else: else:
abs_path = os.path.normpath(os.path.join(out_dir, flag[2:])) abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
clang_flags.append('-I' + abs_path) clang_flags.append('-I' + abs_path)
elif flag.startswith('-std'): elif flag.startswith('-std'):
clang_flags.append(flag) clang_flags.append(flag)
elif flag.startswith('-') and flag[1] in 'DWFfmO': elif flag.startswith('-') and flag[1] in 'DWFfmO':
if (flag == '-Wno-deprecated-register' or if flag in ['-Wno-deprecated-register', '-Wno-header-guard']:
flag == '-Wno-header-guard'): # These flags causes libclang (3.3) to crash. Remove it until
# These flags causes libclang (3.3) to crash. Remove it until # things are fixed.
# things are fixed. continue
continue clang_flags.append(flag)
clang_flags.append(flag) elif flag == '-isysroot':
elif flag == '-isysroot': # On Mac -isysroot <path> is used to find the system headers.
# On Mac -isysroot <path> is used to find the system headers. # Copy over both flags.
# Copy over both flags. if flag_index + 1 < len(clang_tokens):
if flag_index + 1 < len(clang_tokens): clang_flags.append(flag)
clang_flags.append(flag) clang_flags.append(clang_tokens[flag_index + 1])
clang_flags.append(clang_tokens[flag_index + 1]) elif flag.startswith('--sysroot='):
elif flag.startswith('--sysroot='): # On Linux we use a sysroot image.
# On Linux we use a sysroot image. sysroot_path = flag.lstrip('--sysroot=')
sysroot_path = flag.lstrip('--sysroot=') if sysroot_path.startswith('/'):
if sysroot_path.startswith('/'): clang_flags.append(flag)
clang_flags.append(flag) else:
else: abs_path = os.path.normpath(os.path.join(out_dir, sysroot_path))
abs_path = os.path.normpath(os.path.join( clang_flags.append('--sysroot=' + abs_path)
out_dir, sysroot_path)) return clang_flags
clang_flags.append('--sysroot=' + abs_path)
return clang_flags
def GetClangOptionsFromNinjaForFilename(webrtc_root, filename): def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
"""Returns the Clang command line options needed for building `filename`. """Returns the Clang command line options needed for building `filename`.
Command line options are based on the command used by ninja for building Command line options are based on the command used by ninja for building
`filename`. If `filename` is a .h file, uses its companion .cc or .cpp file. `filename`. If `filename` is a .h file, uses its companion .cc or .cpp file.
@ -289,55 +285,54 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
(List of Strings) The list of command line flags for this source file. Can (List of Strings) The list of command line flags for this source file. Can
be empty. be empty.
""" """
if not webrtc_root: if not webrtc_root:
return [] return []
# Generally, everyone benefits from including WebRTC's src/, because all of # Generally, everyone benefits from including WebRTC's src/, because all of
# WebRTC's includes are relative to that. # WebRTC's includes are relative to that.
additional_flags = ['-I' + os.path.join(webrtc_root)] additional_flags = ['-I' + os.path.join(webrtc_root)]
# Version of Clang used to compile WebRTC can be newer then version of # Version of Clang used to compile WebRTC can be newer then version of
# libclang that YCM uses for completion. So it's possible that YCM's # libclang that YCM uses for completion. So it's possible that YCM's
# libclang doesn't know about some used warning options, which causes # libclang doesn't know about some used warning options, which causes
# compilation warnings (and errors, because of '-Werror'); # compilation warnings (and errors, because of '-Werror');
additional_flags.append('-Wno-unknown-warning-option') additional_flags.append('-Wno-unknown-warning-option')
sys.path.append(os.path.join(webrtc_root, 'tools', 'vim')) sys.path.append(os.path.join(webrtc_root, 'tools', 'vim'))
from ninja_output import GetNinjaOutputDirectory from ninja_output import GetNinjaOutputDirectory
out_dir = GetNinjaOutputDirectory(webrtc_root) out_dir = GetNinjaOutputDirectory(webrtc_root)
basename, extension = os.path.splitext(filename) basename, extension = os.path.splitext(filename)
if extension == '.h': if extension == '.h':
candidates = [basename + ext for ext in _HEADER_ALTERNATES] candidates = [basename + ext for ext in _HEADER_ALTERNATES]
else: else:
candidates = [filename] candidates = [filename]
clang_line = None clang_line = None
buildable_extension = extension buildable_extension = extension
for candidate in candidates: for candidate in candidates:
clang_line = GetClangCommandLineFromNinjaForSource(out_dir, candidate) clang_line = GetClangCommandLineFromNinjaForSource(out_dir, candidate)
if clang_line: if clang_line:
buildable_extension = os.path.splitext(candidate)[1] buildable_extension = os.path.splitext(candidate)[1]
break break
additional_flags += _EXTENSION_FLAGS.get(buildable_extension, []) additional_flags += _EXTENSION_FLAGS.get(buildable_extension, [])
if not clang_line: if not clang_line:
# If ninja didn't know about filename or it's companion files, then try # If ninja didn't know about filename or it's companion files, then try
# a default build target. It is possible that the file is new, or # a default build target. It is possible that the file is new, or
# build.ninja is stale. # build.ninja is stale.
clang_line = GetClangCommandLineFromNinjaForSource( clang_line = GetClangCommandLineFromNinjaForSource(
out_dir, GetDefaultSourceFile(webrtc_root, filename)) out_dir, GetDefaultSourceFile(webrtc_root, filename))
if not clang_line: if not clang_line:
return additional_flags return additional_flags
return GetClangOptionsFromCommandLine(clang_line, out_dir, return GetClangOptionsFromCommandLine(clang_line, out_dir, additional_flags)
additional_flags)
def FlagsForFile(filename): def FlagsForFile(filename):
"""This is the main entry point for YCM. Its interface is fixed. """This is the main entry point for YCM. Its interface is fixed.
Args: Args:
filename: (String) Path to source file being edited. filename: (String) Path to source file being edited.
@ -347,16 +342,15 @@ def FlagsForFile(filename):
'flags': (List of Strings) Command line flags. 'flags': (List of Strings) Command line flags.
'do_cache': (Boolean) True if the result should be cached. 'do_cache': (Boolean) True if the result should be cached.
""" """
abs_filename = os.path.abspath(filename) abs_filename = os.path.abspath(filename)
webrtc_root = FindWebrtcSrcFromFilename(abs_filename) webrtc_root = FindWebrtcSrcFromFilename(abs_filename)
clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, abs_filename)
abs_filename)
# If clang_flags could not be determined, then assume that was due to a # If clang_flags could not be determined, then assume that was due to a
# transient failure. Preventing YCM from caching the flags allows us to # transient failure. Preventing YCM from caching the flags allows us to
# try to determine the flags again. # try to determine the flags again.
should_cache_flags_for_file = bool(clang_flags) should_cache_flags_for_file = bool(clang_flags)
final_flags = _DEFAULT_FLAGS + clang_flags final_flags = _DEFAULT_FLAGS + clang_flags
return {'flags': final_flags, 'do_cache': should_cache_flags_for_file} return {'flags': final_flags, 'do_cache': should_cache_flags_for_file}