mirror of https://github.com/mollyim/webrtc.git
synced 2025-05-12 21:30:45 +01:00
tools_webrtc dir converted to py3 + top level PRESUBMIT script
Bug: webrtc:13607
Change-Id: Ib018e43ea977cc24dd71048e68e3343741f7f31b
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/249083
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Reviewed-by: Jeremy Leconte <jleconte@google.com>
Commit-Queue: Christoffer Jansson <jansson@google.com>
Cr-Commit-Position: refs/heads/main@{#35953}
parent b5cba85c2f
commit 4e8a773b4b
50 changed files with 4570 additions and 4673 deletions
10 .vpython3

@@ -33,10 +33,16 @@ wheel: <
# Used by tools_webrtc/perf/webrtc_dashboard_upload.py.
wheel: <
-  name: "infra/python/wheels/httplib2-py2_py3"
-  version: "version:0.10.3"
+  name: "infra/python/wheels/httplib2-py3"
+  version: "version:0.19.1"
+>
+
+wheel: <
+  name: "infra/python/wheels/pyparsing-py2_py3"
+  version: "version:2.4.7"
>

# Used by:
# build/toolchain/win
wheel: <
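A note on what the spec buys: running a script through vpython3 materializes the wheels pinned in .vpython3 into an ephemeral virtualenv, so the packages above import with no manual installation. A minimal sketch (hypothetical script, not part of this commit):

#!/usr/bin/env vpython3
# Hypothetical example: under vpython3, the httplib2 wheel pinned in
# .vpython3 above resolves automatically, with no pip install step.
import httplib2

response, _content = httplib2.Http().request('https://webrtc.org', 'GET')
print(response.status)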
2098 PRESUBMIT.py

File diff suppressed because it is too large.
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3

# Copyright 2017 The WebRTC project authors. All Rights Reserved.
#
@@ -8,6 +8,7 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

+from __future__ import absolute_import
import os
import shutil
import tempfile
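For readers unfamiliar with the shim added here: on Python 2, `from __future__ import absolute_import` gives bare imports Python 3 semantics inside packages (no implicit relative imports); on Python 3 it is a no-op, which is what makes it safe while both interpreters still run this file. A small illustration (package layout assumed, not from this CL):

# pkg/module.py -- assumed layout for illustration
from __future__ import absolute_import  # no-op on Python 3

# On Python 2 without the shim, a sibling pkg/tempfile.py would shadow the
# stdlib here; with it (and always on Python 3), the real module is bound.
import tempfile
print(tempfile.gettempdir())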
@@ -20,145 +21,145 @@ from presubmit_test_mocks import MockInputApi, MockOutputApi, MockFile, MockChange


class CheckBugEntryFieldTest(unittest.TestCase):
  def testCommitMessageBugEntryWithNoError(self):
    mock_input_api = MockInputApi()
    mock_output_api = MockOutputApi()
    mock_input_api.change = MockChange([], ['webrtc:1234'])
    errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
                                                  mock_output_api)
    self.assertEqual(0, len(errors))

  def testCommitMessageBugEntryReturnError(self):
    mock_input_api = MockInputApi()
    mock_output_api = MockOutputApi()
    mock_input_api.change = MockChange([], ['webrtc:1234', 'webrtc=4321'])
    errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
                                                  mock_output_api)
    self.assertEqual(1, len(errors))
    self.assertEqual(('Bogus Bug entry: webrtc=4321. Please specify'
                      ' the issue tracker prefix and the issue number,'
                      ' separated by a colon, e.g. webrtc:123 or'
                      ' chromium:12345.'), str(errors[0]))

  def testCommitMessageBugEntryWithoutPrefix(self):
    mock_input_api = MockInputApi()
    mock_output_api = MockOutputApi()
    mock_input_api.change = MockChange([], ['1234'])
    errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
                                                  mock_output_api)
    self.assertEqual(1, len(errors))
    self.assertEqual(('Bug entry requires issue tracker prefix, '
                      'e.g. webrtc:1234'), str(errors[0]))

  def testCommitMessageBugEntryIsNone(self):
    mock_input_api = MockInputApi()
    mock_output_api = MockOutputApi()
    mock_input_api.change = MockChange([], ['None'])
    errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
                                                  mock_output_api)
    self.assertEqual(0, len(errors))

  def testCommitMessageBugEntrySupportInternalBugReference(self):
    mock_input_api = MockInputApi()
    mock_output_api = MockOutputApi()
    mock_input_api.change.BUG = 'b/12345'
    errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
                                                  mock_output_api)
    self.assertEqual(0, len(errors))
    mock_input_api.change.BUG = 'b/12345, webrtc:1234'
    errors = PRESUBMIT.CheckCommitMessageBugEntry(mock_input_api,
                                                  mock_output_api)
    self.assertEqual(0, len(errors))
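The expected messages in these tests pin down the validation contract. A minimal sketch of such a check (assumed shape, not the actual PRESUBMIT.py implementation, which also handles comma-separated BUG= tags):

import re

BUG_RE = re.compile(r'^(b/\d+|\w+:\d+)$')


def CheckCommitMessageBugEntry(input_api, output_api):
  """Sketch: each Bug entry must be tracker:number, b/number, or None."""
  errors = []
  for bug in input_api.change.BugsFromDescription():
    bug = bug.strip()
    if bug == 'None' or BUG_RE.match(bug):
      continue
    if bug.isdigit():
      errors.append('Bug entry requires issue tracker prefix, '
                    'e.g. webrtc:1234')
    else:
      errors.append('Bogus Bug entry: %s. Please specify the issue tracker '
                    'prefix and the issue number, separated by a colon, '
                    'e.g. webrtc:123 or chromium:12345.' % bug)
  return [output_api.PresubmitError(error) for error in errors]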

class CheckNewlineAtTheEndOfProtoFilesTest(unittest.TestCase):
  def setUp(self):
    self.tmp_dir = tempfile.mkdtemp()
    self.proto_file_path = os.path.join(self.tmp_dir, 'foo.proto')
    self.input_api = MockInputApi()
    self.output_api = MockOutputApi()

  def tearDown(self):
    shutil.rmtree(self.tmp_dir, ignore_errors=True)

  def testErrorIfProtoFileDoesNotEndWithNewline(self):
    self._GenerateProtoWithoutNewlineAtTheEnd()
    self.input_api.files = [MockFile(self.proto_file_path)]
    errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles(
        self.input_api, self.output_api, lambda x: True)
    self.assertEqual(1, len(errors))
    self.assertEqual(
        'File %s must end with exactly one newline.' % self.proto_file_path,
        str(errors[0]))

  def testNoErrorIfProtoFileEndsWithNewline(self):
    self._GenerateProtoWithNewlineAtTheEnd()
    self.input_api.files = [MockFile(self.proto_file_path)]
    errors = PRESUBMIT.CheckNewlineAtTheEndOfProtoFiles(
        self.input_api, self.output_api, lambda x: True)
    self.assertEqual(0, len(errors))

  def _GenerateProtoWithNewlineAtTheEnd(self):
    with open(self.proto_file_path, 'w') as f:
      f.write(
          textwrap.dedent("""
        syntax = "proto2";
        option optimize_for = LITE_RUNTIME;
        package webrtc.audioproc;
      """))

  def _GenerateProtoWithoutNewlineAtTheEnd(self):
    with open(self.proto_file_path, 'w') as f:
      f.write(
          textwrap.dedent("""
        syntax = "proto2";
        option optimize_for = LITE_RUNTIME;
        package webrtc.audioproc;"""))
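A minimal sketch of the check these tests exercise (assumed shape, not the real implementation):

def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api,
                                     source_file_filter):
  """Sketch: require .proto files to end with exactly one newline."""
  errors = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    file_path = f.LocalPath()
    if not file_path.endswith('.proto'):
      continue
    with open(file_path) as proto_file:
      contents = proto_file.read()
    if not contents.endswith('\n') or contents.endswith('\n\n'):
      errors.append(
          output_api.PresubmitError(
              'File %s must end with exactly one newline.' % file_path))
  return errors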
class CheckNoMixingSourcesTest(unittest.TestCase):
  def setUp(self):
    self.tmp_dir = tempfile.mkdtemp()
    self.file_path = os.path.join(self.tmp_dir, 'BUILD.gn')
    self.input_api = MockInputApi()
    self.output_api = MockOutputApi()

  def tearDown(self):
    shutil.rmtree(self.tmp_dir, ignore_errors=True)

  def testErrorIfCAndCppAreMixed(self):
    self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.cc', 'bar.h'])

  def testErrorIfCAndObjCAreMixed(self):
    self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.m', 'bar.h'])

  def testErrorIfCAndObjCppAreMixed(self):
    self._AssertNumberOfErrorsWithSources(1, ['foo.c', 'bar.mm', 'bar.h'])

  def testErrorIfCppAndObjCAreMixed(self):
    self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.m', 'bar.h'])

  def testErrorIfCppAndObjCppAreMixed(self):
    self._AssertNumberOfErrorsWithSources(1, ['foo.cc', 'bar.mm', 'bar.h'])

  def testNoErrorIfOnlyC(self):
    self._AssertNumberOfErrorsWithSources(0, ['foo.c', 'bar.c', 'bar.h'])

  def testNoErrorIfOnlyCpp(self):
    self._AssertNumberOfErrorsWithSources(0, ['foo.cc', 'bar.cc', 'bar.h'])

  def testNoErrorIfOnlyObjC(self):
    self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.m', 'bar.h'])

  def testNoErrorIfOnlyObjCpp(self):
    self._AssertNumberOfErrorsWithSources(0, ['foo.mm', 'bar.mm', 'bar.h'])

  def testNoErrorIfObjCAndObjCppAreMixed(self):
    self._AssertNumberOfErrorsWithSources(0, ['foo.m', 'bar.mm', 'bar.h'])

  def testNoErrorIfSourcesAreInExclusiveIfBranches(self):
    self._GenerateBuildFile(
        textwrap.dedent("""
      rtc_library("bar_foo") {
        if (is_win) {
          sources = [
@@ -184,15 +185,15 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
        }
      }
    """))
    self.input_api.files = [MockFile(self.file_path)]
    errors = PRESUBMIT.CheckNoMixingSources(self.input_api,
                                            [MockFile(self.file_path)],
                                            self.output_api)
    self.assertEqual(0, len(errors))

  def testErrorIfSourcesAreNotInExclusiveIfBranches(self):
    self._GenerateBuildFile(
        textwrap.dedent("""
      rtc_library("bar_foo") {
        if (is_win) {
          sources = [
@@ -224,23 +225,22 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
        }
      }
    """))
    self.input_api.files = [MockFile(self.file_path)]
    errors = PRESUBMIT.CheckNoMixingSources(self.input_api,
                                            [MockFile(self.file_path)],
                                            self.output_api)
    self.assertEqual(1, len(errors))
    self.assertTrue('bar.cc' in str(errors[0]))
    self.assertTrue('bar.mm' in str(errors[0]))
    self.assertTrue('foo.cc' in str(errors[0]))
    self.assertTrue('foo.mm' in str(errors[0]))
    self.assertTrue('bar.m' in str(errors[0]))
    self.assertTrue('bar.c' in str(errors[0]))

  def _AssertNumberOfErrorsWithSources(self, number_of_errors, sources):
-    assert len(
-        sources) == 3, 'This function accepts a list of 3 source files'
+    assert len(sources) == 3, 'This function accepts a list of 3 source files'
    self._GenerateBuildFile(
        textwrap.dedent("""
      rtc_static_library("bar_foo") {
        sources = [
          "%s",
@@ -256,84 +256,76 @@ class CheckNoMixingSourcesTest(unittest.TestCase):
        ],
      }
    """ % (tuple(sources) * 2)))
    self.input_api.files = [MockFile(self.file_path)]
    errors = PRESUBMIT.CheckNoMixingSources(self.input_api,
                                            [MockFile(self.file_path)],
                                            self.output_api)
    self.assertEqual(number_of_errors, len(errors))
    if number_of_errors == 1:
      for source in sources:
        if not source.endswith('.h'):
          self.assertTrue(source in str(errors[0]))

  def _GenerateBuildFile(self, content):
    with open(self.file_path, 'w') as f:
      f.write(content)


class CheckAssertUsageTest(unittest.TestCase):
  def setUp(self):
    self.input_api = MockInputApi()
    self.output_api = MockOutputApi()
-    self._content_with_assert = [
-        'void Foo() {',
-        ' assert(true);',
-        '}'
-    ]
-    self._content_without_assert = [
-        'void Foo() {',
-        ' RTC_CHECK(true);',
-        '}'
-    ]
+    self._content_with_assert = ['void Foo() {', ' assert(true);', '}']
+    self._content_without_assert = ['void Foo() {', ' RTC_CHECK(true);', '}']

  def testDetectsAssertInCcFile(self):
    self.input_api.files = [
        MockFile('with_assert.cc', self._content_with_assert),
        MockFile('without_assert.cc', self._content_without_assert),
    ]
    errors = PRESUBMIT.CheckAssertUsage(self.input_api,
                                        self.output_api, lambda x: True)
    self.assertEqual(1, len(errors))
    self.assertEqual('with_assert.cc', errors[0].items[0])

  def testDetectsAssertInHeaderFile(self):
    self.input_api.files = [
        MockFile('with_assert.h', self._content_with_assert),
        MockFile('without_assert.h', self._content_without_assert),
    ]
    errors = PRESUBMIT.CheckAssertUsage(self.input_api,
                                        self.output_api, lambda x: True)
    self.assertEqual(1, len(errors))
    self.assertEqual('with_assert.h', errors[0].items[0])

  def testDetectsAssertInObjCFile(self):
    self.input_api.files = [
        MockFile('with_assert.m', self._content_with_assert),
        MockFile('without_assert.m', self._content_without_assert),
    ]
    errors = PRESUBMIT.CheckAssertUsage(self.input_api,
                                        self.output_api, lambda x: True)
    self.assertEqual(1, len(errors))
    self.assertEqual('with_assert.m', errors[0].items[0])

  def testDetectsAssertInObjCppFile(self):
    self.input_api.files = [
        MockFile('with_assert.mm', self._content_with_assert),
        MockFile('without_assert.mm', self._content_without_assert),
    ]
    errors = PRESUBMIT.CheckAssertUsage(self.input_api,
                                        self.output_api, lambda x: True)
    self.assertEqual(1, len(errors))
    self.assertEqual('with_assert.mm', errors[0].items[0])

  def testDoesntDetectAssertInOtherFiles(self):
    self.input_api.files = [
        MockFile('with_assert.cpp', self._content_with_assert),
    ]
    errors = PRESUBMIT.CheckAssertUsage(self.input_api,
                                        self.output_api, lambda x: True)
    self.assertEqual(0, len(errors))


if __name__ == '__main__':
  unittest.main()
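For orientation, a sketch of the no-mixing rule the tests above encode: C (.c), C++ (.cc) and Objective-C[++] (.m/.mm) sources must not be mixed in one GN target, with .m and .mm allowed together. The real check is additionally aware of mutually exclusive GN `if` branches, which this assumed sketch ignores:

import re


def CheckNoMixingSources(input_api, gn_files, output_api):
  """Sketch (assumed shape, not the real check): flag GN targets whose
  sources mix C, C++ and Objective-C[++] files."""
  kind_of = {'.c': 'c', '.cc': 'cc', '.m': 'objc', '.mm': 'objc'}
  errors = []
  for gn_file in gn_files:
    contents = input_api.ReadFile(gn_file)
    sources = re.findall(r'"([^"]+\.(?:c|cc|m|mm))"', contents)
    kinds = {kind_of['.' + s.rsplit('.', 1)[1]] for s in sources}
    if len(kinds) > 1:
      errors.append(
          output_api.PresubmitError(
              'Mixing C, C++ and Objective-C sources is not allowed: %s' %
              ', '.join(sorted(set(sources)))))
  return errors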
@@ -1,3 +1,5 @@
+#!/usr/bin/env vpython3
+
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@@ -9,135 +11,131 @@
# This file is inspired to [1].
# [1] - https://cs.chromium.org/chromium/src/PRESUBMIT_test_mocks.py

+from __future__ import absolute_import
import os.path
import re


-class MockInputApi(object):
+class MockInputApi:
  """Mock class for the InputApi class.

  This class can be used for unittests for presubmit by initializing the files
  attribute as the list of changed files.
  """

  def __init__(self):
    self.change = MockChange([], [])
    self.files = []
    self.presubmit_local_path = os.path.dirname(__file__)
    self.re = re  # pylint: disable=invalid-name

  def AffectedSourceFiles(self, file_filter=None):
    return self.AffectedFiles(file_filter=file_filter)

  def AffectedFiles(self, file_filter=None, include_deletes=False):
    for f in self.files:
      if file_filter and not file_filter(f):
        continue
      if not include_deletes and f.Action() == 'D':
        continue
      yield f

  @classmethod
-  def FilterSourceFile(cls,
-                       affected_file,
-                       files_to_check=(),
-                       files_to_skip=()):
+  def FilterSourceFile(cls, affected_file, files_to_check=(), files_to_skip=()):
    # pylint: disable=unused-argument
    return True

  def PresubmitLocalPath(self):
    return self.presubmit_local_path

-  def ReadFile(self, affected_file, mode='rU'):
+  def ReadFile(self, affected_file, mode='r'):
    filename = affected_file.AbsoluteLocalPath()
    for f in self.files:
      if f.LocalPath() == filename:
        with open(filename, mode) as f:
          return f.read()
    # Otherwise, file is not in our mock API.
-    raise IOError, "No such file or directory: '%s'" % filename
+    raise IOError("No such file or directory: '%s'" % filename)


-class MockOutputApi(object):
+class MockOutputApi:
  """Mock class for the OutputApi class.

  An instance of this class can be passed to presubmit unittests for outputing
  various types of results.
  """

-  class PresubmitResult(object):
+  class PresubmitResult:
    def __init__(self, message, items=None, long_text=''):
      self.message = message
      self.items = items
      self.long_text = long_text

    def __repr__(self):
      return self.message

  class PresubmitError(PresubmitResult):
    def __init__(self, message, items=None, long_text=''):
-      MockOutputApi.PresubmitResult.__init__(self, message, items,
-                                             long_text)
+      MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
      self.type = 'error'


-class MockChange(object):
+class MockChange:
  """Mock class for Change class.

  This class can be used in presubmit unittests to mock the query of the
  current change.
  """

  def __init__(self, changed_files, bugs_from_description, tags=None):
    self._changed_files = changed_files
    self._bugs_from_description = bugs_from_description
    self.tags = dict() if not tags else tags

  def BugsFromDescription(self):
    return self._bugs_from_description

  def __getattr__(self, attr):
    """Return tags directly as attributes on the object."""
    if not re.match(r"^[A-Z_]*$", attr):
      raise AttributeError(self, attr)
    return self.tags.get(attr)


-class MockFile(object):
+class MockFile:
  """Mock class for the File class.

  This class can be used to form the mock list of changed files in
  MockInputApi for presubmit unittests.
  """

  def __init__(self,
               local_path,
               new_contents=None,
               old_contents=None,
               action='A'):
    if new_contents is None:
      new_contents = ["Data"]
    self._local_path = local_path
    self._new_contents = new_contents
-    self._changed_contents = [(i + 1, l)
-                              for i, l in enumerate(new_contents)]
+    self._changed_contents = [(i + 1, l) for i, l in enumerate(new_contents)]
    self._action = action
    self._old_contents = old_contents

  def Action(self):
    return self._action

  def ChangedContents(self):
    return self._changed_contents

  def NewContents(self):
    return self._new_contents

  def LocalPath(self):
    return self._local_path

  def AbsoluteLocalPath(self):
    return self._local_path

  def OldContents(self):
    return self._old_contents
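Taken together, these mocks let a presubmit check run as a plain unittest with no depot_tools checkout. A hypothetical driving snippet (the check function is assumed and stands in for any check with the (input_api, output_api, filter) shape):

input_api = MockInputApi()
output_api = MockOutputApi()
input_api.files = [MockFile('foo.proto', ['syntax = "proto2";'])]
errors = CheckNewlineAtTheEndOfProtoFiles(input_api, output_api,
                                          lambda f: True)
print([str(e) for e in errors])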
1 pylintrc

@@ -28,6 +28,7 @@ disable=
  exec-used,
  fixme,
  import-error,
+  import-outside-toplevel,
  missing-docstring,
  no-init,
  no-member,
@@ -1,3 +1,5 @@
+#!/usr/bin/env vpython3
+
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -6,45 +8,48 @@
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

+# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
+USE_PYTHON3 = True
+

def _LicenseHeader(input_api):
  """Returns the license header regexp."""
  # Accept any year number from 2003 to the current year
  current_year = int(input_api.time.strftime('%Y'))
-  allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
+  allowed_years = (str(s) for s in reversed(range(2003, current_year + 1)))
  years_re = '(' + '|'.join(allowed_years) + ')'
  license_header = (
      r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
      r'All [Rr]ights [Rr]eserved\.\n'
      r'.*?\n'
      r'.*? Use of this source code is governed by a BSD-style license\n'
      r'.*? that can be found in the LICENSE file in the root of the source\n'
      r'.*? tree\. An additional intellectual property rights grant can be '
      r'found\n'
      r'.*? in the file PATENTS\. All contributing project authors may\n'
      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
  ) % {
      'year': years_re,
  }
  return license_header


def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(
      input_api.canned_checks.CheckLicense(input_api, output_api,
                                           _LicenseHeader(input_api)))
  return results


def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results


def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results
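The xrange-to-range swap above is the canonical Python 3 fix: xrange is gone, and range already returns a lazy sequence that supports reversed() without materializing a list. A quick illustration (standard-library behavior, not from the CL):

years = reversed(range(2003, 2023))
print('|'.join(str(y) for y in years)[:14])  # 2022|2021|2020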
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -51,172 +51,167 @@ import find_depot_tools


def _ParseArgs():
  parser = argparse.ArgumentParser(description='libwebrtc.aar generator.')
  parser.add_argument(
      '--build-dir',
      type=os.path.abspath,
      help='Build dir. By default will create and use temporary dir.')
  parser.add_argument('--output',
                      default='libwebrtc.aar',
                      type=os.path.abspath,
                      help='Output file of the script.')
-  parser.add_argument(
-      '--arch',
-      default=DEFAULT_ARCHS,
-      nargs='*',
-      help='Architectures to build. Defaults to %(default)s.')
+  parser.add_argument('--arch',
+                      default=DEFAULT_ARCHS,
+                      nargs='*',
+                      help='Architectures to build. Defaults to %(default)s.')
  parser.add_argument('--use-goma',
                      action='store_true',
                      default=False,
                      help='Use goma.')
  parser.add_argument('--verbose',
                      action='store_true',
                      default=False,
                      help='Debug logging.')
  parser.add_argument(
      '--extra-gn-args',
      default=[],
      nargs='*',
      help="""Additional GN arguments to be used during Ninja generation.
              These are passed to gn inside `--args` switch and
              applied after any other arguments and will
              override any values defined by the script.
              Example of building debug aar file:
              build_aar.py --extra-gn-args='is_debug=true'""")
  parser.add_argument(
      '--extra-ninja-switches',
      default=[],
      nargs='*',
      help="""Additional Ninja switches to be used during compilation.
              These are applied after any other Ninja switches.
              Example of enabling verbose Ninja output:
              build_aar.py --extra-ninja-switches='-v'""")
  parser.add_argument(
      '--extra-gn-switches',
      default=[],
      nargs='*',
      help="""Additional GN switches to be used during compilation.
              These are applied after any other GN switches.
              Example of enabling verbose GN output:
              build_aar.py --extra-gn-switches='-v'""")
  return parser.parse_args()


def _RunGN(args):
  cmd = [
      sys.executable,
      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py')
  ]
  cmd.extend(args)
  logging.debug('Running: %r', cmd)
  subprocess.check_call(cmd)


def _RunNinja(output_directory, args):
  cmd = [
      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'), '-C',
      output_directory
  ]
  cmd.extend(args)
  logging.debug('Running: %r', cmd)
  subprocess.check_call(cmd)


def _EncodeForGN(value):
  """Encodes value as a GN literal."""
  if isinstance(value, str):
    return '"' + value + '"'
-  elif isinstance(value, bool):
-    return repr(value).lower()
-  else:
-    return repr(value)
+  if isinstance(value, bool):
+    return repr(value).lower()
+  return repr(value)


def _GetOutputDirectory(build_dir, arch):
  """Returns the GN output directory for the target architecture."""
  return os.path.join(build_dir, arch)


def _GetTargetCpu(arch):
  """Returns target_cpu for the GN build with the given architecture."""
  if arch in ['armeabi', 'armeabi-v7a']:
    return 'arm'
-  elif arch == 'arm64-v8a':
-    return 'arm64'
-  elif arch == 'x86':
-    return 'x86'
-  elif arch == 'x86_64':
-    return 'x64'
-  else:
-    raise Exception('Unknown arch: ' + arch)
+  if arch == 'arm64-v8a':
+    return 'arm64'
+  if arch == 'x86':
+    return 'x86'
+  if arch == 'x86_64':
+    return 'x64'
+  raise Exception('Unknown arch: ' + arch)


def _GetArmVersion(arch):
  """Returns arm_version for the GN build with the given architecture."""
  if arch == 'armeabi':
    return 6
-  elif arch == 'armeabi-v7a':
-    return 7
-  elif arch in ['arm64-v8a', 'x86', 'x86_64']:
-    return None
-  else:
-    raise Exception('Unknown arch: ' + arch)
+  if arch == 'armeabi-v7a':
+    return 7
+  if arch in ['arm64-v8a', 'x86', 'x86_64']:
+    return None
+  raise Exception('Unknown arch: ' + arch)


def Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches,
          extra_ninja_switches):
  """Generates target architecture using GN and builds it using ninja."""
  logging.info('Building: %s', arch)
  output_directory = _GetOutputDirectory(build_dir, arch)
  gn_args = {
      'target_os': 'android',
      'is_debug': False,
      'is_component_build': False,
      'rtc_include_tests': False,
      'target_cpu': _GetTargetCpu(arch),
      'use_goma': use_goma
  }
  arm_version = _GetArmVersion(arch)
  if arm_version:
    gn_args['arm_version'] = arm_version
-  gn_args_str = '--args=' + ' '.join(
-      [k + '=' + _EncodeForGN(v)
-       for k, v in gn_args.items()] + extra_gn_args)
+  gn_args_str = '--args=' + ' '.join(
+      [k + '=' + _EncodeForGN(v) for k, v in gn_args.items()] + extra_gn_args)

  gn_args_list = ['gen', output_directory, gn_args_str]
  gn_args_list.extend(extra_gn_switches)
  _RunGN(gn_args_list)

  ninja_args = TARGETS[:]
  if use_goma:
    ninja_args.extend(['-j', '200'])
  ninja_args.extend(extra_ninja_switches)
  _RunNinja(output_directory, ninja_args)


def CollectCommon(aar_file, build_dir, arch):
  """Collects architecture independent files into the .aar-archive."""
  logging.info('Collecting common files.')
  output_directory = _GetOutputDirectory(build_dir, arch)
  aar_file.write(MANIFEST_FILE, 'AndroidManifest.xml')
  aar_file.write(os.path.join(output_directory, JAR_FILE), 'classes.jar')


def Collect(aar_file, build_dir, arch):
  """Collects architecture specific files into the .aar-archive."""
  logging.info('Collecting: %s', arch)
  output_directory = _GetOutputDirectory(build_dir, arch)

  abi_dir = os.path.join('jni', arch)
  for so_file in NEEDED_SO_FILES:
    aar_file.write(os.path.join(output_directory, so_file),
                   os.path.join(abi_dir, so_file))


def GenerateLicenses(output_dir, build_dir, archs):
  builder = LicenseBuilder(
      [_GetOutputDirectory(build_dir, arch) for arch in archs], TARGETS)
  builder.GenerateLicenseText(output_dir)


def BuildAar(archs,

@@ -226,35 +221,35 @@ def BuildAar(archs,
             ext_build_dir=None,
             extra_gn_switches=None,
             extra_ninja_switches=None):
  extra_gn_args = extra_gn_args or []
  extra_gn_switches = extra_gn_switches or []
  extra_ninja_switches = extra_ninja_switches or []
  build_dir = ext_build_dir if ext_build_dir else tempfile.mkdtemp()

  for arch in archs:
    Build(build_dir, arch, use_goma, extra_gn_args, extra_gn_switches,
          extra_ninja_switches)

  with zipfile.ZipFile(output_file, 'w') as aar_file:
    # Architecture doesn't matter here, arbitrarily using the first one.
    CollectCommon(aar_file, build_dir, archs[0])
    for arch in archs:
      Collect(aar_file, build_dir, arch)

  license_dir = os.path.dirname(os.path.realpath(output_file))
  GenerateLicenses(license_dir, build_dir, archs)

  if not ext_build_dir:
    shutil.rmtree(build_dir, True)


def main():
  args = _ParseArgs()
  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

  BuildAar(args.arch, args.output, args.use_goma, args.extra_gn_args,
           args.build_dir, args.extra_gn_switches, args.extra_ninja_switches)


if __name__ == '__main__':
  sys.exit(main())
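A hypothetical programmatic invocation of the generator above (argument values illustrative; the keyword names follow the BuildAar signature fragments visible in this diff, and assume the script's directory is importable):

from build_aar import BuildAar

BuildAar(['armeabi-v7a', 'arm64-v8a'],
         'libwebrtc.aar',
         use_goma=False,
         extra_gn_args=['is_debug=true'],
         ext_build_dir='/tmp/aar-build')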
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -7,8 +7,7 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
-"""Script for building and testing WebRTC AAR.
-"""
+"""Script for building and testing WebRTC AAR."""

import argparse
import logging

@@ -36,110 +35,109 @@ AAR_PROJECT_DIR = os.path.join(CHECKOUT_ROOT, 'examples/aarproject')


def _ParseArgs():
  parser = argparse.ArgumentParser(description='Releases WebRTC on Bintray.')
  parser.add_argument('--use-goma',
                      action='store_true',
                      default=False,
                      help='Use goma.')
  parser.add_argument('--skip-tests',
                      action='store_true',
                      default=False,
                      help='Skips running the tests.')
  parser.add_argument(
      '--build-dir',
      default=None,
      help='Temporary directory to store the build files. If not specified, '
      'a new directory will be created.')
  parser.add_argument('--verbose',
                      action='store_true',
                      default=False,
                      help='Debug logging.')
  return parser.parse_args()


def _GetCommitHash():
  commit_hash = subprocess.check_output(
      ['git', 'rev-parse', 'HEAD'], cwd=CHECKOUT_ROOT).decode('UTF-8').strip()
  return commit_hash


def _GetCommitPos():
  commit_message = subprocess.check_output(
      ['git', 'rev-list', '--format=%B', '--max-count=1', 'HEAD'],
      cwd=CHECKOUT_ROOT).decode('UTF-8')
  commit_pos_match = re.search(COMMIT_POSITION_REGEX, commit_message,
                               re.MULTILINE)
  if not commit_pos_match:
    raise Exception('Commit position not found in the commit message: %s' %
                    commit_message)
  return commit_pos_match.group(1)


def _TestAAR(build_dir):
  """Runs AppRTCMobile tests using the AAR. Returns true if the tests pass."""
  logging.info('Testing library.')

  # Uninstall any existing version of AppRTCMobile.
  logging.info('Uninstalling previous AppRTCMobile versions. It is okay for '
               'these commands to fail if AppRTCMobile is not installed.')
  subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc'])
  subprocess.call([ADB_BIN, 'uninstall', 'org.appspot.apprtc.test'])

  # Run tests.
  try:
    # First clean the project.
    subprocess.check_call([GRADLEW_BIN, 'clean'], cwd=AAR_PROJECT_DIR)
    # Then run the tests.
    subprocess.check_call([
        GRADLEW_BIN, 'connectedDebugAndroidTest',
        '-PaarDir=' + os.path.abspath(build_dir)
    ],
                          cwd=AAR_PROJECT_DIR)
  except subprocess.CalledProcessError:
    logging.exception('Test failure.')
    return False  # Clean or tests failed

  return True  # Tests pass


def BuildAndTestAar(use_goma, skip_tests, build_dir):
  version = '1.0.' + _GetCommitPos()
  commit = _GetCommitHash()
  logging.info('Building and Testing AAR version %s with hash %s', version,
               commit)

  # If build directory is not specified, create a temporary directory.
  use_tmp_dir = not build_dir
  if use_tmp_dir:
    build_dir = tempfile.mkdtemp()

  try:
    base_name = ARTIFACT_ID + '-' + version
    aar_file = os.path.join(build_dir, base_name + '.aar')

    logging.info('Building at %s', build_dir)
    BuildAar(ARCHS,
             aar_file,
             use_goma=use_goma,
             ext_build_dir=os.path.join(build_dir, 'aar-build'))

    tests_pass = skip_tests or _TestAAR(build_dir)
    if not tests_pass:
      raise Exception('Test failure.')

    logging.info('Test success.')

  finally:
    if use_tmp_dir:
      shutil.rmtree(build_dir, True)


def main():
  args = _ParseArgs()
  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
  BuildAndTestAar(args.use_goma, args.skip_tests, args.build_dir)


if __name__ == '__main__':
  sys.exit(main())
@@ -1,4 +1,5 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3
+
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -12,36 +13,34 @@ import re
import sys


-def replace_double_quote(line):
-  re_rtc_import = re.compile(
-      r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)', re.DOTALL)
+def _ReplaceDoubleQuote(line):
+  re_rtc_import = re.compile(r'(\s*)#import\s+"(\S+/|)(\w+\+|)RTC(\w+)\.h"(.*)',
+                             re.DOTALL)
  match = re_rtc_import.match(line)
  if not match:
    return line

  return '%s#import <WebRTC/%sRTC%s.h>%s' % (match.group(1), match.group(3),
                                             match.group(4), match.group(5))


-def process(input_file, output_file):
+def Process(input_file, output_file):
  with open(input_file, 'rb') as fb, open(output_file, 'wb') as fw:
    for line in fb.read().decode('UTF-8').splitlines():
-      fw.write(replace_double_quote(line).encode('UTF-8'))
+      fw.write(_ReplaceDoubleQuote(line).encode('UTF-8'))
      fw.write(b"\n")


def main():
  parser = argparse.ArgumentParser(
      description=
      "Copy headers of framework and replace double-quoted includes to" +
      " angle-bracketed respectively.")
-  parser.add_argument('--input',
-                      help='Input header files to copy.',
-                      type=str)
+  parser.add_argument('--input', help='Input header files to copy.', type=str)
  parser.add_argument('--output', help='Output file.', type=str)
  parsed_args = parser.parse_args()
-  return process(parsed_args.input, parsed_args.output)
+  return Process(parsed_args.input, parsed_args.output)


if __name__ == '__main__':
  sys.exit(main())
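To see the rewrite in action (inputs mirror the tests that follow):

# Illustration using the function defined above:
print(_ReplaceDoubleQuote('#import "RTCMacros.h"'))
# -> #import <WebRTC/RTCMacros.h>
print(_ReplaceDoubleQuote(
    '#import "components/video_codec/RTCVideoDecoderFactoryH264.h"'))
# -> #import <WebRTC/RTCVideoDecoderFactoryH264.h>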
@@ -1,4 +1,5 @@
-#!/usr/bin/env python
+#!/usr/bin/env vpython3
+
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -8,28 +9,27 @@
# be found in the AUTHORS file in the root of the source tree.

import unittest
-from copy_framework_header import replace_double_quote
+from copy_framework_header import _ReplaceDoubleQuote


class TestCopyFramework(unittest.TestCase):
  def testReplaceDoubleQuote(self):
    self.assertEqual(_ReplaceDoubleQuote("""#import "RTCMacros.h\""""),
                     """#import <WebRTC/RTCMacros.h>""")
    self.assertEqual(_ReplaceDoubleQuote("""#import "RTCMacros.h\"\n"""),
                     """#import <WebRTC/RTCMacros.h>\n""")
    self.assertEqual(
        _ReplaceDoubleQuote("""#import "UIDevice+RTCDevice.h\"\n"""),
        """#import <WebRTC/UIDevice+RTCDevice.h>\n""")
    self.assertEqual(
        _ReplaceDoubleQuote("#import \"components/video_codec/" +
                            "RTCVideoDecoderFactoryH264.h\"\n"),
        """#import <WebRTC/RTCVideoDecoderFactoryH264.h>\n""")
    self.assertEqual(
        _ReplaceDoubleQuote(
            """@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n"""),
        """@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) *\n""")


if __name__ == '__main__':
  unittest.main()
|
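The renamed test runs directly under the project's bundled Python 3 environment; assuming the file sits next to copy_framework_header.py on disk (the path here is an assumption), an invocation would look like:

$ vpython3 copy_framework_header_test.py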
File diff suppressed because it is too large
Load diff
@@ -1,4 +1,5 @@

#!/usr/bin/env vpython
#!/usr/bin/env vpython3

# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -7,7 +8,6 @@

# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

from __future__ import absolute_import

import glob
import os

@@ -15,16 +15,11 @@ import shutil

import sys
import tempfile
import unittest
import mock

SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.join(SCRIPT_DIR, os.pardir)
sys.path.append(PARENT_DIR)
# Workaround for the presubmit, plan only to run in py3 now.
# TODO(webrtc:13418) Remove when py2 presubmit is gone.
if sys.version_info >= (3, 3):
  from unittest import mock
else:
  import mock

import roll_deps
from roll_deps import CalculateChangedDeps, FindAddedDeps, \

@@ -54,293 +49,284 @@ NO_CHROMIUM_REVISION_UPDATE = ChromiumRevisionUpdate('cafe', 'cafe')


class TestError(Exception):
  pass


class FakeCmd(object):
class FakeCmd:
  def __init__(self):
    self.expectations = []

  def AddExpectation(self, *args, **kwargs):
    returns = kwargs.pop('_returns', None)
    ignores = kwargs.pop('_ignores', [])
    self.expectations.append((args, kwargs, returns, ignores))

  def __call__(self, *args, **kwargs):
    if not self.expectations:
      raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
    exp_args, exp_kwargs, exp_returns, ignores = self.expectations.pop(0)
    for item in ignores:
      kwargs.pop(item, None)
    if args != exp_args or kwargs != exp_kwargs:
      message = 'Expected:\n  args: %s\n  kwargs: %s\n' % (exp_args,
                                                           exp_kwargs)
      message = 'Expected:\n  args: %s\n  kwargs: %s\n' % (exp_args, exp_kwargs)
      message += 'Got:\n  args: %s\n  kwargs: %s\n' % (args, kwargs)
      raise TestError(message)
    return exp_returns


class NullCmd(object):
class NullCmd:
  """No-op mock when calls mustn't be checked. """

  def __call__(self, *args, **kwargs):
    # Empty stdout and stderr.
    return None, None


class TestRollChromiumRevision(unittest.TestCase):
  def setUp(self):
    self._output_dir = tempfile.mkdtemp()
    test_data_dir = os.path.join(SCRIPT_DIR, 'testdata', 'roll_deps')
    for test_file in glob.glob(os.path.join(test_data_dir, '*')):
      shutil.copy(test_file, self._output_dir)
    join = lambda f: os.path.join(self._output_dir, f)
    self._webrtc_depsfile = join('DEPS')
    self._new_cr_depsfile = join('DEPS.chromium.new')
    self._webrtc_depsfile_android = join('DEPS.with_android_deps')
    self._new_cr_depsfile_android = join('DEPS.chromium.with_android_deps')
    self.fake = FakeCmd()

  def tearDown(self):
    shutil.rmtree(self._output_dir, ignore_errors=True)
    self.assertEqual(self.fake.expectations, [])

  def testVarLookup(self):
    local_scope = {'foo': 'wrong', 'vars': {'foo': 'bar'}}
    lookup = roll_deps.VarLookup(local_scope)
    self.assertEqual(lookup('foo'), 'bar')

  def testUpdateDepsFile(self):
    new_rev = 'aaaaabbbbbcccccdddddeeeeefffff0000011111'
    current_rev = TEST_DATA_VARS['chromium_revision']

    with open(self._new_cr_depsfile_android) as deps_file:
      new_cr_contents = deps_file.read()

    UpdateDepsFile(self._webrtc_depsfile,
                   ChromiumRevisionUpdate(current_rev, new_rev), [],
                   new_cr_contents)
    with open(self._webrtc_depsfile) as deps_file:
      deps_contents = deps_file.read()
    self.assertTrue(
        new_rev in deps_contents,
        'Failed to find %s in\n%s' % (new_rev, deps_contents))
    self.assertTrue(new_rev in deps_contents,
                    'Failed to find %s in\n%s' % (new_rev, deps_contents))

  def _UpdateDepsSetup(self):
    with open(self._webrtc_depsfile_android) as deps_file:
      webrtc_contents = deps_file.read()
    with open(self._new_cr_depsfile_android) as deps_file:
      new_cr_contents = deps_file.read()
    webrtc_deps = ParseDepsDict(webrtc_contents)
    new_cr_deps = ParseDepsDict(new_cr_contents)

    changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
    with mock.patch('roll_deps._RunCommand', NullCmd()):
      UpdateDepsFile(self._webrtc_depsfile_android,
                     NO_CHROMIUM_REVISION_UPDATE, changed_deps,
                     new_cr_contents)
      UpdateDepsFile(self._webrtc_depsfile_android, NO_CHROMIUM_REVISION_UPDATE,
                     changed_deps, new_cr_contents)

    with open(self._webrtc_depsfile_android) as deps_file:
      updated_contents = deps_file.read()

    return webrtc_contents, updated_contents

  def testUpdateAndroidGeneratedDeps(self):
    _, updated_contents = self._UpdateDepsSetup()

    changed = 'third_party/android_deps/libs/android_arch_core_common'
    changed_version = '1.0.0-cr0'
    self.assertTrue(changed in updated_contents)
    self.assertTrue(changed_version in updated_contents)

  def testAddAndroidGeneratedDeps(self):
    webrtc_contents, updated_contents = self._UpdateDepsSetup()

    added = 'third_party/android_deps/libs/android_arch_lifecycle_common'
    self.assertFalse(added in webrtc_contents)
    self.assertTrue(added in updated_contents)

  def testRemoveAndroidGeneratedDeps(self):
    webrtc_contents, updated_contents = self._UpdateDepsSetup()

    removed = 'third_party/android_deps/libs/android_arch_lifecycle_runtime'
    self.assertTrue(removed in webrtc_contents)
    self.assertFalse(removed in updated_contents)

  def testParseDepsDict(self):
    with open(self._webrtc_depsfile) as deps_file:
      deps_contents = deps_file.read()
    local_scope = ParseDepsDict(deps_contents)
    vars_dict = local_scope['vars']

    def AssertVar(variable_name):
      self.assertEqual(vars_dict[variable_name],
                       TEST_DATA_VARS[variable_name])
      self.assertEqual(vars_dict[variable_name], TEST_DATA_VARS[variable_name])

    AssertVar('chromium_git')
    AssertVar('chromium_revision')
    self.assertEqual(len(local_scope['deps']), 3)
    self.assertEqual(len(local_scope['deps_os']), 1)

  def testGetMatchingDepsEntriesReturnsPathInSimpleCase(self):
    entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing/gtest')
    self.assertEqual(len(entries), 1)
    self.assertEqual(entries[0], DEPS_ENTRIES['src/testing/gtest'])

  def testGetMatchingDepsEntriesHandlesSimilarStartingPaths(self):
    entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/testing')
    self.assertEqual(len(entries), 2)

  def testGetMatchingDepsEntriesHandlesTwoPathsWithIdenticalFirstParts(self):
    entries = GetMatchingDepsEntries(DEPS_ENTRIES, 'src/build')
    self.assertEqual(len(entries), 1)

  def testCalculateChangedDeps(self):
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile)
    with mock.patch('roll_deps._RunCommand', self.fake):
      _SetupGitLsRemoteCall(
          self.fake,
          'https://chromium.googlesource.com/chromium/src/build',
          BUILD_NEW_REV)
      _SetupGitLsRemoteCall(
          self.fake, 'https://chromium.googlesource.com/chromium/src/build',
          BUILD_NEW_REV)
      changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)

    self.assertEqual(len(changed_deps), 3)
    self.assertEqual(changed_deps[0].path, 'src/build')
    self.assertEqual(changed_deps[0].current_rev, BUILD_OLD_REV)
    self.assertEqual(changed_deps[0].new_rev, BUILD_NEW_REV)

    self.assertEqual(changed_deps[1].path, 'src/buildtools/linux64')
    self.assertEqual(changed_deps[1].package, 'gn/gn/linux-amd64')
    self.assertEqual(changed_deps[1].current_version,
                     'git_revision:69ec4fca1fa69ddadae13f9e6b7507efa0675263')
    self.assertEqual(changed_deps[1].new_version,
                     'git_revision:new-revision')
    self.assertEqual(changed_deps[1].new_version, 'git_revision:new-revision')

    self.assertEqual(changed_deps[2].path, 'src/third_party/depot_tools')
    self.assertEqual(changed_deps[2].current_rev, DEPOTTOOLS_OLD_REV)
    self.assertEqual(changed_deps[2].new_rev, DEPOTTOOLS_NEW_REV)

  def testWithDistinctDeps(self):
    """Check CalculateChangedDeps works when deps are added/removed."""
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
    changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
    self.assertEqual(len(changed_deps), 1)
    self.assertEqual(
        changed_deps[0].path,
        'src/third_party/android_deps/libs/android_arch_core_common')
    self.assertEqual(
        changed_deps[0].package,
        'chromium/third_party/android_deps/libs/android_arch_core_common')
    self.assertEqual(changed_deps[0].current_version, 'version:0.9.0')
    self.assertEqual(changed_deps[0].new_version, 'version:1.0.0-cr0')

  def testFindAddedDeps(self):
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
    added_android_paths, other_paths = FindAddedDeps(
        webrtc_deps, new_cr_deps)
    added_android_paths, other_paths = FindAddedDeps(webrtc_deps, new_cr_deps)
    self.assertEqual(added_android_paths, [
        'src/third_party/android_deps/libs/android_arch_lifecycle_common'
    ])
    self.assertEqual(
        added_android_paths,
        ['src/third_party/android_deps/libs/android_arch_lifecycle_common'])
    self.assertEqual(other_paths, [])

  def testFindRemovedDeps(self):
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
    removed_android_paths, other_paths = FindRemovedDeps(
        webrtc_deps, new_cr_deps)
    self.assertEqual(removed_android_paths, [
        'src/third_party/android_deps/libs/android_arch_lifecycle_runtime'
    ])
    self.assertEqual(
        removed_android_paths,
        ['src/third_party/android_deps/libs/android_arch_lifecycle_runtime'])
    self.assertEqual(other_paths, [])

  def testMissingDepsIsDetected(self):
    """Check error is reported when deps cannot be automatically removed."""
    # The situation at test is the following:
    #   * A WebRTC DEPS entry is missing from Chromium.
    #   * The dependency isn't an android_deps (those are supported).
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
    _, other_paths = FindRemovedDeps(webrtc_deps, new_cr_deps)
    self.assertEqual(
        other_paths,
        ['src/buildtools/linux64', 'src/third_party/depot_tools'])
    self.assertEqual(other_paths,
                     ['src/buildtools/linux64', 'src/third_party/depot_tools'])

  def testExpectedDepsIsNotReportedMissing(self):
    """Some deps musn't be seen as missing, even if absent from Chromium."""
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)
    removed_android_paths, other_paths = FindRemovedDeps(
        webrtc_deps, new_cr_deps)
    self.assertTrue('src/build' not in removed_android_paths)
    self.assertTrue('src/build' not in other_paths)

  def _CommitMessageSetup(self):
    webrtc_deps = ParseLocalDepsFile(self._webrtc_depsfile_android)
    new_cr_deps = ParseLocalDepsFile(self._new_cr_depsfile_android)

    changed_deps = CalculateChangedDeps(webrtc_deps, new_cr_deps)
    added_paths, _ = FindAddedDeps(webrtc_deps, new_cr_deps)
    removed_paths, _ = FindRemovedDeps(webrtc_deps, new_cr_deps)

    current_commit_pos = 'cafe'
    new_commit_pos = 'f00d'

    commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE,
                                       current_commit_pos,
                                       new_commit_pos, changed_deps,
                                       added_paths, removed_paths)
    commit_msg = GenerateCommitMessage(NO_CHROMIUM_REVISION_UPDATE,
                                       current_commit_pos, new_commit_pos,
                                       changed_deps, added_paths, removed_paths)

    return [l.strip() for l in commit_msg.split('\n')]

  def testChangedDepsInCommitMessage(self):
    commit_lines = self._CommitMessageSetup()

    changed = '* src/third_party/android_deps/libs/' \
              'android_arch_core_common: version:0.9.0..version:1.0.0-cr0'
    self.assertTrue(changed in commit_lines)
    # Check it is in adequate section.
    changed_line = commit_lines.index(changed)
    self.assertTrue('Changed' in commit_lines[changed_line - 1])

  def testAddedDepsInCommitMessage(self):
    commit_lines = self._CommitMessageSetup()

    added = '* src/third_party/android_deps/libs/' \
            'android_arch_lifecycle_common'
    self.assertTrue(added in commit_lines)
    # Check it is in adequate section.
    added_line = commit_lines.index(added)
    self.assertTrue('Added' in commit_lines[added_line - 1])

  def testRemovedDepsInCommitMessage(self):
    commit_lines = self._CommitMessageSetup()

    removed = '* src/third_party/android_deps/libs/' \
              'android_arch_lifecycle_runtime'
    self.assertTrue(removed in commit_lines)
    # Check it is in adequate section.
    removed_line = commit_lines.index(removed)
    self.assertTrue('Removed' in commit_lines[removed_line - 1])


class TestChooseCQMode(unittest.TestCase):
  def testSkip(self):
    self.assertEqual(ChooseCQMode(True, 99, 500000, 500100), 0)

  def testDryRun(self):
    self.assertEqual(ChooseCQMode(False, 101, 500000, 500100), 1)

  def testSubmit(self):
    self.assertEqual(ChooseCQMode(False, 100, 500000, 500100), 2)


def _SetupGitLsRemoteCall(cmd_fake, url, revision):
  cmd = ['git', 'ls-remote', url, revision]
  cmd_fake.AddExpectation(cmd, _returns=(revision, None))


if __name__ == '__main__':
  unittest.main()
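FakeCmd above is a small hand-rolled expectation mock. A usage sketch of the pattern, relying on the class as defined in the test file; the command and revision strings below are toy values, not ones roll_deps actually issues:

# Queue one expected call and its canned (stdout, stderr) return value.
fake = FakeCmd()
fake.AddExpectation(['git', 'ls-remote', 'some-url', 'some-rev'],
                    _returns=('some-rev', None))

# The fake raises TestError on any call that does not match the queued
# expectation, and replays the canned return value when it does.
stdout, stderr = fake(['git', 'ls-remote', 'some-url', 'some-rev'])
assert stdout == 'some-rev'
assert fake.expectations == []  # All expectations consumed, as tearDown checks.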
@@ -1,3 +1,5 @@

#!/usr/bin/env vpython3

# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -16,19 +18,19 @@ WEBRTC_VERSION_RE = re.compile(


if __name__ == '__main__':
  args = sys.argv
  if len(args) != 2:
    print('Usage: binary_version_test.py <FILE_NAME>')
    exit(1)
    sys.exit(1)
  filename = sys.argv[1]
  output = subprocess.check_output(['strings', filename])
  strings_in_binary = output.decode('utf-8').splitlines()
  for symbol in strings_in_binary:
    if WEBRTC_VERSION_RE.match(symbol):
      with open('webrtc_binary_version_check', 'w') as f:
        f.write(symbol)
      exit(0)
      sys.exit(0)
  print('WebRTC source timestamp not found in "%s"' % filename)
  print('Check why "kSourceTimestamp" from call/version.cc is not linked '
        '(or why it has been optimized away by the compiler/linker)')
  exit(1)
  sys.exit(1)
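The switch from the bare exit() builtin to sys.exit() matters for scripts: exit() is a convenience injected by the site module and can be missing when the interpreter runs with site initialization disabled, while sys.exit() is always available. A minimal illustration:

import sys

# sys.exit raises SystemExit with the given status code; it works in any
# interpreter mode, unlike the site-provided exit() helper.
try:
  sys.exit(1)
except SystemExit as e:
  print('would have exited with status', e.code)  # -> 1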
@@ -1,4 +1,5 @@

#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -20,9 +21,8 @@ import shutil

import subprocess
import sys
import tempfile
#pylint: disable=relative-import
from presubmit_checks_lib.build_helpers import GetClangTidyPath, \
     GetCompilationCommand
from presubmit_checks_lib.build_helpers import (GetClangTidyPath,
                                                GetCompilationCommand)

# We enable all checkers by default for investigation purpose.
# This includes clang-analyzer-* checks.

@@ -32,66 +32,66 @@ CHECKER_OPTION = '-checks=*'


def Process(filepath, args):
  # Build directory is needed to gather compilation flags.
  # Create a temporary one (instead of reusing an existing one)
  # to keep the CLI simple and unencumbered.
  out_dir = tempfile.mkdtemp('clang_tidy')

  try:
    gn_args = []  # Use default build.
    command = GetCompilationCommand(filepath, gn_args, out_dir)

    # Remove warning flags. They aren't needed and they cause trouble
    # when clang-tidy doesn't match most recent clang.
    # Same battle for -f (e.g. -fcomplete-member-pointers).
    command = [
        arg for arg in command
        if not (arg.startswith('-W') or arg.startswith('-f'))
    ]

    # Path from build dir.
    rel_path = os.path.relpath(os.path.abspath(filepath), out_dir)

    # Replace clang++ by clang-tidy
    command[0:1] = [GetClangTidyPath(), CHECKER_OPTION, rel_path
                    ] + args + ['--']  # Separator for clang flags.
    print "Running: %s" % ' '.join(command)
    print("Running: %s" % ' '.join(command))
    # Run from build dir so that relative paths are correct.
    p = subprocess.Popen(command,
                         cwd=out_dir,
                         stdout=sys.stdout,
                         stderr=sys.stderr)
    p.communicate()
    return p.returncode
  finally:
    shutil.rmtree(out_dir, ignore_errors=True)


def ValidateCC(filepath):
  """We can only analyze .cc files. Provide explicit message about that."""
  if filepath.endswith('.cc'):
    return filepath
  msg = ('%s not supported.\n'
         'For now, we can only analyze translation units (.cc files).' %
         filepath)
  raise argparse.ArgumentTypeError(msg)


def Main():
  description = (
      "Run clang-tidy on single cc file.\n"
      "Use flags, defines and include paths as in default debug build.\n"
      "WARNING, this is a POC version with rough edges.")
  parser = argparse.ArgumentParser(description=description)
  parser.add_argument('filepath',
                      help='Specifies the path of the .cc file to analyze.',
                      type=ValidateCC)
  parser.add_argument('args',
                      nargs=argparse.REMAINDER,
                      help='Arguments passed to clang-tidy')
  parsed_args = parser.parse_args()
  return Process(parsed_args.filepath, parsed_args.args)


if __name__ == '__main__':
  sys.exit(Main())
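A typical invocation, assuming the script lives at tools_webrtc/clang_tidy.py (the script path and the example .cc file are assumptions); everything after the file argument is captured by the REMAINDER parameter and forwarded to clang-tidy:

$ vpython3 tools_webrtc/clang_tidy.py modules/audio_processing/some_file.cc \
    -checks=-*,clang-analyzer-core.*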
@@ -1,4 +1,5 @@

#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -27,21 +28,20 @@ TESTS = [


def main():
  cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] + TESTS +
         ['-b out/coverage', '-o out/report'] +
         ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] +
         ['-c \'out/coverage/%s\'' % t for t in TESTS])

  def WithXvfb(binary):
    return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary)

  modules_unittests = 'out/coverage/modules_unittests'
  cmd[cmd.index('-c \'%s\'' %
                modules_unittests)] = WithXvfb(modules_unittests)
  cmd[cmd.index('-c \'%s\'' % modules_unittests)] = WithXvfb(modules_unittests)

  print ' '.join(cmd)
  print(' '.join(cmd))
  return 0


if __name__ == '__main__':
  sys.exit(main())
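The WithXvfb substitution above rewrites one -c entry so that modules_unittests runs under a virtual display. A sketch of the function in isolation (the interpreter path in the comment is just an example; sys.executable varies by machine):

import sys

def WithXvfb(binary):
  # Wraps the test binary in testing/xvfb.py so it can open a display headlessly.
  return '-c \'%s testing/xvfb.py %s\'' % (sys.executable, binary)

print(WithXvfb('out/coverage/modules_unittests'))
# e.g. -c '/usr/bin/python3 testing/xvfb.py out/coverage/modules_unittests'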
@@ -1,4 +1,5 @@

#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -46,7 +47,6 @@ if os.path.exists(binary_path):

========== ENDING OF PATCH ==========

"""

import sys

DIRECTORY = 'out/coverage'

@@ -77,89 +77,89 @@ XC_TESTS = [


def FormatIossimTest(test_name, is_xctest=False):
  args = ['%s/%s.app' % (DIRECTORY, test_name)]
  if is_xctest:
    args += ['%s/%s_module.xctest' % (DIRECTORY, test_name)]

  return '-c \'%s/iossim %s\'' % (DIRECTORY, ' '.join(args))


def GetGNArgs(is_simulator):
  target_cpu = 'x64' if is_simulator else 'arm64'
  return ([] + ['target_os="ios"'] + ['target_cpu="%s"' % target_cpu] +
          ['use_clang_coverage=true'] + ['is_component_build=false'] +
          ['dcheck_always_on=true'])


def GenerateIOSSimulatorCommand():
  gn_args_string = ' '.join(GetGNArgs(is_simulator=True))
  gn_cmd = ['gn', 'gen', DIRECTORY, '--args=\'%s\'' % gn_args_string]

  coverage_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
                  ["%s.app" % t for t in XC_TESTS + TESTS] +
                  ['-b %s' % DIRECTORY, '-o out/report'] +
                  ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''] +
                  [FormatIossimTest(t, is_xctest=True) for t in XC_TESTS] +
                  [FormatIossimTest(t, is_xctest=False) for t in TESTS])

  print 'To get code coverage using iOS sim just run following commands:'
  print ''
  print ' '.join(gn_cmd)
  print ''
  print ' '.join(coverage_cmd)
  return 0

  print('To get code coverage using iOS sim just run following commands:')
  print('')
  print(' '.join(gn_cmd))
  print('')
  print(' '.join(coverage_cmd))
  return 0


def GenerateIOSDeviceCommand():
  gn_args_string = ' '.join(GetGNArgs(is_simulator=False))

  coverage_report_cmd = (
      [sys.executable, 'tools/code_coverage/coverage.py'] +
      ['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] +
      ['-o out/report'] + ['-p %s/merged.profdata' % DIRECTORY] +
      ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''])
  coverage_report_cmd = ([sys.executable, 'tools/code_coverage/coverage.py'] +
                         ['%s.app' % t for t in TESTS] + ['-b %s' % DIRECTORY] +
                         ['-o out/report'] +
                         ['-p %s/merged.profdata' % DIRECTORY] +
                         ['-i=\'.*/out/.*|.*/third_party/.*|.*test.*\''])

  print 'Computing code coverage for real iOS device is a little bit tedious.'
  print ''
  print 'You will need:'
  print ''
  print '1. Generate xcode project and open it with Xcode 10+:'
  print ' gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string)
  print ' open %s/all.xcworkspace' % DIRECTORY
  print ''
  print '2. Execute these Run targets manually with Xcode Run button and '
  print 'manually save generated coverage.profraw file to %s:' % DIRECTORY
  print '\n'.join('- %s' % t for t in TESTS)
  print ''
  print '3. Execute these Test targets manually with Xcode Test button and '
  print 'manually save generated coverage.profraw file to %s:' % DIRECTORY
  print '\n'.join('- %s' % t for t in XC_TESTS)
  print ''
  print '4. Merge *.profraw files to *.profdata using llvm-profdata tool:'
  print('  build/mac_files/Xcode.app/Contents/Developer/Toolchains/' +
        'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' +
        '-o %s/merged.profdata ' % DIRECTORY +
        '-sparse=true %s/*.profraw' % DIRECTORY)
  print ''
  print '5. Generate coverage report:'
  print ' ' + ' '.join(coverage_report_cmd)
  return 0

  print('Computing code coverage for real iOS device is a little bit tedious.')
  print('')
  print('You will need:')
  print('')
  print('1. Generate xcode project and open it with Xcode 10+:')
  print('  gn gen %s --ide=xcode --args=\'%s\'' % (DIRECTORY, gn_args_string))
  print('  open %s/all.xcworkspace' % DIRECTORY)
  print('')
  print('2. Execute these Run targets manually with Xcode Run button and ')
  print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
  print('\n'.join('- %s' % t for t in TESTS))
  print('')
  print('3. Execute these Test targets manually with Xcode Test button and ')
  print('manually save generated coverage.profraw file to %s:' % DIRECTORY)
  print('\n'.join('- %s' % t for t in XC_TESTS))
  print('')
  print('4. Merge *.profraw files to *.profdata using llvm-profdata tool:')
  print((' build/mac_files/Xcode.app/Contents/Developer/Toolchains/' +
         'XcodeDefault.xctoolchain/usr/bin/llvm-profdata merge ' +
         '-o %s/merged.profdata ' % DIRECTORY +
         '-sparse=true %s/*.profraw' % DIRECTORY))
  print('')
  print('5. Generate coverage report:')
  print(' ' + ' '.join(coverage_report_cmd))
  return 0


def Main():
  if len(sys.argv) < 2:
    print 'Please specify type of coverage:'
    print ' %s simulator' % sys.argv[0]
    print ' %s device' % sys.argv[0]
  elif sys.argv[1] == 'simulator':
    GenerateIOSSimulatorCommand()
  elif sys.argv[1] == 'device':
    GenerateIOSDeviceCommand()
  else:
    print 'Unsupported type of coverage'

def main():
  if len(sys.argv) < 2:
    print('Please specify type of coverage:')
    print('  %s simulator' % sys.argv[0])
    print('  %s device' % sys.argv[0])
  elif sys.argv[1] == 'simulator':
    GenerateIOSSimulatorCommand()
  elif sys.argv[1] == 'device':
    GenerateIOSDeviceCommand()
  else:
    print('Unsupported type of coverage')

  return 0


if __name__ == '__main__':
  sys.exit(Main())
  sys.exit(main())
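For concreteness, evaluating GetGNArgs exactly as written above yields the following GN argument list for a simulator build:

>>> GetGNArgs(is_simulator=True)
['target_os="ios"', 'target_cpu="x64"', 'use_clang_coverage=true',
 'is_component_build=false', 'dcheck_always_on=true']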
@@ -1,4 +1,4 @@

#!/usr/bin/env python
#!/usr/bin/env vpython3
#
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#

@@ -8,76 +8,76 @@

# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

import psutil
import sys

import psutil
import numpy
from matplotlib import pyplot


class CpuSnapshot(object):
class CpuSnapshot:
  def __init__(self, label):
    self.label = label
    self.samples = []

  def Capture(self, sample_count):
    print('Capturing %d CPU samples for %s...' %
          ((sample_count - len(self.samples)), self.label))
    print(('Capturing %d CPU samples for %s...' %
           ((sample_count - len(self.samples)), self.label)))
    while len(self.samples) < sample_count:
      self.samples.append(psutil.cpu_percent(1.0, False))

  def Text(self):
    return ('%s: avg=%s, median=%s, min=%s, max=%s' %
            (self.label, numpy.average(self.samples),
             numpy.median(self.samples), numpy.min(
                 self.samples), numpy.max(self.samples)))
    return (
        '%s: avg=%s, median=%s, min=%s, max=%s' %
        (self.label, numpy.average(self.samples), numpy.median(
            self.samples), numpy.min(self.samples), numpy.max(self.samples)))

  def Max(self):
    return numpy.max(self.samples)


def GrabCpuSamples(sample_count):
  print 'Label for snapshot (enter to quit): '
  label = raw_input().strip()
  print('Label for snapshot (enter to quit): ')
  label = eval(input().strip())
  if len(label) == 0:
    return None

  snapshot = CpuSnapshot(label)
  snapshot.Capture(sample_count)

  return snapshot


def main():
  print 'How many seconds to capture per snapshot (enter for 60)?'
  sample_count = raw_input().strip()
  print('How many seconds to capture per snapshot (enter for 60)?')
  sample_count = eval(input().strip())
  if len(sample_count) > 0 and int(sample_count) > 0:
    sample_count = int(sample_count)
  else:
    print 'Defaulting to 60 samples.'
    print('Defaulting to 60 samples.')
    sample_count = 60

  snapshots = []
  while True:
    snapshot = GrabCpuSamples(sample_count)
    if snapshot is None:
      break
    snapshots.append(snapshot)

  if len(snapshots) == 0:
    print 'no samples captured'
    print('no samples captured')
    return -1

  pyplot.title('CPU usage')

  for s in snapshots:
    pyplot.plot(s.samples, label=s.Text(), linewidth=2)

  pyplot.legend()

  pyplot.show()
  return 0


if __name__ == '__main__':
  sys.exit(main())
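One conversion wrinkle worth noting in this file: the original py2 calls were raw_input(), whose direct py3 equivalent is plain input(); the eval(input()) form shown above additionally evaluates whatever the user types as a Python expression. A sketch of the prompt-reading helper under the assumption that plain input() is the intended behavior:

# Sketch of GrabCpuSamples reading the label with plain input(), which matches
# the old raw_input() semantics; this is an illustrative variant, not the
# committed code.
def GrabCpuSamples(sample_count):
  print('Label for snapshot (enter to quit): ')
  label = input().strip()
  if len(label) == 0:
    return None
  snapshot = CpuSnapshot(label)
  snapshot.Capture(sample_count)
  return snapshot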
@@ -1,4 +1,5 @@

#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -27,34 +28,34 @@ import subprocess2


def main(directories):
  if not directories:
    directories = [SCRIPT_DIR]

  for path in directories:
    cmd = [
        sys.executable,
        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH,
                     'download_from_google_storage.py'),
        '--directory',
        '--num_threads=10',
        '--bucket',
        'chrome-webrtc-resources',
        '--auto_platform',
        '--recursive',
        path,
    ]
    print 'Downloading precompiled tools...'
    print('Downloading precompiled tools...')

    # Perform download similar to how gclient hooks execute.
    try:
      gclient_utils.CheckCallAndFilter(cmd,
                                       cwd=SRC_DIR,
                                       always_show_header=True)
    except (gclient_utils.Error, subprocess2.CalledProcessError) as e:
      print 'Error: %s' % str(e)
      print('Error: %s' % str(e))
      return 2
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
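The cmd list above corresponds to a direct invocation along these lines; the trailing local directory argument is a placeholder, and the interpreter name stands in for sys.executable:

$ python download_from_google_storage.py --directory --num_threads=10 \
    --bucket chrome-webrtc-resources --auto_platform --recursive \
    some/local/tools/dir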
@@ -1,4 +1,5 @@

#!/usr/bin/env vpython3

# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -26,10 +27,10 @@ If any command line arguments are passed to the script, it is executed as a

command in a subprocess.
"""

# psutil is not installed on non-Linux machines by default.
import psutil  # pylint: disable=F0401
import subprocess
import sys
# psutil is not installed on non-Linux machines by default.
import psutil  # pylint: disable=F0401

WEBCAM_WIN = ('schtasks', '/run', '/tn', 'ManyCam')
WEBCAM_MAC = ('open', '/Applications/ManyCam/ManyCam.app')

@@ -81,7 +82,7 @@ def StartWebCam():

def _ForcePythonInterpreter(cmd):
  """Returns the fixed command line to call the right python executable."""
  out = cmd[:]
  if out[0] == 'python':
  if out[0] == 'vpython3':
    out[0] = sys.executable
  elif out[0].endswith('.py'):
    out.insert(0, sys.executable)

@@ -95,8 +96,7 @@ def Main(argv):

  if argv:
    return subprocess.call(_ForcePythonInterpreter(argv))
  else:
    return 0
  return 0


if __name__ == '__main__':
@@ -1,4 +1,4 @@

#!/usr/bin/env/python
#!/usr/bin/env vpython3

# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#

@@ -24,7 +24,7 @@ following executable in your out folder:

You will be able to compile the same executable targeting your host machine
by running:

$ python tools_webrtc/executable_host_build.py --executable_name random_exec
$ vpython3 tools_webrtc/executable_host_build.py --executable_name random_exec

The generated executable will have the same name as the input executable with
suffix '_host'.

@@ -62,40 +62,39 @@ import find_depot_tools


def _ParseArgs():
  desc = 'Generates a GN executable targeting the host machine.'
  parser = argparse.ArgumentParser(description=desc)
  parser.add_argument('--executable_name',
                      required=True,
                      help='Name of the executable to build')
  args = parser.parse_args()
  return args


@contextmanager
def HostBuildDir():
  temp_dir = tempfile.mkdtemp()
  try:
    yield temp_dir
  finally:
    shutil.rmtree(temp_dir)


def _RunCommand(argv, cwd=SRC_DIR, **kwargs):
  with open(os.devnull, 'w') as devnull:
    subprocess.check_call(argv, cwd=cwd, stdout=devnull, **kwargs)


def DepotToolPath(*args):
  return os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, *args)


if __name__ == '__main__':
  ARGS = _ParseArgs()
  EXECUTABLE_TO_BUILD = ARGS.executable_name
  EXECUTABLE_FINAL_NAME = ARGS.executable_name + '_host'
  with HostBuildDir() as build_dir:
    _RunCommand([sys.executable, DepotToolPath('gn.py'), 'gen', build_dir])
    _RunCommand(
        [DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
    _RunCommand([DepotToolPath('ninja'), '-C', build_dir, EXECUTABLE_TO_BUILD])
    shutil.copy(os.path.join(build_dir, EXECUTABLE_TO_BUILD),
                EXECUTABLE_FINAL_NAME)
@@ -1,4 +1,4 @@

#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#

@@ -36,10 +36,11 @@ def main():

def _ForcePythonInterpreter(cmd):
  """Returns the fixed command line to call the right python executable."""
  out = cmd[:]
  if out[0] == 'python':
    out[0] = sys.executable
  elif out[0].endswith('.py'):
    out.insert(0, sys.executable)
  if len(out) > 0:
    if out[0] == 'python':
      out[0] = sys.executable
    elif out[0].endswith('.py'):
      out.insert(0, sys.executable)
  return out
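The new len(out) > 0 guard simply makes the rewrite a no-op for an empty command line instead of raising IndexError. A behavior sketch, reproducing the new function as written; the 'run_tests.py' argument is a made-up example:

import sys

def _ForcePythonInterpreter(cmd):
  """Returns the fixed command line to call the right python executable."""
  out = cmd[:]
  if len(out) > 0:
    if out[0] == 'python':
      out[0] = sys.executable
    elif out[0].endswith('.py'):
      out.insert(0, sys.executable)
  return out

print(_ForcePythonInterpreter(['run_tests.py', '--foo']))
# -> [sys.executable, 'run_tests.py', '--foo']
print(_ForcePythonInterpreter([]))  # -> [] instead of an IndexError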
@ -1,4 +1,5 @@
|
|||
#!/usr/bin/env python3
|
||||
#!/usr/bin/env vpython3
|
||||
|
||||
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
|
@ -11,9 +12,6 @@ This file emits the list of reasons why a particular build needs to be clobbered
|
|||
(or a list of 'landmines').
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
@@ -26,46 +24,45 @@ host_os = landmine_utils.host_os  # pylint: disable=invalid-name


def print_landmines():  # pylint: disable=invalid-name
    """
    ALL LANDMINES ARE EMITTED FROM HERE.
    """
    # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
    # bandaid fix if a CL that got landed has a build dependency bug and all
    # bots need to be cleaned up. If you're writing a new CL that causes build
    # dependency problems, fix the dependency problems instead of adding a
    # landmine.
    # See the Chromium version in src/build/get_landmines.py for usage examples.
    print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)')
    if host_os() == 'win':
        print('Clobber to resolve some issues with corrupt .pdb files on bots.')
        print('Clobber due to corrupt .pdb files (after #14623)')
        print(
            'Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
        print('Clobber due to Win Clang Debug linking errors in '
              'https://codereview.webrtc.org/2786603002')
        print('Clobber due to Win Debug linking errors in '
              'https://codereview.webrtc.org/2832063003/')
        print('Clobber win x86 bots (issues with isolated files).')
    if host_os() == 'mac':
        print('Clobber due to iOS compile errors (crbug.com/694721)')
        print('Clobber to unblock https://codereview.webrtc.org/2709573003')
        print('Clobber to fix https://codereview.webrtc.org/2709573003 after '
              'landing')
        print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
              'landing (changing rtc_executable -> rtc_test on iOS)')
        print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
              'landing (changing rtc_executable -> rtc_test on iOS)')
        print('Another landmine for low_bandwidth_audio_test (webrtc:7430)')
        print('Clobber to change neteq_rtpplay type to executable')
        print('Clobber to remove .xctest files.')
        print('Clobber to remove .xctest files (take 2).')
        print('Switching rtc_executable to rtc_test')
  """
  ALL LANDMINES ARE EMITTED FROM HERE.
  """
  # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
  # bandaid fix if a CL that got landed has a build dependency bug and all
  # bots need to be cleaned up. If you're writing a new CL that causes build
  # dependency problems, fix the dependency problems instead of adding a
  # landmine.
  # See the Chromium version in src/build/get_landmines.py for usage examples.
  print('Clobber to remove out/{Debug,Release}/args.gn (webrtc:5070)')
  if host_os() == 'win':
    print('Clobber to resolve some issues with corrupt .pdb files on bots.')
    print('Clobber due to corrupt .pdb files (after #14623)')
    print('Clobber due to Win 64-bit Debug linking error (crbug.com/668961)')
    print('Clobber due to Win Clang Debug linking errors in '
          'https://codereview.webrtc.org/2786603002')
    print('Clobber due to Win Debug linking errors in '
          'https://codereview.webrtc.org/2832063003/')
    print('Clobber win x86 bots (issues with isolated files).')
  if host_os() == 'mac':
    print('Clobber due to iOS compile errors (crbug.com/694721)')
    print('Clobber to unblock https://codereview.webrtc.org/2709573003')
    print('Clobber to fix https://codereview.webrtc.org/2709573003 after '
          'landing')
    print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
          'landing (changing rtc_executable -> rtc_test on iOS)')
    print('Clobber to fix https://codereview.webrtc.org/2767383005 before'
          'landing (changing rtc_executable -> rtc_test on iOS)')
    print('Another landmine for low_bandwidth_audio_test (webrtc:7430)')
    print('Clobber to change neteq_rtpplay type to executable')
    print('Clobber to remove .xctest files.')
    print('Clobber to remove .xctest files (take 2).')
    print('Switching rtc_executable to rtc_test')


def main():
    print_landmines()
    return 0
  print_landmines()
  return 0


if __name__ == '__main__':
    sys.exit(main())
  sys.exit(main())
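
Note: get_landmines.py only prints reasons; the consumer is Chromium's
build/landmines.py machinery, which diffs the emitted list against a copy
cached in the output directory and clobbers on any change. A rough sketch of
that convention (cache file name and location are assumptions, not part of
this CL):

  import subprocess

  new = subprocess.run(['vpython3', 'tools_webrtc/get_landmines.py'],
                       capture_output=True, text=True).stdout
  try:
    with open('out/Default/.landmines') as f:
      old = f.read()
  except FileNotFoundError:
    old = ''
  if new != old:
    print('Landmines changed; the build directory should be clobbered.')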

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#

@@ -14,9 +14,9 @@ It will run `mb gen` in a temporary directory and it is really useful to
check for different configurations.

Usage:
$ python tools_webrtc/gn_check_autofix.py -m some_master -b some_bot
$ vpython3 tools_webrtc/gn_check_autofix.py -m some_master -b some_bot
or
$ python tools_webrtc/gn_check_autofix.py -c some_mb_config
$ vpython3 tools_webrtc/gn_check_autofix.py -c some_mb_config
"""

import os

@@ -38,70 +38,69 @@ TARGET_RE = re.compile(
    r'(?P<indentation_level>\s*)\w*\("(?P<target_name>\w*)"\) {$')


class TemporaryDirectory(object):
    def __init__(self):
        self._closed = False
        self._name = None
        self._name = tempfile.mkdtemp()
class TemporaryDirectory:
  def __init__(self):
    self._closed = False
    self._name = None
    self._name = tempfile.mkdtemp()

    def __enter__(self):
        return self._name
  def __enter__(self):
    return self._name

    def __exit__(self, exc, value, _tb):
        if self._name and not self._closed:
            shutil.rmtree(self._name)
            self._closed = True
  def __exit__(self, exc, value, _tb):
    if self._name and not self._closed:
      shutil.rmtree(self._name)
      self._closed = True


def Run(cmd):
    print 'Running:', ' '.join(cmd)
    sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return sub.communicate()
  print('Running:', ' '.join(cmd))
  sub = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  return sub.communicate()
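
Note: the Run() change above matters under Python 3 because Popen pipes
return bytes, not str; main() below therefore decodes before splitting.
A minimal sketch of the same pattern (hypothetical command):

  import subprocess

  out, err = subprocess.Popen(['gn', '--version'],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE).communicate()
  text = out.decode('utf-8')  # bytes -> str, as done in main() below

  # Equivalent alternative (Python 3.7+): let subprocess decode.
  result = subprocess.run(['gn', '--version'], capture_output=True, text=True)
  text = result.stdout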

def FixErrors(filename, missing_deps, deleted_sources):
    with open(filename) as f:
        lines = f.readlines()
  with open(filename) as f:
    lines = f.readlines()

    fixed_file = ''
    indentation_level = None
    for line in lines:
        match = TARGET_RE.match(line)
        if match:
            target = match.group('target_name')
            if target in missing_deps:
                indentation_level = match.group('indentation_level')
        elif indentation_level is not None:
            match = re.match(indentation_level + '}$', line)
            if match:
                line = ('deps = [\n' + ''.join(' "' + dep + '",\n'
                                               for dep in missing_deps[target])
                        + ']\n') + line
                indentation_level = None
            elif line.strip().startswith('deps'):
                is_empty_deps = line.strip() == 'deps = []'
                line = 'deps = [\n' if is_empty_deps else line
                line += ''.join(' "' + dep + '",\n'
                                for dep in missing_deps[target])
                line += ']\n' if is_empty_deps else ''
                indentation_level = None
  fixed_file = ''
  indentation_level = None
  for line in lines:
    match = TARGET_RE.match(line)
    if match:
      target = match.group('target_name')
      if target in missing_deps:
        indentation_level = match.group('indentation_level')
    elif indentation_level is not None:
      match = re.match(indentation_level + '}$', line)
      if match:
        line = ('deps = [\n' + ''.join(' "' + dep + '",\n'
                                       for dep in missing_deps[target]) +
                ']\n') + line
        indentation_level = None
      elif line.strip().startswith('deps'):
        is_empty_deps = line.strip() == 'deps = []'
        line = 'deps = [\n' if is_empty_deps else line
        line += ''.join(' "' + dep + '",\n' for dep in missing_deps[target])
        line += ']\n' if is_empty_deps else ''
        indentation_level = None

        if line.strip() not in deleted_sources:
            fixed_file += line
    if line.strip() not in deleted_sources:
      fixed_file += line

    with open(filename, 'w') as f:
        f.write(fixed_file)
  with open(filename, 'w') as f:
    f.write(fixed_file)

    Run(['gn', 'format', filename])
  Run(['gn', 'format', filename])

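
For illustration, the effect of FixErrors() on a hypothetical BUILD.gn target
(target and dependency names are invented for the example), given
missing_deps = {'audio': {'../rtc_base:checks'}}:

  # Before:
  rtc_library("audio") {
    sources = [ "audio.cc" ]
    deps = []
  }

  # After FixErrors() and the final `gn format` pass (roughly):
  rtc_library("audio") {
    sources = [ "audio.cc" ]
    deps = [ "../rtc_base:checks" ]
  }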

def FirstNonEmpty(iterable):
    """Return first item which evaluates to True, or fallback to None."""
    return next((x for x in iterable if x), None)
  """Return first item which evaluates to True, or fallback to None."""
  return next((x for x in iterable if x), None)


def Rebase(base_path, dependency_path, dependency):
    """Adapt paths so they work both in stand-alone WebRTC and Chromium tree.
  """Adapt paths so they work both in stand-alone WebRTC and Chromium tree.

  To cope with varying top-level directory (WebRTC VS Chromium), we use:
    * relative paths for WebRTC modules.

@@ -118,82 +117,81 @@ def Rebase(base_path, dependency_path, dependency):
    Full target path (E.g. '../rtc_base/time:timestamp_extrapolator').
  """

    root = FirstNonEmpty(dependency_path.split('/'))
    if root in CHROMIUM_DIRS:
        # Chromium paths must remain absolute. E.g. //third_party//abseil-cpp...
        rebased = dependency_path
    else:
        base_path = base_path.split(os.path.sep)
        dependency_path = dependency_path.split(os.path.sep)

        first_difference = None
        shortest_length = min(len(dependency_path), len(base_path))
        for i in range(shortest_length):
            if dependency_path[i] != base_path[i]:
                first_difference = i
                break

        first_difference = first_difference or shortest_length
        base_path = base_path[first_difference:]
        dependency_path = dependency_path[first_difference:]
        rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path)
    return rebased + ':' + dependency
  root = FirstNonEmpty(dependency_path.split('/'))
  if root in CHROMIUM_DIRS:
    # Chromium paths must remain absolute. E.g. //third_party//abseil-cpp...
    rebased = dependency_path
  else:
    base_path = base_path.split(os.path.sep)
    dependency_path = dependency_path.split(os.path.sep)

    first_difference = None
    shortest_length = min(len(dependency_path), len(base_path))
    for i in range(shortest_length):
      if dependency_path[i] != base_path[i]:
        first_difference = i
        break

    first_difference = first_difference or shortest_length
    base_path = base_path[first_difference:]
    dependency_path = dependency_path[first_difference:]
    rebased = os.path.sep.join((['..'] * len(base_path)) + dependency_path)
  return rebased + ':' + dependency
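
A worked example of Rebase(), reusing the target path from its own docstring
(argument values are illustrative, and this assumes 'third_party' is listed
in CHROMIUM_DIRS):

  Rebase('api', 'rtc_base/time', 'timestamp_extrapolator')
  # -> '../rtc_base/time:timestamp_extrapolator'
  # ('api' and 'rtc_base/time' share no common prefix, so one '..' per
  # remaining component of base_path is prepended.)

  Rebase('api', '//third_party/abseil-cpp', 'absl')
  # -> '//third_party/abseil-cpp:absl'  (Chromium paths stay absolute)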

def main():
    deleted_sources = set()
    errors_by_file = defaultdict(lambda: defaultdict(set))
  deleted_sources = set()
  errors_by_file = defaultdict(lambda: defaultdict(set))

    with TemporaryDirectory() as tmp_dir:
        mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py')
        mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl')
        mb_gen_command = ([
            mb_script_path,
            'gen',
            tmp_dir,
            '--config-file',
            mb_config_file_path,
        ] + sys.argv[1:])
  with TemporaryDirectory() as tmp_dir:
    mb_script_path = os.path.join(SCRIPT_DIR, 'mb', 'mb.py')
    mb_config_file_path = os.path.join(SCRIPT_DIR, 'mb', 'mb_config.pyl')
    mb_gen_command = ([
        mb_script_path,
        'gen',
        tmp_dir,
        '--config-file',
        mb_config_file_path,
    ] + sys.argv[1:])

        mb_output = Run(mb_gen_command)
        errors = mb_output[0].split('ERROR')[1:]
    mb_output = Run(mb_gen_command)
    errors = mb_output[0].decode('utf-8').split('ERROR')[1:]

        if mb_output[1]:
            print mb_output[1]
            return 1
    if mb_output[1]:
      print(mb_output[1])
      return 1

        for error in errors:
            error = error.splitlines()
            target_msg = 'The target:'
            if target_msg not in error:
                target_msg = 'It is not in any dependency of'
            if target_msg not in error:
                print '\n'.join(error)
                continue
            index = error.index(target_msg) + 1
            path, target = error[index].strip().split(':')
            if error[index + 1] in ('is including a file from the target:',
                                    'The include file is in the target(s):'):
                dep = error[index + 2].strip()
                dep_path, dep = dep.split(':')
                dep = Rebase(path, dep_path, dep)
                # Replacing /target:target with /target
                dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep)
                path = os.path.join(path[2:], 'BUILD.gn')
                errors_by_file[path][target].add(dep)
            elif error[index + 1] == 'has a source file:':
                deleted_file = '"' + os.path.basename(
                    error[index + 2].strip()) + '",'
                deleted_sources.add(deleted_file)
            else:
                print '\n'.join(error)
                continue
    for error in errors:
      error = error.splitlines()
      target_msg = 'The target:'
      if target_msg not in error:
        target_msg = 'It is not in any dependency of'
      if target_msg not in error:
        print('\n'.join(error))
        continue
      index = error.index(target_msg) + 1
      path, target = error[index].strip().split(':')
      if error[index + 1] in ('is including a file from the target:',
                              'The include file is in the target(s):'):
        dep = error[index + 2].strip()
        dep_path, dep = dep.split(':')
        dep = Rebase(path, dep_path, dep)
        # Replacing /target:target with /target
        dep = re.sub(r'/(\w+):(\1)$', r'/\1', dep)
        path = os.path.join(path[2:], 'BUILD.gn')
        errors_by_file[path][target].add(dep)
      elif error[index + 1] == 'has a source file:':
        deleted_file = '"' + os.path.basename(error[index + 2].strip()) + '",'
        deleted_sources.add(deleted_file)
      else:
        print('\n'.join(error))
        continue

        for path, missing_deps in errors_by_file.items():
            FixErrors(path, missing_deps, deleted_sources)
    for path, missing_deps in list(errors_by_file.items()):
      FixErrors(path, missing_deps, deleted_sources)

    return 0
  return 0


if __name__ == '__main__':
    sys.exit(main())
  sys.exit(main())
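
The re.sub() step above collapses GN's redundant directory:target form when
the two names coincide; a quick check:

  import re

  print(re.sub(r'/(\w+):(\1)$', r'/\1', '../rtc_base/time:time'))
  # -> ../rtc_base/time
  print(re.sub(r'/(\w+):(\1)$', r'/\1', '../rtc_base/time:clock'))
  # -> ../rtc_base/time:clock  (names differ, left unchanged)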

@@ -53,7 +53,7 @@ For example:

Will be converted into:

    python gtest-parallel \
    vpython3 gtest-parallel \
        --shard_index 0 \
        --shard_count 1 \
        --output_dir=SOME_OUTPUT_DIR \

@@ -82,8 +82,8 @@ Args = collections.namedtuple(
    ['gtest_parallel_args', 'test_env', 'output_dir', 'test_artifacts_dir'])


def _CatFiles(file_list, output_file):
    with open(output_file, 'w') as output_file:
def _CatFiles(file_list, output_file_destination):
  with open(output_file_destination, 'w') as output_file:
    for filename in file_list:
      with open(filename) as input_file:
        output_file.write(input_file.read())

@@ -100,7 +100,7 @@ def _ParseWorkersOption(workers):
  return max(result, 1)  # Sanitize when using e.g. '0.5x'.


class ReconstructibleArgumentGroup(object):
class ReconstructibleArgumentGroup:
  """An argument group that can be converted back into a command line.

  This acts like ArgumentParser.add_argument_group, but names of arguments added

@@ -154,7 +154,7 @@ def ParseArgs(argv=None):
  parser.add_argument('--store-test-artifacts', action='store_true')

  # No-sandbox is a Chromium-specific flag, ignore it.
  # TODO(oprypin): Remove (bugs.webrtc.org/8115)
  # TODO(bugs.webrtc.org/8115): Remove workaround when fixed.
  parser.add_argument('--no-sandbox',
                      action='store_true',
                      help=argparse.SUPPRESS)

@@ -171,7 +171,7 @@ def ParseArgs(argv=None):
  }
  args_to_pass = []
  for arg in unrecognized_args:
    if any(arg.startswith(k) for k in webrtc_flags_to_change.keys()):
    if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())):
      arg_split = arg.split('=')
      args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' +
                          arg_split[1])

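The flag-renaming loop above maps Chromium-style dashed flags onto the
underscored spellings the test binaries expect; the mapping below is an
assumption inferred from testDocExample in the test file that follows:

  webrtc_flags_to_change = {
      '--isolated-script-test-perf-output':
      '--isolated_script_test_perf_output',
  }
  unrecognized_args = ['--isolated-script-test-perf-output=SOME_OTHER_DIR']
  args_to_pass = []
  for arg in unrecognized_args:
    if any(arg.startswith(k) for k in list(webrtc_flags_to_change.keys())):
      arg_split = arg.split('=')
      args_to_pass.append(webrtc_flags_to_change[arg_split[0]] + '=' +
                          arg_split[1])
  print(args_to_pass)
  # -> ['--isolated_script_test_perf_output=SOME_OTHER_DIR']
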
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#

@@ -21,152 +21,147 @@ gtest_parallel_wrapper = __import__('gtest-parallel-wrapper')

@contextmanager
def TemporaryDirectory():
    tmp_dir = tempfile.mkdtemp()
    yield tmp_dir
    os.rmdir(tmp_dir)
  tmp_dir = tempfile.mkdtemp()
  yield tmp_dir
  os.rmdir(tmp_dir)
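
Side note on the helper above: os.rmdir() only succeeds if the test left the
directory empty, and it is skipped entirely if the body raises. Python 3's
standard library has an equivalent that handles both cases and could serve as
a drop-in (a sketch, not part of this CL):

  import tempfile

  with tempfile.TemporaryDirectory() as tmp_dir:
    print(tmp_dir)  # removed recursively on exit, even on exceptions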

class GtestParallelWrapperHelpersTest(unittest.TestCase):
    def testGetWorkersAsIs(self):
        # pylint: disable=protected-access
        self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('12'), 12)
  def testGetWorkersAsIs(self):
    # pylint: disable=protected-access
    self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('12'), 12)

    def testGetTwiceWorkers(self):
        expected = 2 * multiprocessing.cpu_count()
        # pylint: disable=protected-access
        self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'),
                         expected)
  def testGetTwiceWorkers(self):
    expected = 2 * multiprocessing.cpu_count()
    # pylint: disable=protected-access
    self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('2x'), expected)

    def testGetHalfWorkers(self):
        expected = max(multiprocessing.cpu_count() // 2, 1)
        # pylint: disable=protected-access
        self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('0.5x'),
                         expected)
  def testGetHalfWorkers(self):
    expected = max(multiprocessing.cpu_count() // 2, 1)
    # pylint: disable=protected-access
    self.assertEqual(gtest_parallel_wrapper._ParseWorkersOption('0.5x'),
                     expected)


class GtestParallelWrapperTest(unittest.TestCase):
    @classmethod
    def _Expected(cls, gtest_parallel_args):
        return ['--shard_index=0', '--shard_count=1'] + gtest_parallel_args
  @classmethod
  def _Expected(cls, gtest_parallel_args):
    return ['--shard_index=0', '--shard_count=1'] + gtest_parallel_args

    def testOverwrite(self):
        result = gtest_parallel_wrapper.ParseArgs(
            ['--timeout=123', 'exec', '--timeout', '124'])
        expected = self._Expected(['--timeout=124', 'exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testOverwrite(self):
    result = gtest_parallel_wrapper.ParseArgs(
        ['--timeout=123', 'exec', '--timeout', '124'])
    expected = self._Expected(['--timeout=124', 'exec'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testMixing(self):
        result = gtest_parallel_wrapper.ParseArgs([
            '--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124'
        ])
        expected = self._Expected(
            ['--timeout=124', 'exec', '--', '--param1', '--param2'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testMixing(self):
    result = gtest_parallel_wrapper.ParseArgs(
        ['--timeout=123', '--param1', 'exec', '--param2', '--timeout', '124'])
    expected = self._Expected(
        ['--timeout=124', 'exec', '--', '--param1', '--param2'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testMixingPositional(self):
        result = gtest_parallel_wrapper.ParseArgs([
            '--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124',
            '--foo2', 'bar2'
        ])
        expected = self._Expected([
            '--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'
        ])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testMixingPositional(self):
    result = gtest_parallel_wrapper.ParseArgs([
        '--timeout=123', 'exec', '--foo1', 'bar1', '--timeout', '124', '--foo2',
        'bar2'
    ])
    expected = self._Expected(
        ['--timeout=124', 'exec', '--', '--foo1', 'bar1', '--foo2', 'bar2'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testDoubleDash1(self):
        result = gtest_parallel_wrapper.ParseArgs(
            ['--timeout', '123', 'exec', '--', '--timeout', '124'])
        expected = self._Expected(
            ['--timeout=123', 'exec', '--', '--timeout', '124'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testDoubleDash1(self):
    result = gtest_parallel_wrapper.ParseArgs(
        ['--timeout', '123', 'exec', '--', '--timeout', '124'])
    expected = self._Expected(
        ['--timeout=123', 'exec', '--', '--timeout', '124'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testDoubleDash2(self):
        result = gtest_parallel_wrapper.ParseArgs(
            ['--timeout=123', '--', 'exec', '--timeout=124'])
        expected = self._Expected(
            ['--timeout=123', 'exec', '--', '--timeout=124'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testDoubleDash2(self):
    result = gtest_parallel_wrapper.ParseArgs(
        ['--timeout=123', '--', 'exec', '--timeout=124'])
    expected = self._Expected(['--timeout=123', 'exec', '--', '--timeout=124'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testArtifacts(self):
        with TemporaryDirectory() as tmp_dir:
            output_dir = os.path.join(tmp_dir, 'foo')
            result = gtest_parallel_wrapper.ParseArgs(
                ['exec', '--store-test-artifacts', '--output_dir', output_dir])
            exp_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
            exp = self._Expected([
                '--output_dir=' + output_dir, 'exec', '--',
                '--test_artifacts_dir=' + exp_artifacts_dir
            ])
            self.assertEqual(result.gtest_parallel_args, exp)
            self.assertEqual(result.output_dir, output_dir)
            self.assertEqual(result.test_artifacts_dir, exp_artifacts_dir)
  def testArtifacts(self):
    with TemporaryDirectory() as tmp_dir:
      output_dir = os.path.join(tmp_dir, 'foo')
      result = gtest_parallel_wrapper.ParseArgs(
          ['exec', '--store-test-artifacts', '--output_dir', output_dir])
      exp_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
      exp = self._Expected([
          '--output_dir=' + output_dir, 'exec', '--',
          '--test_artifacts_dir=' + exp_artifacts_dir
      ])
      self.assertEqual(result.gtest_parallel_args, exp)
      self.assertEqual(result.output_dir, output_dir)
      self.assertEqual(result.test_artifacts_dir, exp_artifacts_dir)

    def testNoDirsSpecified(self):
        result = gtest_parallel_wrapper.ParseArgs(['exec'])
        self.assertEqual(result.output_dir, None)
        self.assertEqual(result.test_artifacts_dir, None)
  def testNoDirsSpecified(self):
    result = gtest_parallel_wrapper.ParseArgs(['exec'])
    self.assertEqual(result.output_dir, None)
    self.assertEqual(result.test_artifacts_dir, None)

    def testOutputDirSpecified(self):
        result = gtest_parallel_wrapper.ParseArgs(
            ['exec', '--output_dir', '/tmp/foo'])
        self.assertEqual(result.output_dir, '/tmp/foo')
        self.assertEqual(result.test_artifacts_dir, None)
  def testOutputDirSpecified(self):
    result = gtest_parallel_wrapper.ParseArgs(
        ['exec', '--output_dir', '/tmp/foo'])
    self.assertEqual(result.output_dir, '/tmp/foo')
    self.assertEqual(result.test_artifacts_dir, None)

    def testShortArg(self):
        result = gtest_parallel_wrapper.ParseArgs(['-d', '/tmp/foo', 'exec'])
        expected = self._Expected(['--output_dir=/tmp/foo', 'exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
        self.assertEqual(result.output_dir, '/tmp/foo')
  def testShortArg(self):
    result = gtest_parallel_wrapper.ParseArgs(['-d', '/tmp/foo', 'exec'])
    expected = self._Expected(['--output_dir=/tmp/foo', 'exec'])
    self.assertEqual(result.gtest_parallel_args, expected)
    self.assertEqual(result.output_dir, '/tmp/foo')

    def testBoolArg(self):
        result = gtest_parallel_wrapper.ParseArgs(
            ['--gtest_also_run_disabled_tests', 'exec'])
        expected = self._Expected(['--gtest_also_run_disabled_tests', 'exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testBoolArg(self):
    result = gtest_parallel_wrapper.ParseArgs(
        ['--gtest_also_run_disabled_tests', 'exec'])
    expected = self._Expected(['--gtest_also_run_disabled_tests', 'exec'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testNoArgs(self):
        result = gtest_parallel_wrapper.ParseArgs(['exec'])
        expected = self._Expected(['exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testNoArgs(self):
    result = gtest_parallel_wrapper.ParseArgs(['exec'])
    expected = self._Expected(['exec'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testDocExample(self):
        with TemporaryDirectory() as tmp_dir:
            output_dir = os.path.join(tmp_dir, 'foo')
            result = gtest_parallel_wrapper.ParseArgs([
                'some_test', '--some_flag=some_value', '--another_flag',
                '--output_dir=' + output_dir, '--store-test-artifacts',
                '--isolated-script-test-perf-output=SOME_OTHER_DIR',
                '--foo=bar', '--baz'
            ])
            expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
            expected = self._Expected([
                '--output_dir=' + output_dir, 'some_test', '--',
                '--test_artifacts_dir=' + expected_artifacts_dir,
                '--some_flag=some_value', '--another_flag',
                '--isolated_script_test_perf_output=SOME_OTHER_DIR',
                '--foo=bar', '--baz'
            ])
            self.assertEqual(result.gtest_parallel_args, expected)
  def testDocExample(self):
    with TemporaryDirectory() as tmp_dir:
      output_dir = os.path.join(tmp_dir, 'foo')
      result = gtest_parallel_wrapper.ParseArgs([
          'some_test', '--some_flag=some_value', '--another_flag',
          '--output_dir=' + output_dir, '--store-test-artifacts',
          '--isolated-script-test-perf-output=SOME_OTHER_DIR', '--foo=bar',
          '--baz'
      ])
      expected_artifacts_dir = os.path.join(output_dir, 'test_artifacts')
      expected = self._Expected([
          '--output_dir=' + output_dir, 'some_test', '--',
          '--test_artifacts_dir=' + expected_artifacts_dir,
          '--some_flag=some_value', '--another_flag',
          '--isolated_script_test_perf_output=SOME_OTHER_DIR', '--foo=bar',
          '--baz'
      ])
      self.assertEqual(result.gtest_parallel_args, expected)

    def testStandardWorkers(self):
        """Check integer value is passed as-is."""
        result = gtest_parallel_wrapper.ParseArgs(['--workers', '17', 'exec'])
        expected = self._Expected(['--workers=17', 'exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testStandardWorkers(self):
    """Check integer value is passed as-is."""
    result = gtest_parallel_wrapper.ParseArgs(['--workers', '17', 'exec'])
    expected = self._Expected(['--workers=17', 'exec'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testTwoWorkersPerCpuCore(self):
        result = gtest_parallel_wrapper.ParseArgs(['--workers', '2x', 'exec'])
        workers = 2 * multiprocessing.cpu_count()
        expected = self._Expected(['--workers=%s' % workers, 'exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testTwoWorkersPerCpuCore(self):
    result = gtest_parallel_wrapper.ParseArgs(['--workers', '2x', 'exec'])
    workers = 2 * multiprocessing.cpu_count()
    expected = self._Expected(['--workers=%s' % workers, 'exec'])
    self.assertEqual(result.gtest_parallel_args, expected)

    def testUseHalfTheCpuCores(self):
        result = gtest_parallel_wrapper.ParseArgs(
            ['--workers', '0.5x', 'exec'])
        workers = max(multiprocessing.cpu_count() // 2, 1)
        expected = self._Expected(['--workers=%s' % workers, 'exec'])
        self.assertEqual(result.gtest_parallel_args, expected)
  def testUseHalfTheCpuCores(self):
    result = gtest_parallel_wrapper.ParseArgs(['--workers', '0.5x', 'exec'])
    workers = max(multiprocessing.cpu_count() // 2, 1)
    expected = self._Expected(['--workers=%s' % workers, 'exec'])
    self.assertEqual(result.gtest_parallel_args, expected)


if __name__ == '__main__':
    unittest.main()
  unittest.main()

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -51,303 +51,296 @@ from generate_licenses import LicenseBuilder


def _ParseArgs():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--build_config',
                        default='release',
                        choices=['debug', 'release'],
                        help='The build config. Can be "debug" or "release". '
                        'Defaults to "release".')
    parser.add_argument(
        '--arch',
        nargs='+',
        default=DEFAULT_ARCHS,
        choices=ENABLED_ARCHS,
        help='Architectures to build. Defaults to %(default)s.')
    parser.add_argument(
        '-c',
        '--clean',
        action='store_true',
        default=False,
        help='Removes the previously generated build output, if any.')
    parser.add_argument(
        '-p',
        '--purify',
        action='store_true',
        default=False,
        help='Purifies the previously generated build output by '
        'removing the temporary results used when (re)building.')
    parser.add_argument(
        '-o',
        '--output-dir',
        type=os.path.abspath,
        default=SDK_OUTPUT_DIR,
        help='Specifies a directory to output the build artifacts to. '
        'If specified together with -c, deletes the dir.')
    parser.add_argument(
        '-r',
        '--revision',
        type=int,
        default=0,
        help='Specifies a revision number to embed if building the framework.')
    parser.add_argument('-e',
                        '--bitcode',
                        action='store_true',
                        default=False,
                        help='Compile with bitcode.')
    parser.add_argument('--verbose',
                        action='store_true',
                        default=False,
                        help='Debug logging.')
    parser.add_argument('--use-goma',
                        action='store_true',
                        default=False,
                        help='Use goma to build.')
    parser.add_argument(
        '--extra-gn-args',
        default=[],
        nargs='*',
        help='Additional GN args to be used during Ninja generation.')
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('--build_config',
                      default='release',
                      choices=['debug', 'release'],
                      help='The build config. Can be "debug" or "release". '
                      'Defaults to "release".')
  parser.add_argument('--arch',
                      nargs='+',
                      default=DEFAULT_ARCHS,
                      choices=ENABLED_ARCHS,
                      help='Architectures to build. Defaults to %(default)s.')
  parser.add_argument(
      '-c',
      '--clean',
      action='store_true',
      default=False,
      help='Removes the previously generated build output, if any.')
  parser.add_argument('-p',
                      '--purify',
                      action='store_true',
                      default=False,
                      help='Purifies the previously generated build output by '
                      'removing the temporary results used when (re)building.')
  parser.add_argument(
      '-o',
      '--output-dir',
      type=os.path.abspath,
      default=SDK_OUTPUT_DIR,
      help='Specifies a directory to output the build artifacts to. '
      'If specified together with -c, deletes the dir.')
  parser.add_argument(
      '-r',
      '--revision',
      type=int,
      default=0,
      help='Specifies a revision number to embed if building the framework.')
  parser.add_argument('-e',
                      '--bitcode',
                      action='store_true',
                      default=False,
                      help='Compile with bitcode.')
  parser.add_argument('--verbose',
                      action='store_true',
                      default=False,
                      help='Debug logging.')
  parser.add_argument('--use-goma',
                      action='store_true',
                      default=False,
                      help='Use goma to build.')
  parser.add_argument(
      '--extra-gn-args',
      default=[],
      nargs='*',
      help='Additional GN args to be used during Ninja generation.')

    return parser.parse_args()
  return parser.parse_args()


def _RunCommand(cmd):
    logging.debug('Running: %r', cmd)
    subprocess.check_call(cmd, cwd=SRC_DIR)
  logging.debug('Running: %r', cmd)
  subprocess.check_call(cmd, cwd=SRC_DIR)


def _CleanArtifacts(output_dir):
    if os.path.isdir(output_dir):
        logging.info('Deleting %s', output_dir)
        shutil.rmtree(output_dir)
  if os.path.isdir(output_dir):
    logging.info('Deleting %s', output_dir)
    shutil.rmtree(output_dir)


def _CleanTemporary(output_dir, architectures):
    if os.path.isdir(output_dir):
        logging.info('Removing temporary build files.')
        for arch in architectures:
            arch_lib_path = os.path.join(output_dir, arch)
            if os.path.isdir(arch_lib_path):
                shutil.rmtree(arch_lib_path)
  if os.path.isdir(output_dir):
    logging.info('Removing temporary build files.')
    for arch in architectures:
      arch_lib_path = os.path.join(output_dir, arch)
      if os.path.isdir(arch_lib_path):
        shutil.rmtree(arch_lib_path)


def _ParseArchitecture(architectures):
    result = dict()
    for arch in architectures:
        if ":" in arch:
            target_environment, target_cpu = arch.split(":")
        else:
            logging.warning('The environment for build is not specified.')
            logging.warning('It is assumed based on cpu type.')
            logging.warning('See crbug.com/1138425 for more details.')
            if arch == "x64":
                target_environment = "simulator"
            else:
                target_environment = "device"
            target_cpu = arch
        archs = result.get(target_environment)
        if archs is None:
            result[target_environment] = {target_cpu}
        else:
            archs.add(target_cpu)
  result = dict()
  for arch in architectures:
    if ":" in arch:
      target_environment, target_cpu = arch.split(":")
    else:
      logging.warning('The environment for build is not specified.')
      logging.warning('It is assumed based on cpu type.')
      logging.warning('See crbug.com/1138425 for more details.')
      if arch == "x64":
        target_environment = "simulator"
      else:
        target_environment = "device"
      target_cpu = arch
    archs = result.get(target_environment)
    if archs is None:
      result[target_environment] = {target_cpu}
    else:
      archs.add(target_cpu)

    return result
  return result
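
What _ParseArchitecture() yields for a sample --arch list (values
illustrative):

  archs = _ParseArchitecture(['arm64', 'simulator:arm64', 'x64'])
  # -> {'device': {'arm64'}, 'simulator': {'arm64', 'x64'}}
  # Bare 'arm64' defaults to the device environment, bare 'x64' to the
  # simulator, and 'simulator:arm64' names its environment explicitly.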


def BuildWebRTC(output_dir, target_environment, target_arch, flavor,
                gn_target_name, ios_deployment_target, libvpx_build_vp9,
                use_bitcode, use_goma, extra_gn_args):
    gn_args = [
        'target_os="ios"', 'ios_enable_code_signing=false',
        'is_component_build=false', 'rtc_include_tests=false',
    ]
  gn_args = [
      'target_os="ios"',
      'ios_enable_code_signing=false',
      'is_component_build=false',
      'rtc_include_tests=false',
  ]

    # Add flavor option.
    if flavor == 'debug':
        gn_args.append('is_debug=true')
    elif flavor == 'release':
        gn_args.append('is_debug=false')
    else:
        raise ValueError('Unexpected flavor type: %s' % flavor)
  # Add flavor option.
  if flavor == 'debug':
    gn_args.append('is_debug=true')
  elif flavor == 'release':
    gn_args.append('is_debug=false')
  else:
    raise ValueError('Unexpected flavor type: %s' % flavor)

    gn_args.append('target_environment="%s"' % target_environment)
  gn_args.append('target_environment="%s"' % target_environment)

    gn_args.append('target_cpu="%s"' % target_arch)
  gn_args.append('target_cpu="%s"' % target_arch)

    gn_args.append('ios_deployment_target="%s"' % ios_deployment_target)
  gn_args.append('ios_deployment_target="%s"' % ios_deployment_target)

    gn_args.append('rtc_libvpx_build_vp9=' +
                   ('true' if libvpx_build_vp9 else 'false'))
  gn_args.append('rtc_libvpx_build_vp9=' +
                 ('true' if libvpx_build_vp9 else 'false'))

    gn_args.append('enable_ios_bitcode=' +
                   ('true' if use_bitcode else 'false'))
    gn_args.append('use_goma=' + ('true' if use_goma else 'false'))
    gn_args.append('rtc_enable_objc_symbol_export=true')
  gn_args.append('enable_ios_bitcode=' + ('true' if use_bitcode else 'false'))
  gn_args.append('use_goma=' + ('true' if use_goma else 'false'))
  gn_args.append('rtc_enable_objc_symbol_export=true')

    args_string = ' '.join(gn_args + extra_gn_args)
    logging.info('Building WebRTC with args: %s', args_string)
  args_string = ' '.join(gn_args + extra_gn_args)
  logging.info('Building WebRTC with args: %s', args_string)

    cmd = [
        sys.executable,
        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
        'gen',
        output_dir,
        '--args=' + args_string,
    ]
    _RunCommand(cmd)
    logging.info('Building target: %s', gn_target_name)
  cmd = [
      sys.executable,
      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
      'gen',
      output_dir,
      '--args=' + args_string,
  ]
  _RunCommand(cmd)
  logging.info('Building target: %s', gn_target_name)

    cmd = [
        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'),
        '-C',
        output_dir,
        gn_target_name,
    ]
    if use_goma:
        cmd.extend(['-j', '200'])
    _RunCommand(cmd)
  cmd = [
      os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja'),
      '-C',
      output_dir,
      gn_target_name,
  ]
  if use_goma:
    cmd.extend(['-j', '200'])
  _RunCommand(cmd)
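
In effect, each BuildWebRTC() call reduces to one gn gen plus one ninja
invocation; roughly, with illustrative paths and values:

  # vpython3 <depot_tools>/gn.py gen out/device_arm64 --args='target_os="ios"
  #     ios_enable_code_signing=false is_component_build=false
  #     rtc_include_tests=false is_debug=false target_environment="device"
  #     target_cpu="arm64" ios_deployment_target="12.0"
  #     rtc_libvpx_build_vp9=false enable_ios_bitcode=false use_goma=false
  #     rtc_enable_objc_symbol_export=true'
  # <depot_tools>/ninja -C out/device_arm64 framework_objc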


def main():
    args = _ParseArgs()
  args = _ParseArgs()

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    if args.clean:
        _CleanArtifacts(args.output_dir)
        return 0
  if args.clean:
    _CleanArtifacts(args.output_dir)
    return 0

    # architectures is typed as Dict[str, Set[str]],
    # where key is for the environment (device or simulator)
    # and value is for the cpu type.
    architectures = _ParseArchitecture(args.arch)
    gn_args = args.extra_gn_args
  # architectures is typed as Dict[str, Set[str]],
  # where key is for the environment (device or simulator)
  # and value is for the cpu type.
  architectures = _ParseArchitecture(args.arch)
  gn_args = args.extra_gn_args

    if args.purify:
        _CleanTemporary(args.output_dir, architectures.keys())
        return 0
  if args.purify:
    _CleanTemporary(args.output_dir, list(architectures.keys()))
    return 0

    gn_target_name = 'framework_objc'
    if not args.bitcode:
        gn_args.append('enable_dsyms=true')
        gn_args.append('enable_stripping=true')
  gn_target_name = 'framework_objc'
  if not args.bitcode:
    gn_args.append('enable_dsyms=true')
    gn_args.append('enable_stripping=true')

    # Build all architectures.
    framework_paths = []
    all_lib_paths = []
    for (environment, archs) in architectures.items():
        framework_path = os.path.join(args.output_dir, environment)
        framework_paths.append(framework_path)
        lib_paths = []
        for arch in archs:
            lib_path = os.path.join(framework_path, arch + '_libs')
            lib_paths.append(lib_path)
            BuildWebRTC(lib_path, environment, arch, args.build_config,
                        gn_target_name, IOS_DEPLOYMENT_TARGET[environment],
                        LIBVPX_BUILD_VP9, args.bitcode, args.use_goma, gn_args)
        all_lib_paths.extend(lib_paths)

        # Combine the slices.
        dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC')
        # Dylibs will be combined, all other files are the same across archs.
        shutil.rmtree(
            os.path.join(framework_path, SDK_FRAMEWORK_NAME),
            ignore_errors=True)
        shutil.copytree(
            os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
            os.path.join(framework_path, SDK_FRAMEWORK_NAME),
            symlinks=True)
        logging.info('Merging framework slices for %s.', environment)
        dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths]
        out_dylib_path = os.path.join(framework_path, dylib_path)
        if os.path.islink(out_dylib_path):
            out_dylib_path = os.path.join(os.path.dirname(out_dylib_path),
                                          os.readlink(out_dylib_path))
        try:
            os.remove(out_dylib_path)
        except OSError:
            pass
        cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path]
        _RunCommand(cmd)

        # Merge the dSYM slices.
        lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME)
        if os.path.isdir(lib_dsym_dir_path):
            shutil.rmtree(
                os.path.join(framework_path, SDK_DSYM_NAME),
                ignore_errors=True)
            shutil.copytree(
                lib_dsym_dir_path, os.path.join(framework_path, SDK_DSYM_NAME))
            logging.info('Merging dSYM slices.')
            dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources',
                                     'DWARF', 'WebRTC')
            lib_dsym_paths = [
                os.path.join(path, dsym_path) for path in lib_paths
            ]
            out_dsym_path = os.path.join(framework_path, dsym_path)
            try:
                os.remove(out_dsym_path)
            except OSError:
                pass
            cmd = ['lipo'
                   ] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
            _RunCommand(cmd)

        # Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)...
        resources_dir = os.path.join(framework_path, SDK_FRAMEWORK_NAME,
                                     'Resources')
        if not os.path.exists(resources_dir):
            # ...then fall back to iOS-style WebRTC.framework/
            resources_dir = os.path.dirname(resources_dir)

        # Modify the version number.
        # Format should be <Branch cut MXX>.<Hotfix #>.<Rev #>.
        # e.g. 55.0.14986 means
        # branch cut 55, no hotfixes, and revision 14986.
        infoplist_path = os.path.join(resources_dir, 'Info.plist')
        cmd = [
            'PlistBuddy', '-c', 'Print :CFBundleShortVersionString',
            infoplist_path
        ]
        major_minor = subprocess.check_output(cmd).decode('utf-8').strip()
        version_number = '%s.%s' % (major_minor, args.revision)
        logging.info('Substituting revision number: %s', version_number)
        cmd = [
            'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number,
            infoplist_path
        ]
        _RunCommand(cmd)
        _RunCommand(['plutil', '-convert', 'binary1', infoplist_path])

    xcframework_dir = os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)
    if os.path.isdir(xcframework_dir):
        shutil.rmtree(xcframework_dir)

    logging.info('Creating xcframework.')
    cmd = ['xcodebuild', '-create-xcframework', '-output', xcframework_dir]

    # Apparently, xcodebuild needs absolute paths for input arguments
    for framework_path in framework_paths:
        cmd += [
            '-framework',
            os.path.abspath(os.path.join(framework_path, SDK_FRAMEWORK_NAME)),
        ]
        dsym_full_path = os.path.join(framework_path, SDK_DSYM_NAME)
        if os.path.exists(dsym_full_path):
            cmd += ['-debug-symbols', os.path.abspath(dsym_full_path)]

    _RunCommand(cmd)

    # Generate the license file.
    logging.info('Generate license file.')
    gn_target_full_name = '//sdk:' + gn_target_name
    builder = LicenseBuilder(all_lib_paths, [gn_target_full_name])
    builder.GenerateLicenseText(
        os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME))

    logging.info('Done.')
    return 0
  # Build all architectures.
  framework_paths = []
  all_lib_paths = []
  for (environment, archs) in list(architectures.items()):
    framework_path = os.path.join(args.output_dir, environment)
    framework_paths.append(framework_path)
    lib_paths = []
    for arch in archs:
      lib_path = os.path.join(framework_path, arch + '_libs')
      lib_paths.append(lib_path)
      BuildWebRTC(lib_path, environment, arch, args.build_config,
                  gn_target_name, IOS_DEPLOYMENT_TARGET[environment],
                  LIBVPX_BUILD_VP9, args.bitcode, args.use_goma, gn_args)
    all_lib_paths.extend(lib_paths)

    # Combine the slices.
    dylib_path = os.path.join(SDK_FRAMEWORK_NAME, 'WebRTC')
    # Dylibs will be combined, all other files are the same across archs.
    shutil.rmtree(os.path.join(framework_path, SDK_FRAMEWORK_NAME),
                  ignore_errors=True)
    shutil.copytree(os.path.join(lib_paths[0], SDK_FRAMEWORK_NAME),
                    os.path.join(framework_path, SDK_FRAMEWORK_NAME),
                    symlinks=True)
    logging.info('Merging framework slices for %s.', environment)
    dylib_paths = [os.path.join(path, dylib_path) for path in lib_paths]
    out_dylib_path = os.path.join(framework_path, dylib_path)
    if os.path.islink(out_dylib_path):
      out_dylib_path = os.path.join(os.path.dirname(out_dylib_path),
                                    os.readlink(out_dylib_path))
    try:
      os.remove(out_dylib_path)
    except OSError:
      pass
    cmd = ['lipo'] + dylib_paths + ['-create', '-output', out_dylib_path]
    _RunCommand(cmd)

    # Merge the dSYM slices.
    lib_dsym_dir_path = os.path.join(lib_paths[0], SDK_DSYM_NAME)
    if os.path.isdir(lib_dsym_dir_path):
      shutil.rmtree(os.path.join(framework_path, SDK_DSYM_NAME),
                    ignore_errors=True)
      shutil.copytree(lib_dsym_dir_path,
                      os.path.join(framework_path, SDK_DSYM_NAME))
      logging.info('Merging dSYM slices.')
      dsym_path = os.path.join(SDK_DSYM_NAME, 'Contents', 'Resources', 'DWARF',
                               'WebRTC')
      lib_dsym_paths = [os.path.join(path, dsym_path) for path in lib_paths]
      out_dsym_path = os.path.join(framework_path, dsym_path)
      try:
        os.remove(out_dsym_path)
      except OSError:
        pass
      cmd = ['lipo'] + lib_dsym_paths + ['-create', '-output', out_dsym_path]
      _RunCommand(cmd)

    # Check for Mac-style WebRTC.framework/Resources/ (for Catalyst)...
    resources_dir = os.path.join(framework_path, SDK_FRAMEWORK_NAME,
                                 'Resources')
    if not os.path.exists(resources_dir):
      # ...then fall back to iOS-style WebRTC.framework/
      resources_dir = os.path.dirname(resources_dir)

    # Modify the version number.
    # Format should be <Branch cut MXX>.<Hotfix #>.<Rev #>.
    # e.g. 55.0.14986 means
    # branch cut 55, no hotfixes, and revision 14986.
    infoplist_path = os.path.join(resources_dir, 'Info.plist')
    cmd = [
        'PlistBuddy', '-c', 'Print :CFBundleShortVersionString',
        infoplist_path
    ]
    major_minor = subprocess.check_output(cmd).decode('utf-8').strip()
    version_number = '%s.%s' % (major_minor, args.revision)
    logging.info('Substituting revision number: %s', version_number)
    cmd = [
        'PlistBuddy', '-c', 'Set :CFBundleVersion ' + version_number,
        infoplist_path
    ]
    _RunCommand(cmd)
    _RunCommand(['plutil', '-convert', 'binary1', infoplist_path])

  xcframework_dir = os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME)
  if os.path.isdir(xcframework_dir):
    shutil.rmtree(xcframework_dir)

  logging.info('Creating xcframework.')
  cmd = ['xcodebuild', '-create-xcframework', '-output', xcframework_dir]

  # Apparently, xcodebuild needs absolute paths for input arguments
  for framework_path in framework_paths:
    cmd += [
        '-framework',
        os.path.abspath(os.path.join(framework_path, SDK_FRAMEWORK_NAME)),
    ]
    dsym_full_path = os.path.join(framework_path, SDK_DSYM_NAME)
    if os.path.exists(dsym_full_path):
      cmd += ['-debug-symbols', os.path.abspath(dsym_full_path)]

  _RunCommand(cmd)

  # Generate the license file.
  logging.info('Generate license file.')
  gn_target_full_name = '//sdk:' + gn_target_name
  builder = LicenseBuilder(all_lib_paths, [gn_target_full_name])
  builder.GenerateLicenseText(
      os.path.join(args.output_dir, SDK_XCFRAMEWORK_NAME))

  logging.info('Done.')
  return 0


if __name__ == '__main__':
    sys.exit(main())
  sys.exit(main())
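
In outline, main() above delegates the packaging to three Apple tools; the
equivalent manual steps would look roughly like this (paths illustrative):

  # 1. Fuse per-arch dylib slices into one fat binary per environment:
  #    lipo device/arm_libs/WebRTC.framework/WebRTC \
  #         device/arm64_libs/WebRTC.framework/WebRTC \
  #         -create -output device/WebRTC.framework/WebRTC
  # 2. Stamp the version via PlistBuddy, then bundle the per-environment
  #    frameworks (device + simulator, which lipo cannot mix) into one
  #    distributable:
  #    xcodebuild -create-xcframework \
  #        -framework device/WebRTC.framework \
  #        -framework simulator/WebRTC.framework \
  #        -output WebRTC.xcframework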

@@ -1,3 +1,5 @@
#!/usr/bin/env vpython3

# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -11,22 +13,22 @@ import sys


def GenerateModulemap():
    parser = argparse.ArgumentParser(description='Generate modulemap')
    parser.add_argument("-o", "--out", type=str, help="Output file.")
    parser.add_argument("-n", "--name", type=str, help="Name of binary.")
  parser = argparse.ArgumentParser(description='Generate modulemap')
  parser.add_argument("-o", "--out", type=str, help="Output file.")
  parser.add_argument("-n", "--name", type=str, help="Name of binary.")

    args = parser.parse_args()
  args = parser.parse_args()

    with open(args.out, "w") as outfile:
        module_template = 'framework module %s {\n' \
                          '  umbrella header "%s.h"\n' \
                          '\n' \
                          '  export *\n' \
                          '  module * { export * }\n' \
                          '}\n' % (args.name, args.name)
        outfile.write(module_template)
    return 0
  with open(args.out, "w") as outfile:
    module_template = 'framework module %s {\n' \
                      '  umbrella header "%s.h"\n' \
                      '\n' \
                      '  export *\n' \
                      '  module * { export * }\n' \
                      '}\n' % (args.name, args.name)
    outfile.write(module_template)
  return 0


if __name__ == '__main__':
    sys.exit(GenerateModulemap())
  sys.exit(GenerateModulemap())
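
Run as `vpython3 generate_modulemap.py --out WebRTC.modulemap --name WebRTC`,
the template above expands to:

  framework module WebRTC {
    umbrella header "WebRTC.h"

    export *
    module * { export * }
  }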

@@ -1,3 +1,5 @@
#!/usr/bin/env vpython3

# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -14,20 +16,20 @@ import textwrap


def GenerateUmbrellaHeader():
    parser = argparse.ArgumentParser(description='Generate umbrella header')
    parser.add_argument("-o", "--out", type=str, help="Output file.")
    parser.add_argument("-s",
                        "--sources",
                        default=[],
                        type=str,
                        nargs='+',
                        help="Headers to include.")
  parser = argparse.ArgumentParser(description='Generate umbrella header')
  parser.add_argument("-o", "--out", type=str, help="Output file.")
  parser.add_argument("-s",
                      "--sources",
                      default=[],
                      type=str,
                      nargs='+',
                      help="Headers to include.")

    args = parser.parse_args()
  args = parser.parse_args()

    with open(args.out, "w") as outfile:
        outfile.write(
            textwrap.dedent("""\
  with open(args.out, "w") as outfile:
    outfile.write(
        textwrap.dedent("""\
    /*
     * Copyright %d The WebRTC project authors. All Rights Reserved.
     *

@@ -38,11 +40,11 @@ def GenerateUmbrellaHeader():
     * be found in the AUTHORS file in the root of the source tree.
     */\n\n""" % datetime.datetime.now().year))

    for s in args.sources:
        outfile.write("#import <WebRTC/{}>\n".format(os.path.basename(s)))
    for s in args.sources:
      outfile.write("#import <WebRTC/{}>\n".format(os.path.basename(s)))

    return 0
  return 0


if __name__ == '__main__':
    sys.exit(GenerateUmbrellaHeader())
  sys.exit(GenerateUmbrellaHeader())
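
For `--sources RTCPeerConnection.h RTCVideoTrack.h` (header names
illustrative), the loop above appends one import per header after the
generated license banner:

  #import <WebRTC/RTCPeerConnection.h>
  #import <WebRTC/RTCVideoTrack.h>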

@@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env vpython3

# Copyright 2016 The WebRTC project authors. All Rights Reserved.
#

@@ -10,18 +10,19 @@
"""Script for merging generated iOS libraries."""

import sys

import argparse
import os
import re
import subprocess
from six.moves import range


# Valid arch subdir names.
VALID_ARCHS = ['arm_libs', 'arm64_libs', 'ia32_libs', 'x64_libs']


def MergeLibs(lib_base_dir):
    """Merges generated iOS libraries for different archs.
  """Merges generated iOS libraries for different archs.

  Uses libtool to generate FAT archive files for each generated library.

@@ -32,96 +33,94 @@ def MergeLibs(lib_base_dir):
  Returns:
    Exit code of libtool.
  """
    output_dir_name = 'fat_libs'
    archs = [arch for arch in os.listdir(lib_base_dir) if arch in VALID_ARCHS]
    # For each arch, find (library name, library path) for arch. We will merge
    # all libraries with the same name.
    libs = {}
    for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]:
        if not os.path.exists(lib_dir):
            continue
        for dirpath, _, filenames in os.walk(lib_dir):
            for filename in filenames:
                if not filename.endswith('.a'):
                    continue
                entry = libs.get(filename, [])
                entry.append(os.path.join(dirpath, filename))
                libs[filename] = entry
    orphaned_libs = {}
    valid_libs = {}
    for library, paths in libs.items():
        if len(paths) < len(archs):
            orphaned_libs[library] = paths
        else:
            valid_libs[library] = paths
    for library, paths in orphaned_libs.items():
        components = library[:-2].split('_')[:-1]
        found = False
        # Find directly matching parent libs by stripping suffix.
        while components and not found:
            parent_library = '_'.join(components) + '.a'
            if parent_library in valid_libs:
                valid_libs[parent_library].extend(paths)
                found = True
                break
            components = components[:-1]
        # Find next best match by finding parent libs with the same prefix.
        if not found:
            base_prefix = library[:-2].split('_')[0]
            for valid_lib, valid_paths in valid_libs.items():
                if valid_lib[:len(base_prefix)] == base_prefix:
                    valid_paths.extend(paths)
                    found = True
                    break
        assert found
  output_dir_name = 'fat_libs'
  archs = [arch for arch in os.listdir(lib_base_dir) if arch in VALID_ARCHS]
  # For each arch, find (library name, library path) for arch. We will merge
  # all libraries with the same name.
  libs = {}
  for lib_dir in [os.path.join(lib_base_dir, arch) for arch in VALID_ARCHS]:
    if not os.path.exists(lib_dir):
      continue
    for dirpath, _, filenames in os.walk(lib_dir):
      for filename in filenames:
        if not filename.endswith('.a'):
          continue
        entry = libs.get(filename, [])
        entry.append(os.path.join(dirpath, filename))
        libs[filename] = entry
  orphaned_libs = {}
  valid_libs = {}
  for library, paths in list(libs.items()):
    if len(paths) < len(archs):
      orphaned_libs[library] = paths
    else:
      valid_libs[library] = paths
  for library, paths in list(orphaned_libs.items()):
    components = library[:-2].split('_')[:-1]
    found = False
    # Find directly matching parent libs by stripping suffix.
    while components and not found:
      parent_library = '_'.join(components) + '.a'
      if parent_library in valid_libs:
        valid_libs[parent_library].extend(paths)
        found = True
        break
      components = components[:-1]
    # Find next best match by finding parent libs with the same prefix.
    if not found:
      base_prefix = library[:-2].split('_')[0]
      for valid_lib, valid_paths in list(valid_libs.items()):
        if valid_lib[:len(base_prefix)] == base_prefix:
          valid_paths.extend(paths)
          found = True
          break
    assert found

    # Create output directory.
    output_dir_path = os.path.join(lib_base_dir, output_dir_name)
    if not os.path.exists(output_dir_path):
        os.mkdir(output_dir_path)
  # Create output directory.
  output_dir_path = os.path.join(lib_base_dir, output_dir_name)
  if not os.path.exists(output_dir_path):
    os.mkdir(output_dir_path)

    # Use this so libtool merged binaries are always the same.
    env = os.environ.copy()
    env['ZERO_AR_DATE'] = '1'
  # Use this so libtool merged binaries are always the same.
  env = os.environ.copy()
  env['ZERO_AR_DATE'] = '1'

    # Ignore certain errors.
    libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$')
  # Ignore certain errors.
  libtool_re = re.compile(r'^.*libtool:.*file: .* has no symbols$')

    # Merge libraries using libtool.
    libtool_returncode = 0
    for library, paths in valid_libs.items():
        cmd_list = [
            'libtool', '-static', '-v', '-o',
            os.path.join(output_dir_path, library)
        ] + paths
        libtoolout = subprocess.Popen(cmd_list,
                                      stderr=subprocess.PIPE,
                                      env=env)
        _, err = libtoolout.communicate()
        for line in err.splitlines():
            if not libtool_re.match(line):
                print >> sys.stderr, line
        # Unconditionally touch the output .a file on the command line if present
        # and the command succeeded. A bit hacky.
        libtool_returncode = libtoolout.returncode
        if not libtool_returncode:
            for i in range(len(cmd_list) - 1):
                if cmd_list[i] == '-o' and cmd_list[i + 1].endswith('.a'):
                    os.utime(cmd_list[i + 1], None)
                    break
    return libtool_returncode
  # Merge libraries using libtool.
  libtool_returncode = 0
  for library, paths in list(valid_libs.items()):
    cmd_list = [
        'libtool', '-static', '-v', '-o',
        os.path.join(output_dir_path, library)
    ] + paths
    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
    _, err = libtoolout.communicate()
    for line in err.splitlines():
      if not libtool_re.match(line):
        print(line, file=sys.stderr)
    # Unconditionally touch the output .a file on the command line if present
    # and the command succeeded. A bit hacky.
    libtool_returncode = libtoolout.returncode
    if not libtool_returncode:
      for i in range(len(cmd_list) - 1):
        if cmd_list[i] == '-o' and cmd_list[i + 1].endswith('.a'):
          os.utime(cmd_list[i + 1], None)
          break
  return libtool_returncode


def Main():
    parser_description = 'Merge WebRTC libraries.'
    parser = argparse.ArgumentParser(description=parser_description)
    parser.add_argument('lib_base_dir',
                        help='Directory with built libraries. ',
                        type=str)
    args = parser.parse_args()
    lib_base_dir = args.lib_base_dir
    MergeLibs(lib_base_dir)
def main():
  parser_description = 'Merge WebRTC libraries.'
  parser = argparse.ArgumentParser(description=parser_description)
  parser.add_argument('lib_base_dir',
                      help='Directory with built libraries. ',
                      type=str)
  args = parser.parse_args()
  lib_base_dir = args.lib_base_dir
  MergeLibs(lib_base_dir)


if __name__ == '__main__':
    sys.exit(Main())
  sys.exit(main())
|
|
|
@@ -1,4 +1,4 @@
#!/usr/bin/env python3
#!/usr/bin/env vpython3

# Copyright 2016 The WebRTC project authors. All Rights Reserved.
#

@@ -13,7 +13,8 @@ Licenses are taken from dependent libraries which are determined by
GN desc command `gn desc` on all targets specified via `--target` argument.

One can see all dependencies by invoking this command:
$ gn.py desc --all --format=json <out_directory> <target> | python -m json.tool
$ gn.py desc --all --format=json <out_directory> <target> | \
    vpython3 -m json.tool
(see "deps" subarray)

Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.

@@ -21,18 +22,13 @@ Libraries are mapped to licenses via LIB_TO_LICENSES_DICT dictionary.
"""

import sys

import argparse
import json
import logging
import os
import re
import subprocess
try:
    # python 3.2+
    from html import escape
except ImportError:
    from cgi import escape
from html import escape

# Third_party library to licences mapping. Keys are names of the libraries
# (right after the `third_party/` prefix)

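For reference, `gn desc --format=json` emits a JSON object keyed by target name, and the "deps" list under each target is what the license builder walks. A minimal sketch of pulling third_party deps out of a saved dump (the file name is hypothetical):

import json

with open('desc.json') as f:  # Saved output of the gn desc command above.
  desc = json.load(f)
third_party = sorted({
    dep
    for target in desc.values()
    for dep in target.get('deps', [])
    if '/third_party/' in dep
})
print('\n'.join(third_party))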
@@ -107,11 +103,11 @@ LIB_REGEX_TO_LICENSES_DICT = {


def FindSrcDirPath():
    """Returns the abs path to the src/ dir of the project."""
    src_dir = os.path.dirname(os.path.abspath(__file__))
    while os.path.basename(src_dir) != 'src':
        src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
    return src_dir
  """Returns the abs path to the src/ dir of the project."""
  src_dir = os.path.dirname(os.path.abspath(__file__))
  while os.path.basename(src_dir) != 'src':
    src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
  return src_dir


SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0]))

@@ -124,29 +120,29 @@ THIRD_PARTY_LIB_SIMPLE_NAME_REGEX = r'^.*/third_party/([\w\-+]+).*$'
THIRD_PARTY_LIB_REGEX_TEMPLATE = r'^.*/third_party/%s$'


class LicenseBuilder(object):
    def __init__(self,
                 buildfile_dirs,
                 targets,
                 lib_to_licenses_dict=None,
                 lib_regex_to_licenses_dict=None):
        if lib_to_licenses_dict is None:
            lib_to_licenses_dict = LIB_TO_LICENSES_DICT
class LicenseBuilder:
  def __init__(self,
               buildfile_dirs,
               targets,
               lib_to_licenses_dict=None,
               lib_regex_to_licenses_dict=None):
    if lib_to_licenses_dict is None:
      lib_to_licenses_dict = LIB_TO_LICENSES_DICT

        if lib_regex_to_licenses_dict is None:
            lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT
    if lib_regex_to_licenses_dict is None:
      lib_regex_to_licenses_dict = LIB_REGEX_TO_LICENSES_DICT

        self.buildfile_dirs = buildfile_dirs
        self.targets = targets
        self.lib_to_licenses_dict = lib_to_licenses_dict
        self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict
    self.buildfile_dirs = buildfile_dirs
    self.targets = targets
    self.lib_to_licenses_dict = lib_to_licenses_dict
    self.lib_regex_to_licenses_dict = lib_regex_to_licenses_dict

        self.common_licenses_dict = self.lib_to_licenses_dict.copy()
        self.common_licenses_dict.update(self.lib_regex_to_licenses_dict)
    self.common_licenses_dict = self.lib_to_licenses_dict.copy()
    self.common_licenses_dict.update(self.lib_regex_to_licenses_dict)

    @staticmethod
    def _ParseLibraryName(dep):
        """Returns library name after third_party
  @staticmethod
  def _ParseLibraryName(dep):
    """Returns library name after third_party

    Input one of:
    //a/b/third_party/libname:c
@@ -155,11 +151,11 @@ class LicenseBuilder(object):

    Outputs libname or None if this is not a third_party dependency.
    """
        groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep)
        return groups.group(1) if groups else None
    groups = re.match(THIRD_PARTY_LIB_SIMPLE_NAME_REGEX, dep)
    return groups.group(1) if groups else None

    def _ParseLibrary(self, dep):
        """Returns library simple or regex name that matches `dep` after third_party
  def _ParseLibrary(self, dep):
    """Returns library simple or regex name that matches `dep` after third_party

    This method matches `dep` dependency against simple names in
    LIB_TO_LICENSES_DICT and regular expression names in

@@ -167,109 +163,104 @@ class LicenseBuilder(object):

    Outputs matched dict key or None if this is not a third_party dependency.
    """
        libname = LicenseBuilder._ParseLibraryName(dep)
    libname = LicenseBuilder._ParseLibraryName(dep)

        for lib_regex in self.lib_regex_to_licenses_dict:
            if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep):
                return lib_regex
    for lib_regex in self.lib_regex_to_licenses_dict:
      if re.match(THIRD_PARTY_LIB_REGEX_TEMPLATE % lib_regex, dep):
        return lib_regex

        return libname
    return libname
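The two module constants do all of the matching work: THIRD_PARTY_LIB_SIMPLE_NAME_REGEX captures the first path component after third_party/, and THIRD_PARTY_LIB_REGEX_TEMPLATE anchors a per-library pattern against the full dependency label. A quick check with the same values:

import re

SIMPLE = r'^.*/third_party/([\w\-+]+).*$'
TEMPLATE = r'^.*/third_party/%s$'

print(re.match(SIMPLE, '//a/b/third_party/libname2:c(d)').group(1))
# -> libname2
print(bool(re.match(TEMPLATE % 'libname:foo.*',
                    '//a/b/third_party/libname:foo_bar_java')))
# -> True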
    @staticmethod
    def _RunGN(buildfile_dir, target):
        cmd = [
            sys.executable,
            os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
            'desc',
            '--all',
            '--format=json',
            os.path.abspath(buildfile_dir),
            target,
        ]
        logging.debug('Running: %r', cmd)
        output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8')
        logging.debug('Output: %s', output_json)
        return output_json
  @staticmethod
  def _RunGN(buildfile_dir, target):
    cmd = [
        sys.executable,
        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py'),
        'desc',
        '--all',
        '--format=json',
        os.path.abspath(buildfile_dir),
        target,
    ]
    logging.debug('Running: %r', cmd)
    output_json = subprocess.check_output(cmd, cwd=WEBRTC_ROOT).decode('UTF-8')
    logging.debug('Output: %s', output_json)
    return output_json
    def _GetThirdPartyLibraries(self, buildfile_dir, target):
        output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
        libraries = set()
        for described_target in output.values():
            third_party_libs = (self._ParseLibrary(dep)
                                for dep in described_target['deps'])
            libraries |= set(lib for lib in third_party_libs if lib)
        return libraries
  def _GetThirdPartyLibraries(self, buildfile_dir, target):
    output = json.loads(LicenseBuilder._RunGN(buildfile_dir, target))
    libraries = set()
    for described_target in list(output.values()):
      third_party_libs = (self._ParseLibrary(dep)
                          for dep in described_target['deps'])
      libraries |= set(lib for lib in third_party_libs if lib)
    return libraries

    def GenerateLicenseText(self, output_dir):
        # Get a list of third_party libs from gn. For fat libraries we must consider
        # all architectures, hence the multiple buildfile directories.
        third_party_libs = set()
        for buildfile in self.buildfile_dirs:
            for target in self.targets:
                third_party_libs |= self._GetThirdPartyLibraries(
                    buildfile, target)
        assert len(third_party_libs) > 0
  def GenerateLicenseText(self, output_dir):
    # Get a list of third_party libs from gn. For fat libraries we must consider
    # all architectures, hence the multiple buildfile directories.
    third_party_libs = set()
    for buildfile in self.buildfile_dirs:
      for target in self.targets:
        third_party_libs |= self._GetThirdPartyLibraries(buildfile, target)
    assert len(third_party_libs) > 0

        missing_licenses = third_party_libs - set(
            self.common_licenses_dict.keys())
        if missing_licenses:
            error_msg = 'Missing licenses for following third_party targets: %s' % \
                        ', '.join(sorted(missing_licenses))
            logging.error(error_msg)
            raise Exception(error_msg)
    missing_licenses = third_party_libs - set(self.common_licenses_dict.keys())
    if missing_licenses:
      error_msg = 'Missing licenses for following third_party targets: %s' % \
                  ', '.join(sorted(missing_licenses))
      logging.error(error_msg)
      raise Exception(error_msg)

        # Put webrtc at the front of the list.
        license_libs = sorted(third_party_libs)
        license_libs.insert(0, 'webrtc')
    # Put webrtc at the front of the list.
    license_libs = sorted(third_party_libs)
    license_libs.insert(0, 'webrtc')

        logging.info('List of licenses: %s', ', '.join(license_libs))
    logging.info('List of licenses: %s', ', '.join(license_libs))

        # Generate markdown.
        output_license_file = open(os.path.join(output_dir, 'LICENSE.md'),
                                   'w+')
        for license_lib in license_libs:
            if len(self.common_licenses_dict[license_lib]) == 0:
                logging.info(
                    'Skipping compile time or internal dependency: %s',
                    license_lib)
                continue  # Compile time dependency
    # Generate markdown.
    output_license_file = open(os.path.join(output_dir, 'LICENSE.md'), 'w+')
    for license_lib in license_libs:
      if len(self.common_licenses_dict[license_lib]) == 0:
        logging.info('Skipping compile time or internal dependency: %s',
                     license_lib)
        continue  # Compile time dependency

            output_license_file.write('# %s\n' % license_lib)
            output_license_file.write('```\n')
            for path in self.common_licenses_dict[license_lib]:
                license_path = os.path.join(WEBRTC_ROOT, path)
                with open(license_path, 'r') as license_file:
                    license_text = escape(license_file.read(), quote=True)
                    output_license_file.write(license_text)
                    output_license_file.write('\n')
            output_license_file.write('```\n\n')
      output_license_file.write('# %s\n' % license_lib)
      output_license_file.write('```\n')
      for path in self.common_licenses_dict[license_lib]:
        license_path = os.path.join(WEBRTC_ROOT, path)
        with open(license_path, 'r') as license_file:
          license_text = escape(license_file.read(), quote=True)
          output_license_file.write(license_text)
          output_license_file.write('\n')
      output_license_file.write('```\n\n')

        output_license_file.close()
    output_license_file.close()
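Each license lands in LICENSE.md as a fenced block, with escape(..., quote=True) from the html module guarding against license text that happens to contain markup. The write path reduced to a sketch (the data is made up):

from html import escape

licenses = {'somelib': 'Copyright <owner> & "contributors"'}
with open('LICENSE.md', 'w+') as out:  # Real path comes from output_dir.
  for name, text in licenses.items():
    out.write('# %s\n```\n%s\n```\n\n' % (name, escape(text, quote=True)))
# '<', '>', '&' and quotes arrive HTML-escaped inside the fenced block.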
def main():
    parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md')
    parser.add_argument('--verbose',
                        action='store_true',
                        default=False,
                        help='Debug logging.')
    parser.add_argument('--target',
                        required=True,
                        action='append',
                        default=[],
                        help='Name of the GN target to generate a license for')
    parser.add_argument('output_dir',
                        help='Directory to output LICENSE.md to.')
    parser.add_argument('buildfile_dirs',
                        nargs='+',
                        help='Directories containing gn generated ninja files')
    args = parser.parse_args()
  parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.md')
  parser.add_argument('--verbose',
                      action='store_true',
                      default=False,
                      help='Debug logging.')
  parser.add_argument('--target',
                      required=True,
                      action='append',
                      default=[],
                      help='Name of the GN target to generate a license for')
  parser.add_argument('output_dir', help='Directory to output LICENSE.md to.')
  parser.add_argument('buildfile_dirs',
                      nargs='+',
                      help='Directories containing gn generated ninja files')
  args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)
  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    builder = LicenseBuilder(args.buildfile_dirs, args.target)
    builder.GenerateLicenseText(args.output_dir)
  builder = LicenseBuilder(args.buildfile_dirs, args.target)
  builder.GenerateLicenseText(args.output_dir)


if __name__ == '__main__':
    sys.exit(main())
  sys.exit(main())

@@ -1,5 +1,6 @@
#!/usr/bin/env vpython
# pylint: disable=relative-import,protected-access,unused-argument
#!/usr/bin/env vpython3

# pylint: disable=protected-access,unused-argument

# Copyright 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -10,20 +11,15 @@
# be found in the AUTHORS file in the root of the source tree.

import unittest
try:
    # python 3.3+
    from unittest.mock import patch
except ImportError:
    # From site-package
    from mock import patch
from mock import patch

from generate_licenses import LicenseBuilder


class TestLicenseBuilder(unittest.TestCase):
    @staticmethod
    def _FakeRunGN(buildfile_dir, target):
        return """
  @staticmethod
  def _FakeRunGN(buildfile_dir, target):
    return """
{
  "target1": {
    "deps": [
@@ -36,93 +32,89 @@ class TestLicenseBuilder(unittest.TestCase):
}
"""

    def testParseLibraryName(self):
        self.assertEqual(
            LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'),
            'libname1')
        self.assertEqual(
            LicenseBuilder._ParseLibraryName(
                '//a/b/third_party/libname2:c(d)'), 'libname2')
        self.assertEqual(
            LicenseBuilder._ParseLibraryName(
                '//a/b/third_party/libname3/c:d(e)'), 'libname3')
        self.assertEqual(
            LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None)
  def testParseLibraryName(self):
    self.assertEqual(
        LicenseBuilder._ParseLibraryName('//a/b/third_party/libname1:c'),
        'libname1')
    self.assertEqual(
        LicenseBuilder._ParseLibraryName('//a/b/third_party/libname2:c(d)'),
        'libname2')
    self.assertEqual(
        LicenseBuilder._ParseLibraryName('//a/b/third_party/libname3/c:d(e)'),
        'libname3')
    self.assertEqual(
        LicenseBuilder._ParseLibraryName('//a/b/not_third_party/c'), None)

    def testParseLibrarySimpleMatch(self):
        builder = LicenseBuilder([], [], {}, {})
        self.assertEqual(builder._ParseLibrary('//a/b/third_party/libname:c'),
                         'libname')
  def testParseLibrarySimpleMatch(self):
    builder = LicenseBuilder([], [], {}, {})
    self.assertEqual(builder._ParseLibrary('//a/b/third_party/libname:c'),
                     'libname')

    def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self):
        lib_dict = {
            'libname:foo.*': ['path/to/LICENSE'],
        }
        builder = LicenseBuilder([], [], lib_dict, {})
        self.assertEqual(
            builder._ParseLibrary('//a/b/third_party/libname:bar_java'),
            'libname')
  def testParseLibraryRegExNoMatchFallbacksToDefaultLibname(self):
    lib_dict = {
        'libname:foo.*': ['path/to/LICENSE'],
    }
    builder = LicenseBuilder([], [], lib_dict, {})
    self.assertEqual(
        builder._ParseLibrary('//a/b/third_party/libname:bar_java'), 'libname')

    def testParseLibraryRegExMatch(self):
        lib_regex_dict = {
            'libname:foo.*': ['path/to/LICENSE'],
        }
        builder = LicenseBuilder([], [], {}, lib_regex_dict)
        self.assertEqual(
            builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'),
            'libname:foo.*')
  def testParseLibraryRegExMatch(self):
    lib_regex_dict = {
        'libname:foo.*': ['path/to/LICENSE'],
    }
    builder = LicenseBuilder([], [], {}, lib_regex_dict)
    self.assertEqual(
        builder._ParseLibrary('//a/b/third_party/libname:foo_bar_java'),
        'libname:foo.*')

    def testParseLibraryRegExMatchWithSubDirectory(self):
        lib_regex_dict = {
            'libname/foo:bar.*': ['path/to/LICENSE'],
        }
        builder = LicenseBuilder([], [], {}, lib_regex_dict)
        self.assertEqual(
            builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'),
            'libname/foo:bar.*')
  def testParseLibraryRegExMatchWithSubDirectory(self):
    lib_regex_dict = {
        'libname/foo:bar.*': ['path/to/LICENSE'],
    }
    builder = LicenseBuilder([], [], {}, lib_regex_dict)
    self.assertEqual(
        builder._ParseLibrary('//a/b/third_party/libname/foo:bar_java'),
        'libname/foo:bar.*')

    def testParseLibraryRegExMatchWithStarInside(self):
        lib_regex_dict = {
            'libname/foo.*bar.*': ['path/to/LICENSE'],
        }
        builder = LicenseBuilder([], [], {}, lib_regex_dict)
        self.assertEqual(
            builder._ParseLibrary(
                '//a/b/third_party/libname/fooHAHA:bar_java'),
            'libname/foo.*bar.*')
  def testParseLibraryRegExMatchWithStarInside(self):
    lib_regex_dict = {
        'libname/foo.*bar.*': ['path/to/LICENSE'],
    }
    builder = LicenseBuilder([], [], {}, lib_regex_dict)
    self.assertEqual(
        builder._ParseLibrary('//a/b/third_party/libname/fooHAHA:bar_java'),
        'libname/foo.*bar.*')

    @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
    def testGetThirdPartyLibrariesWithoutRegex(self):
        builder = LicenseBuilder([], [], {}, {})
        self.assertEqual(
            builder._GetThirdPartyLibraries('out/arm', 'target1'),
            set(['libname1', 'libname2', 'libname3']))
  @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
  def testGetThirdPartyLibrariesWithoutRegex(self):
    builder = LicenseBuilder([], [], {}, {})
    self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
                     set(['libname1', 'libname2', 'libname3']))

    @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
    def testGetThirdPartyLibrariesWithRegex(self):
        lib_regex_dict = {
            'libname2:c.*': ['path/to/LICENSE'],
        }
        builder = LicenseBuilder([], [], {}, lib_regex_dict)
        self.assertEqual(
            builder._GetThirdPartyLibraries('out/arm', 'target1'),
            set(['libname1', 'libname2:c.*', 'libname3']))
  @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
  def testGetThirdPartyLibrariesWithRegex(self):
    lib_regex_dict = {
        'libname2:c.*': ['path/to/LICENSE'],
    }
    builder = LicenseBuilder([], [], {}, lib_regex_dict)
    self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
                     set(['libname1', 'libname2:c.*', 'libname3']))

    @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
    def testGenerateLicenseTextFailIfUnknownLibrary(self):
        lib_dict = {
            'simple_library': ['path/to/LICENSE'],
        }
        builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {})
  @patch('generate_licenses.LicenseBuilder._RunGN', _FakeRunGN)
  def testGenerateLicenseTextFailIfUnknownLibrary(self):
    lib_dict = {
        'simple_library': ['path/to/LICENSE'],
    }
    builder = LicenseBuilder(['dummy_dir'], ['dummy_target'], lib_dict, {})

        with self.assertRaises(Exception) as context:
            builder.GenerateLicenseText('dummy/dir')
    with self.assertRaises(Exception) as context:
      builder.GenerateLicenseText('dummy/dir')

        self.assertEqual(
            context.exception.args[0],
            'Missing licenses for following third_party targets: '
            'libname1, libname2, libname3')
    self.assertEqual(
        context.exception.args[0],
        'Missing licenses for following third_party targets: '
        'libname1, libname2, libname3')


if __name__ == '__main__':
    unittest.main()
  unittest.main()

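These tests never shell out to gn: @patch swaps the _RunGN static method for _FakeRunGN, which returns the canned JSON string. The same pattern in a reduced form (the fake payload is illustrative):

from unittest.mock import patch
import unittest

from generate_licenses import LicenseBuilder

def fake_run_gn(buildfile_dir, target):
  return '{"target1": {"deps": ["//a/third_party/libname1:c"]}}'

class ExampleTest(unittest.TestCase):
  # _RunGN is replaced only for the duration of the decorated test.
  @patch('generate_licenses.LicenseBuilder._RunGN', fake_run_gn)
  def test_uses_fake_gn(self):
    builder = LicenseBuilder([], [], {}, {})
    self.assertEqual(builder._GetThirdPartyLibraries('out/arm', 'target1'),
                     {'libname1'})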
@@ -1,3 +1,5 @@
#!/usr/bin/env vpython3

# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -7,6 +9,10 @@
# be found in the AUTHORS file in the root of the source tree.


# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True


def _CommonChecks(input_api, output_api):
  results = []


@@ -27,15 +33,16 @@ def _CommonChecks(input_api, output_api):
  results.extend(input_api.RunTests(pylint_checks))

  # Run the MB unittests.
  results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
      input_api,
      output_api,
      '.',
      [ r'^.+_unittest\.py$'],
      skip_shebang_check=True))
  results.extend(
      input_api.canned_checks.RunUnitTestsInDirectory(input_api,
                                                      output_api,
                                                      '.',
                                                      [r'^.+_unittest\.py$'],
                                                      skip_shebang_check=False,
                                                      run_on_python2=False))

  # Validate the format of the mb_config.pyl file.
  cmd = [input_api.python_executable, 'mb.py', 'validate']
  cmd = [input_api.python3_executable, 'mb.py', 'validate']
  kwargs = {'cwd': input_api.PresubmitLocalPath()}
  results.extend(input_api.RunTests([
      input_api.Command(name='mb_validate',

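USE_PYTHON3 = True is the depot_tools opt-in that tells `git cl presubmit` to run the file under Python 3. A skeletal PRESUBMIT.py using the same canned check; the exact keyword arguments (such as run_on_python2) depend on the depot_tools revision, so treat this as a sketch:

USE_PYTHON3 = True  # Run this presubmit under Python 3.

def CheckChangeOnUpload(input_api, output_api):
  # Run every *_unittest.py next to this file, Python 3 only.
  return input_api.canned_checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', [r'^.+_unittest\.py$'],
      run_on_python2=False)

def CheckChangeOnCommit(input_api, output_api):
  return CheckChangeOnUpload(input_api, output_api)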
@@ -3,4 +3,4 @@ setlocal
:: This is required with cygwin only.
PATH=%~dp0;%PATH%
set PYTHONDONTWRITEBYTECODE=1
call python "%~dp0mb.py" %*
call vpython3 "%~dp0mb.py" %*

@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -13,8 +14,6 @@ MB is a wrapper script for GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""

from __future__ import print_function

import argparse
import ast
import errno

@@ -28,10 +27,7 @@ import sys
import subprocess
import tempfile
import traceback
try:
  from urllib2 import urlopen  # for Python2
except ImportError:
  from urllib.request import urlopen  # for Python3
from urllib.request import urlopen

SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))

@@ -280,7 +276,7 @@ class MetaBuildWrapper(object):
  def CmdExport(self):
    self.ReadConfigFile()
    obj = {}
    for builder_group, builders in self.builder_groups.items():
    for builder_group, builders in list(self.builder_groups.items()):
      obj[builder_group] = {}
      for builder in builders:
        config = self.builder_groups[builder_group][builder]

@@ -290,7 +286,7 @@ class MetaBuildWrapper(object):
        if isinstance(config, dict):
          args = {
              k: self.FlattenConfig(v)['gn_args']
              for k, v in config.items()
              for k, v in list(config.items())
          }
        elif config.startswith('//'):
          args = config

@@ -476,15 +472,15 @@ class MetaBuildWrapper(object):
    # Build a list of all of the configs referenced by builders.
    all_configs = {}
    for builder_group in self.builder_groups:
      for config in self.builder_groups[builder_group].values():
      for config in list(self.builder_groups[builder_group].values()):
        if isinstance(config, dict):
          for c in config.values():
          for c in list(config.values()):
            all_configs[c] = builder_group
        else:
          all_configs[config] = builder_group

    # Check that every referenced args file or config actually exists.
    for config, loc in all_configs.items():
    for config, loc in list(all_configs.items()):
      if config.startswith('//'):
        if not self.Exists(self.ToAbsPath(config)):
          errs.append('Unknown args file "%s" referenced from "%s".' %

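Most of these hunks simply wrap dict views in list(). Python 3's .items()/.keys()/.values() return live views, so the wrapping is a mechanical 2to3-style safeguard that only matters if the dict is mutated while being iterated:

configs = {'debug': 1, 'release': 2}
try:
  for name in configs.keys():  # Live view: the mutation below is an error.
    configs['asan'] = 3
except RuntimeError as e:
  print('view iteration failed:', e)

configs = {'debug': 1, 'release': 2}
for name in list(configs.keys()):  # Snapshot: mutation is now safe.
  configs['asan'] = 3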
@@ -500,7 +496,7 @@ class MetaBuildWrapper(object):
    # Figure out the whole list of mixins, and check that every mixin
    # listed by a config or another mixin actually exists.
    referenced_mixins = set()
    for config, mixins in self.configs.items():
    for config, mixins in list(self.configs.items()):
      for mixin in mixins:
        if not mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by config "%s".' %

@@ -1172,7 +1168,7 @@ class MetaBuildWrapper(object):
      self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))

    if cmd[0] == self.executable:
      cmd = ['python'] + cmd[1:]
      cmd = ['vpython3'] + cmd[1:]
    self.Print(*[shell_quoter(arg) for arg in cmd])

  def PrintJSON(self, obj):

@@ -1,4 +1,5 @@
#!/usr/bin/python
#!/usr/bin/env vpython3

# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -11,10 +12,7 @@

import ast
import json
try:
  from StringIO import StringIO  # for Python2
except ImportError:
  from io import StringIO  # for Python3
from io import StringIO  # for Python3
import os
import re
import sys

@@ -35,14 +33,14 @@ class FakeMBW(mb.MetaBuildWrapper):
      self.default_isolate_map = ('c:\\fake_src\\testing\\buildbot\\'
                                  'gn_isolate_map.pyl')
      self.platform = 'win32'
      self.executable = 'c:\\python\\python.exe'
      self.executable = 'c:\\python\\vpython3.exe'
      self.sep = '\\'
      self.cwd = 'c:\\fake_src\\out\\Default'
    else:
      self.src_dir = '/fake_src'
      self.default_config = '/fake_src/tools_webrtc/mb/mb_config.pyl'
      self.default_isolate_map = '/fake_src/testing/buildbot/gn_isolate_map.pyl'
      self.executable = '/usr/bin/python'
      self.executable = '/usr/bin/vpython3'
      self.platform = 'linux2'
      self.sep = '/'
      self.cwd = '/fake_src/out/Default'

@@ -197,7 +195,7 @@ class UnitTest(unittest.TestCase):
        mbw.ToAbsPath('//build/args/bots/fake_group/fake_args_bot.gn'),
        'is_debug = false\n')
    if files:
      for path, contents in files.items():
      for path, contents in list(files.items()):
        mbw.files[path] = contents
    return mbw

@@ -846,8 +844,8 @@ class UnitTest(unittest.TestCase):
      '/fake_src/out/Default/base_unittests.archive.json':
      ("{\"base_unittests\":\"fake_hash\"}"),
      '/fake_src/third_party/depot_tools/cipd_manifest.txt':
      ("# vpython\n"
       "/some/vpython/pkg git_revision:deadbeef\n"),
      ("# vpython3\n"
       "/some/vpython3/pkg git_revision:deadbeef\n"),
    }
    task_json = json.dumps({'tasks': [{'task_id': '00000'}]})
    collect_json = json.dumps({'00000': {'results': {}}})

@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -9,28 +10,27 @@
"""Configuration class for network emulation."""


class ConnectionConfig(object):
    """Configuration containing the characteristics of a network connection."""
class ConnectionConfig:
  """Configuration containing the characteristics of a network connection."""

    def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms,
                 packet_loss_percent, queue_slots):
        self.num = num
        self.name = name
        self.receive_bw_kbps = receive_bw_kbps
        self.send_bw_kbps = send_bw_kbps
        self.delay_ms = delay_ms
        self.packet_loss_percent = packet_loss_percent
        self.queue_slots = queue_slots
  def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms,
               packet_loss_percent, queue_slots):
    self.num = num
    self.name = name
    self.receive_bw_kbps = receive_bw_kbps
    self.send_bw_kbps = send_bw_kbps
    self.delay_ms = delay_ms
    self.packet_loss_percent = packet_loss_percent
    self.queue_slots = queue_slots

    def __str__(self):
        """String representing the configuration.
  def __str__(self):
    """String representing the configuration.

    Returns:
      A string formatted and padded like this example:
      12 Name 375 kbps 375 kbps 10 145 ms 0.1 %
    """
        left_aligned_name = self.name.ljust(24, ' ')
        return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % (
            self.num, left_aligned_name, self.receive_bw_kbps,
            self.send_bw_kbps, self.queue_slots, self.delay_ms,
            self.packet_loss_percent)
    left_aligned_name = self.name.ljust(24, ' ')
    return '%2s %24s %5s kbps %5s kbps %4s %5s ms %3s %%' % (
        self.num, left_aligned_name, self.receive_bw_kbps, self.send_bw_kbps,
        self.queue_slots, self.delay_ms, self.packet_loss_percent)
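The format string lines every preset up in fixed-width columns; printing the docstring's own example values shows the padding (output spacing shown approximately):

cfg = ConnectionConfig(12, 'Name', 375, 375, 145, 0.1, 10)
print(cfg)
# -> '12 Name                       375 kbps   375 kbps   10   145 ms 0.1 %'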

@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -46,170 +47,163 @@ _DEFAULT_PRESET = _PRESETS_DICT[_DEFAULT_PRESET_ID]


class NonStrippingEpilogOptionParser(optparse.OptionParser):
    """Custom parser to let us show the epilog without weird line breaking."""
  """Custom parser to let us show the epilog without weird line breaking."""

    def format_epilog(self, formatter):
        return self.epilog
  def format_epilog(self, formatter):
    return self.epilog


def _GetExternalIp():
    """Finds out the machine's external IP by connecting to google.com."""
    external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    external_socket.connect(('google.com', 80))
    return external_socket.getsockname()[0]
  """Finds out the machine's external IP by connecting to google.com."""
  external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  external_socket.connect(('google.com', 80))
  return external_socket.getsockname()[0]
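_GetExternalIp uses a standard trick: connect() on a UDP socket sends no datagram itself (the name lookup aside); it only makes the kernel choose the outbound interface, whose address getsockname() then reports. Standalone:

import socket

def outbound_ip(probe_host='google.com', probe_port=80):
  # UDP connect() performs routing only; no UDP datagram is sent.
  s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    s.connect((probe_host, probe_port))
    return s.getsockname()[0]
  finally:
    s.close()

# print(outbound_ip())  # e.g. '192.168.1.10' on a typical LAN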


def _ParseArgs():
    """Define and parse the command-line arguments."""
    presets_string = '\n'.join(str(p) for p in _PRESETS)
    parser = NonStrippingEpilogOptionParser(epilog=(
        '\nAvailable presets:\n'
        ' Bandwidth (kbps) Packet\n'
        'ID Name Receive Send Queue Delay loss \n'
        '-- ---- --------- -------- ----- ------- ------\n'
        '%s\n' % presets_string))
    parser.add_option('-p',
                      '--preset',
                      type='int',
                      default=_DEFAULT_PRESET_ID,
                      help=('ConnectionConfig configuration, specified by ID. '
                            'Default: %default'))
    parser.add_option(
        '-r',
        '--receive-bw',
        type='int',
        default=_DEFAULT_PRESET.receive_bw_kbps,
        help=('Receive bandwidth in kilobit/s. Default: %default'))
    parser.add_option('-s',
                      '--send-bw',
                      type='int',
                      default=_DEFAULT_PRESET.send_bw_kbps,
                      help=('Send bandwidth in kilobit/s. Default: %default'))
    parser.add_option('-d',
                      '--delay',
                      type='int',
                      default=_DEFAULT_PRESET.delay_ms,
                      help=('Delay in ms. Default: %default'))
    parser.add_option('-l',
                      '--packet-loss',
                      type='float',
                      default=_DEFAULT_PRESET.packet_loss_percent,
                      help=('Packet loss in %. Default: %default'))
    parser.add_option(
        '-q',
        '--queue',
        type='int',
        default=_DEFAULT_PRESET.queue_slots,
        help=('Queue size as number of slots. Default: %default'))
    parser.add_option(
        '--port-range',
        default='%s,%s' % _DEFAULT_PORT_RANGE,
        help=('Range of ports for constrained network. Specify as '
              'two comma separated integers. Default: %default'))
    parser.add_option(
        '--target-ip',
        default=None,
        help=('The interface IP address to apply the rules for. '
              'Default: the external facing interface IP address.'))
    parser.add_option('-v',
                      '--verbose',
                      action='store_true',
                      default=False,
                      help=('Turn on verbose output. Will print all \'ipfw\' '
                            'commands that are executed.'))
  """Define and parse the command-line arguments."""
  presets_string = '\n'.join(str(p) for p in _PRESETS)
  parser = NonStrippingEpilogOptionParser(epilog=(
      '\nAvailable presets:\n'
      ' Bandwidth (kbps) Packet\n'
      'ID Name Receive Send Queue Delay loss \n'
      '-- ---- --------- -------- ----- ------- ------\n'
      '%s\n' % presets_string))
  parser.add_option('-p',
                    '--preset',
                    type='int',
                    default=_DEFAULT_PRESET_ID,
                    help=('ConnectionConfig configuration, specified by ID. '
                          'Default: %default'))
  parser.add_option('-r',
                    '--receive-bw',
                    type='int',
                    default=_DEFAULT_PRESET.receive_bw_kbps,
                    help=('Receive bandwidth in kilobit/s. Default: %default'))
  parser.add_option('-s',
                    '--send-bw',
                    type='int',
                    default=_DEFAULT_PRESET.send_bw_kbps,
                    help=('Send bandwidth in kilobit/s. Default: %default'))
  parser.add_option('-d',
                    '--delay',
                    type='int',
                    default=_DEFAULT_PRESET.delay_ms,
                    help=('Delay in ms. Default: %default'))
  parser.add_option('-l',
                    '--packet-loss',
                    type='float',
                    default=_DEFAULT_PRESET.packet_loss_percent,
                    help=('Packet loss in %. Default: %default'))
  parser.add_option('-q',
                    '--queue',
                    type='int',
                    default=_DEFAULT_PRESET.queue_slots,
                    help=('Queue size as number of slots. Default: %default'))
  parser.add_option('--port-range',
                    default='%s,%s' % _DEFAULT_PORT_RANGE,
                    help=('Range of ports for constrained network. Specify as '
                          'two comma separated integers. Default: %default'))
  parser.add_option('--target-ip',
                    default=None,
                    help=('The interface IP address to apply the rules for. '
                          'Default: the external facing interface IP address.'))
  parser.add_option('-v',
                    '--verbose',
                    action='store_true',
                    default=False,
                    help=('Turn on verbose output. Will print all \'ipfw\' '
                          'commands that are executed.'))

    options = parser.parse_args()[0]
  options = parser.parse_args()[0]

    # Find preset by ID, if specified.
    if options.preset and not _PRESETS_DICT.has_key(options.preset):
        parser.error('Invalid preset: %s' % options.preset)
  # Find preset by ID, if specified.
  if options.preset and options.preset not in _PRESETS_DICT:
    parser.error('Invalid preset: %s' % options.preset)

    # Simple validation of the IP address, if supplied.
    if options.target_ip:
        try:
            socket.inet_aton(options.target_ip)
        except socket.error:
            parser.error('Invalid IP address specified: %s' %
                         options.target_ip)

    # Convert port range into the desired tuple format.
    try:
        if isinstance(options.port_range, str):
            options.port_range = tuple(
                int(port) for port in options.port_range.split(','))
            if len(options.port_range) != 2:
                parser.error(
                    'Invalid port range specified, please specify two '
                    'integers separated by a comma.')
    except ValueError:
        parser.error('Invalid port range specified.')

    _InitLogging(options.verbose)
    return options
  # Simple validation of the IP address, if supplied.
  if options.target_ip:
    try:
      socket.inet_aton(options.target_ip)
    except socket.error:
      parser.error('Invalid IP address specified: %s' % options.target_ip)

  # Convert port range into the desired tuple format.
  try:
    if isinstance(options.port_range, str):
      options.port_range = tuple(
          int(port) for port in options.port_range.split(','))
      if len(options.port_range) != 2:
        parser.error('Invalid port range specified, please specify two '
                     'integers separated by a comma.')
  except ValueError:
    parser.error('Invalid port range specified.')

  _InitLogging(options.verbose)
  return options

def _InitLogging(verbose):
    """Setup logging."""
    log_level = _DEFAULT_LOG_LEVEL
    if verbose:
        log_level = logging.DEBUG
    logging.basicConfig(level=log_level, format='%(message)s')
  """Setup logging."""
  log_level = _DEFAULT_LOG_LEVEL
  if verbose:
    log_level = logging.DEBUG
  logging.basicConfig(level=log_level, format='%(message)s')


def main():
    options = _ParseArgs()
  options = _ParseArgs()

    # Build a configuration object. Override any preset configuration settings if
    # a value of a setting was also given as a flag.
    connection_config = _PRESETS_DICT[options.preset]
    if options.receive_bw is not _DEFAULT_PRESET.receive_bw_kbps:
        connection_config.receive_bw_kbps = options.receive_bw
    if options.send_bw is not _DEFAULT_PRESET.send_bw_kbps:
        connection_config.send_bw_kbps = options.send_bw
    if options.delay is not _DEFAULT_PRESET.delay_ms:
        connection_config.delay_ms = options.delay
    if options.packet_loss is not _DEFAULT_PRESET.packet_loss_percent:
        connection_config.packet_loss_percent = options.packet_loss
    if options.queue is not _DEFAULT_PRESET.queue_slots:
        connection_config.queue_slots = options.queue
    emulator = network_emulator.NetworkEmulator(connection_config,
                                                options.port_range)
    try:
        emulator.CheckPermissions()
    except network_emulator.NetworkEmulatorError as e:
        logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error)
        return -1
  # Build a configuration object. Override any preset configuration settings if
  # a value of a setting was also given as a flag.
  connection_config = _PRESETS_DICT[options.preset]
  if options.receive_bw is not _DEFAULT_PRESET.receive_bw_kbps:
    connection_config.receive_bw_kbps = options.receive_bw
  if options.send_bw is not _DEFAULT_PRESET.send_bw_kbps:
    connection_config.send_bw_kbps = options.send_bw
  if options.delay is not _DEFAULT_PRESET.delay_ms:
    connection_config.delay_ms = options.delay
  if options.packet_loss is not _DEFAULT_PRESET.packet_loss_percent:
    connection_config.packet_loss_percent = options.packet_loss
  if options.queue is not _DEFAULT_PRESET.queue_slots:
    connection_config.queue_slots = options.queue
  emulator = network_emulator.NetworkEmulator(connection_config,
                                              options.port_range)
  try:
    emulator.CheckPermissions()
  except network_emulator.NetworkEmulatorError as e:
    logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error)
    return -1

    if not options.target_ip:
        external_ip = _GetExternalIp()
    else:
        external_ip = options.target_ip
  if not options.target_ip:
    external_ip = _GetExternalIp()
  else:
    external_ip = options.target_ip

    logging.info('Constraining traffic to/from IP: %s', external_ip)
    try:
        emulator.Emulate(external_ip)
        logging.info(
            'Started network emulation with the following configuration:\n'
            '  Receive bandwidth: %s kbps (%s kB/s)\n'
            '  Send bandwidth   : %s kbps (%s kB/s)\n'
            '  Delay            : %s ms\n'
            '  Packet loss      : %s %%\n'
            '  Queue slots      : %s', connection_config.receive_bw_kbps,
            connection_config.receive_bw_kbps / 8,
            connection_config.send_bw_kbps, connection_config.send_bw_kbps / 8,
            connection_config.delay_ms, connection_config.packet_loss_percent,
            connection_config.queue_slots)
        logging.info('Affected traffic: IP traffic on ports %s-%s',
                     options.port_range[0], options.port_range[1])
        raw_input('Press Enter to abort Network Emulation...')
        logging.info('Flushing all Dummynet rules...')
        network_emulator.Cleanup()
        logging.info('Completed Network Emulation.')
        return 0
    except network_emulator.NetworkEmulatorError as e:
        logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error)
        return -2
  logging.info('Constraining traffic to/from IP: %s', external_ip)
  try:
    emulator.Emulate(external_ip)
    logging.info(
        'Started network emulation with the following configuration:\n'
        '  Receive bandwidth: %s kbps (%s kB/s)\n'
        '  Send bandwidth   : %s kbps (%s kB/s)\n'
        '  Delay            : %s ms\n'
        '  Packet loss      : %s %%\n'
        '  Queue slots      : %s', connection_config.receive_bw_kbps,
        connection_config.receive_bw_kbps / 8, connection_config.send_bw_kbps,
        connection_config.send_bw_kbps / 8, connection_config.delay_ms,
        connection_config.packet_loss_percent, connection_config.queue_slots)
    logging.info('Affected traffic: IP traffic on ports %s-%s',
                 options.port_range[0], options.port_range[1])
    input('Press Enter to abort Network Emulation...')
    logging.info('Flushing all Dummynet rules...')
    network_emulator.Cleanup()
    logging.info('Completed Network Emulation.')
    return 0
  except network_emulator.NetworkEmulatorError as e:
    logging.error('Error: %s\n\nCause: %s', e.fail_msg, e.error)
    return -2


if __name__ == '__main__':
    sys.exit(main())
  sys.exit(main())

@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -16,7 +17,7 @@ import sys


class NetworkEmulatorError(BaseException):
    """Exception raised for errors in the network emulator.
  """Exception raised for errors in the network emulator.

  Attributes:
    fail_msg: User defined error message.

@@ -26,88 +27,83 @@ class NetworkEmulatorError(BaseException):
    stderr: Error output of running the command.
  """

    def __init__(self,
                 fail_msg,
                 cmd=None,
                 returncode=None,
                 output=None,
                 error=None):
        BaseException.__init__(self, fail_msg)
        self.fail_msg = fail_msg
        self.cmd = cmd
        self.returncode = returncode
        self.output = output
        self.error = error
  def __init__(self,
               fail_msg,
               cmd=None,
               returncode=None,
               output=None,
               error=None):
    BaseException.__init__(self, fail_msg)
    self.fail_msg = fail_msg
    self.cmd = cmd
    self.returncode = returncode
    self.output = output
    self.error = error


class NetworkEmulator(object):
    """A network emulator that can constrain the network using Dummynet."""
class NetworkEmulator:
  """A network emulator that can constrain the network using Dummynet."""

    def __init__(self, connection_config, port_range):
        """Constructor.
  def __init__(self, connection_config, port_range):
    """Constructor.

    Args:
      connection_config: A config.ConnectionConfig object containing the
        characteristics for the connection to be emulation.
      port_range: Tuple containing two integers defining the port range.
    """
        self._pipe_counter = 0
        self._rule_counter = 0
        self._port_range = port_range
        self._connection_config = connection_config
    self._pipe_counter = 0
    self._rule_counter = 0
    self._port_range = port_range
    self._connection_config = connection_config

    def Emulate(self, target_ip):
        """Starts a network emulation by setting up Dummynet rules.
  def Emulate(self, target_ip):
    """Starts a network emulation by setting up Dummynet rules.

    Args:
      target_ip: The IP address of the interface that shall be that have the
        network constraints applied to it.
    """
        receive_pipe_id = self._CreateDummynetPipe(
            self._connection_config.receive_bw_kbps,
            self._connection_config.delay_ms,
            self._connection_config.packet_loss_percent,
            self._connection_config.queue_slots)
        logging.debug('Created receive pipe: %s', receive_pipe_id)
        send_pipe_id = self._CreateDummynetPipe(
            self._connection_config.send_bw_kbps,
            self._connection_config.delay_ms,
            self._connection_config.packet_loss_percent,
            self._connection_config.queue_slots)
        logging.debug('Created send pipe: %s', send_pipe_id)
    receive_pipe_id = self._CreateDummynetPipe(
        self._connection_config.receive_bw_kbps,
        self._connection_config.delay_ms,
        self._connection_config.packet_loss_percent,
        self._connection_config.queue_slots)
    logging.debug('Created receive pipe: %s', receive_pipe_id)
    send_pipe_id = self._CreateDummynetPipe(
        self._connection_config.send_bw_kbps, self._connection_config.delay_ms,
        self._connection_config.packet_loss_percent,
        self._connection_config.queue_slots)
    logging.debug('Created send pipe: %s', send_pipe_id)

        # Adding the rules will start the emulation.
        incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any',
                                                    target_ip,
                                                    self._port_range)
        logging.debug('Created incoming rule: %s', incoming_rule_id)
        outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip,
                                                    'any', self._port_range)
        logging.debug('Created outgoing rule: %s', outgoing_rule_id)
    # Adding the rules will start the emulation.
    incoming_rule_id = self._CreateDummynetRule(receive_pipe_id, 'any',
                                                target_ip, self._port_range)
    logging.debug('Created incoming rule: %s', incoming_rule_id)
    outgoing_rule_id = self._CreateDummynetRule(send_pipe_id, target_ip, 'any',
                                                self._port_range)
    logging.debug('Created outgoing rule: %s', outgoing_rule_id)

    @staticmethod
    def CheckPermissions():
        """Checks if permissions are available to run Dummynet commands.
  @staticmethod
  def CheckPermissions():
    """Checks if permissions are available to run Dummynet commands.

    Raises:
      NetworkEmulatorError: If permissions to run Dummynet commands are not
        available.
    """
        try:
            if os.getuid() != 0:
                raise NetworkEmulatorError(
                    'You must run this script with sudo.')
        except AttributeError:
    try:
      if os.getuid() != 0:
        raise NetworkEmulatorError('You must run this script with sudo.')
    except AttributeError as permission_error:

            # AttributeError will be raised on Windows.
            if ctypes.windll.shell32.IsUserAnAdmin() == 0:
                raise NetworkEmulatorError(
                    'You must run this script with administrator'
                    ' privileges.')
      # AttributeError will be raised on Windows.
      if ctypes.windll.shell32.IsUserAnAdmin() == 0:
        raise NetworkEmulatorError('You must run this script with administrator'
                                   ' privileges.') from permission_error

    def _CreateDummynetRule(self, pipe_id, from_address, to_address,
                            port_range):
        """Creates a network emulation rule and returns its ID.
  def _CreateDummynetRule(self, pipe_id, from_address, to_address, port_range):
    """Creates a network emulation rule and returns its ID.

    Args:
      pipe_id: integer ID of the pipe.

@@ -121,22 +117,20 @@ class NetworkEmulator(object):
      The ID of the rule, starting at 100. The rule ID increments with 100 for
      each rule being added.
    """
        self._rule_counter += 100
        add_part = [
            'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from',
            from_address, 'to', to_address
        ]
        _RunIpfwCommand(add_part +
                        ['src-port', '%s-%s' % port_range],
                        'Failed to add Dummynet src-port rule.')
        _RunIpfwCommand(add_part +
                        ['dst-port', '%s-%s' % port_range],
                        'Failed to add Dummynet dst-port rule.')
        return self._rule_counter
    self._rule_counter += 100
    add_part = [
        'add', self._rule_counter, 'pipe', pipe_id, 'ip', 'from', from_address,
        'to', to_address
    ]
    _RunIpfwCommand(add_part + ['src-port', '%s-%s' % port_range],
                    'Failed to add Dummynet src-port rule.')
    _RunIpfwCommand(add_part + ['dst-port', '%s-%s' % port_range],
                    'Failed to add Dummynet dst-port rule.')
    return self._rule_counter

    def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms,
                            packet_loss_percent, queue_slots):
        """Creates a Dummynet pipe and return its ID.
  def _CreateDummynetPipe(self, bandwidth_kbps, delay_ms, packet_loss_percent,
                          queue_slots):
    """Creates a Dummynet pipe and return its ID.

    Args:
      bandwidth_kbps: Bandwidth.

@@ -146,34 +140,33 @@ class NetworkEmulator(object):
    Returns:
      The ID of the pipe, starting at 1.
    """
        self._pipe_counter += 1
        cmd = [
            'pipe', self._pipe_counter, 'config', 'bw',
            str(bandwidth_kbps / 8) + 'KByte/s', 'delay',
            '%sms' % delay_ms, 'plr', (packet_loss_percent / 100.0), 'queue',
            queue_slots
        ]
        error_message = 'Failed to create Dummynet pipe. '
        if sys.platform.startswith('linux'):
            error_message += (
                'Make sure you have loaded the ipfw_mod.ko module to '
                'your kernel (sudo insmod /path/to/ipfw_mod.ko).')
        _RunIpfwCommand(cmd, error_message)
        return self._pipe_counter
    self._pipe_counter += 1
    cmd = [
        'pipe', self._pipe_counter, 'config', 'bw',
        str(bandwidth_kbps / 8) + 'KByte/s', 'delay',
        '%sms' % delay_ms, 'plr', (packet_loss_percent / 100.0), 'queue',
        queue_slots
    ]
    error_message = 'Failed to create Dummynet pipe. '
    if sys.platform.startswith('linux'):
      error_message += ('Make sure you have loaded the ipfw_mod.ko module to '
                        'your kernel (sudo insmod /path/to/ipfw_mod.ko).')
    _RunIpfwCommand(cmd, error_message)
    return self._pipe_counter


def Cleanup():
    """Stops the network emulation by flushing all Dummynet rules.
  """Stops the network emulation by flushing all Dummynet rules.

  Notice that this will flush any rules that may have been created previously
  before starting the emulation.
  """
    _RunIpfwCommand(['-f', 'flush'], 'Failed to flush Dummynet rules!')
    _RunIpfwCommand(['-f', 'pipe', 'flush'], 'Failed to flush Dummynet pipes!')
  _RunIpfwCommand(['-f', 'flush'], 'Failed to flush Dummynet rules!')
  _RunIpfwCommand(['-f', 'pipe', 'flush'], 'Failed to flush Dummynet pipes!')


def _RunIpfwCommand(command, fail_msg=None):
    """Executes a command and prefixes the appropriate command for
  """Executes a command and prefixes the appropriate command for
  Windows or Linux/UNIX.

  Args:

@@ -184,19 +177,19 @@ def _RunIpfwCommand(command, fail_msg=None):
    NetworkEmulatorError: If command fails a message is set by the fail_msg
      parameter.
  """
    if sys.platform == 'win32':
        ipfw_command = ['ipfw.exe']
    else:
        ipfw_command = ['sudo', '-n', 'ipfw']
  if sys.platform == 'win32':
    ipfw_command = ['ipfw.exe']
  else:
    ipfw_command = ['sudo', '-n', 'ipfw']

    cmd_list = ipfw_command[:] + [str(x) for x in command]
    cmd_string = ' '.join(cmd_list)
    logging.debug('Running command: %s', cmd_string)
    process = subprocess.Popen(cmd_list,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    output, error = process.communicate()
    if process.returncode != 0:
        raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode,
                                   output, error)
    return output.strip()
  cmd_list = ipfw_command[:] + [str(x) for x in command]
  cmd_string = ' '.join(cmd_list)
  logging.debug('Running command: %s', cmd_string)
  process = subprocess.Popen(cmd_list,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
  output, error = process.communicate()
  if process.returncode != 0:
    raise NetworkEmulatorError(fail_msg, cmd_string, process.returncode, output,
                               error)
  return output.strip()

|
|||
#!/usr/bin/env python
|
||||
#!/usr/bin/env vpython3
|
||||
|
||||
# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
|
@ -8,12 +9,13 @@
|
|||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
import datetime
|
||||
import httplib2
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
import zlib
|
||||
|
||||
import httplib2
|
||||
|
||||
from tracing.value import histogram
|
||||
from tracing.value import histogram_set
|
||||
from tracing.value.diagnostics import generic_set
|
||||
|
@ -21,52 +23,51 @@ from tracing.value.diagnostics import reserved_infos
|
|||
|
||||
|
||||
def _GenerateOauthToken():
|
||||
args = ['luci-auth', 'token']
|
||||
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if p.wait() == 0:
|
||||
output = p.stdout.read()
|
||||
return output.strip()
|
||||
else:
|
||||
raise RuntimeError(
|
||||
'Error generating authentication token.\nStdout: %s\nStderr:%s' %
|
||||
(p.stdout.read(), p.stderr.read()))
|
||||
args = ['luci-auth', 'token']
|
||||
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if p.wait() == 0:
|
||||
output = p.stdout.read()
|
||||
return output.strip()
|
||||
raise RuntimeError(
|
||||
'Error generating authentication token.\nStdout: %s\nStderr:%s' %
|
||||
(p.stdout.read(), p.stderr.read()))
|
||||
|
||||
|
||||
def _CreateHeaders(oauth_token):
|
||||
return {'Authorization': 'Bearer %s' % oauth_token}
|
||||
return {'Authorization': 'Bearer %s' % oauth_token}
|
||||
|
||||
|
||||
def _SendHistogramSet(url, histograms):
|
||||
"""Make a HTTP POST with the given JSON to the Performance Dashboard.
|
||||
"""Make a HTTP POST with the given JSON to the Performance Dashboard.
|
||||
|
||||
Args:
|
||||
url: URL of Performance Dashboard instance, e.g.
|
||||
"https://chromeperf.appspot.com".
|
||||
histograms: a histogram set object that contains the data to be sent.
|
||||
"""
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
|
||||
serialized = json.dumps(_ApplyHacks(histograms.AsDicts()), indent=4)
|
||||
serialized = json.dumps(_ApplyHacks(histograms.AsDicts()), indent=4)
|
||||
|
||||
if url.startswith('http://localhost'):
|
||||
# The catapult server turns off compression in developer mode.
|
||||
data = serialized
|
||||
else:
|
||||
data = zlib.compress(serialized)
|
||||
if url.startswith('http://localhost'):
|
||||
# The catapult server turns off compression in developer mode.
|
||||
data = serialized
|
||||
else:
|
||||
data = zlib.compress(serialized)
|
||||
|
||||
print 'Sending %d bytes to %s.' % (len(data), url + '/add_histograms')
|
||||
print('Sending %d bytes to %s.' % (len(data), url + '/add_histograms'))
|
||||
|
||||
http = httplib2.Http()
|
||||
response, content = http.request(url + '/add_histograms',
|
||||
method='POST',
|
||||
body=data,
|
||||
headers=headers)
|
||||
return response, content
|
||||
http = httplib2.Http()
|
||||
response, content = http.request(url + '/add_histograms',
|
||||
method='POST',
|
||||
body=data,
|
||||
headers=headers)
|
||||
return response, content
|
||||
|
||||
|
||||
def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
|
||||
wait_polling_period):
|
||||
"""Make a HTTP GET requests to the Performance Dashboard untill upload
|
||||
"""Make a HTTP GET requests to the Performance Dashboard untill upload
|
||||
status is known or the time is out.
|
||||
|
||||
Args:
|
||||
|
@ -79,42 +80,43 @@ def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
|
|||
wait_polling_period: (datetime.timedelta) Performance Dashboard will be
|
||||
polled every wait_polling_period amount of time.
|
||||
"""
|
||||
assert wait_polling_period <= wait_timeout
|
||||
assert wait_polling_period <= wait_timeout
|
||||
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
http = httplib2.Http()
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
http = httplib2.Http()
|
||||
|
||||
oauth_refreshed = False
|
||||
response = None
|
||||
resp_json = None
|
||||
oauth_refreshed = False
|
||||
response = None
|
||||
resp_json = None
|
||||
current_time = datetime.datetime.now()
|
||||
end_time = current_time + wait_timeout
|
||||
next_poll_time = current_time + wait_polling_period
|
||||
while datetime.datetime.now() < end_time:
|
||||
current_time = datetime.datetime.now()
|
||||
end_time = current_time + wait_timeout
|
||||
next_poll_time = current_time + wait_polling_period
|
||||
while datetime.datetime.now() < end_time:
|
||||
current_time = datetime.datetime.now()
|
||||
if next_poll_time > current_time:
|
||||
time.sleep((next_poll_time - current_time).total_seconds())
|
||||
next_poll_time = datetime.datetime.now() + wait_polling_period
|
||||
if next_poll_time > current_time:
|
||||
time.sleep((next_poll_time - current_time).total_seconds())
|
||||
next_poll_time = datetime.datetime.now() + wait_polling_period
|
||||
|
||||
response, content = http.request(url + '/uploads/' + upload_token,
|
||||
method='GET', headers=headers)
|
||||
response, content = http.request(url + '/uploads/' + upload_token,
|
||||
method='GET',
|
||||
headers=headers)
|
||||
|
||||
print 'Upload state polled. Response: %r.' % content
|
||||
print('Upload state polled. Response: %r.' % content)
|
||||
|
||||
if not oauth_refreshed and response.status == 403:
|
||||
print 'Oauth token refreshed. Continue polling.'
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
oauth_refreshed = True
|
||||
continue
|
||||
if not oauth_refreshed and response.status == 403:
|
||||
print('Oauth token refreshed. Continue polling.')
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
oauth_refreshed = True
|
||||
continue
|
||||
|
||||
if response.status != 200:
|
||||
break
|
||||
if response.status != 200:
|
||||
break
|
||||
|
||||
resp_json = json.loads(content)
|
||||
if resp_json['state'] == 'COMPLETED' or resp_json['state'] == 'FAILED':
|
||||
break
|
||||
resp_json = json.loads(content)
|
||||
if resp_json['state'] == 'COMPLETED' or resp_json['state'] == 'FAILED':
|
||||
break
|
||||
|
||||
return response, resp_json
|
||||
return response, resp_json
|
||||
|
||||
|
||||
# Because of an issues on the Dashboard side few measurements over a large set
|
||||
|
@ -124,7 +126,7 @@ def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
|
|||
def _CheckFullUploadInfo(url, upload_token,
|
||||
min_measurements_amount=50,
|
||||
max_failed_measurements_percent=0.03):
|
||||
"""Make a HTTP GET requests to the Performance Dashboard to get full info
|
||||
"""Make a HTTP GET requests to the Performance Dashboard to get full info
|
||||
about upload (including measurements). Checks if upload is correct despite
|
||||
not having status "COMPLETED".
|
||||
|
||||
|
@ -138,125 +140,123 @@ def _CheckFullUploadInfo(url, upload_token,
|
|||
max_failed_measurements_percent: maximal percent of failured measurements
|
||||
to tolerate.
|
||||
"""
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
http = httplib2.Http()
|
||||
headers = _CreateHeaders(_GenerateOauthToken())
|
||||
http = httplib2.Http()
|
||||
|
||||
response, content = http.request(url + '/uploads/' + upload_token +
|
||||
'?additional_info=measurements',
|
||||
method='GET', headers=headers)
|
||||
|
||||
if response.status != 200:
|
||||
print 'Failed to reach the dashboard to get full upload info.'
|
||||
return False
|
||||
|
||||
resp_json = json.loads(content)
|
||||
print 'Full upload info: %s.' % json.dumps(resp_json, indent=4)
|
||||
|
||||
if 'measurements' in resp_json:
|
||||
measurements_cnt = len(resp_json['measurements'])
|
||||
not_completed_state_cnt = len([
|
||||
m for m in resp_json['measurements']
|
||||
if m['state'] != 'COMPLETED'
|
||||
])
|
||||
|
||||
if (measurements_cnt >= min_measurements_amount and
|
||||
(not_completed_state_cnt / (measurements_cnt * 1.0) <=
|
||||
max_failed_measurements_percent)):
|
||||
print('Not all measurements were confirmed to upload. '
|
||||
'Measurements count: %d, failed to upload or timed out: %d' %
|
||||
(measurements_cnt, not_completed_state_cnt))
|
||||
return True
|
||||
response, content = http.request(url + '/uploads/' + upload_token +
|
||||
'?additional_info=measurements',
|
||||
method='GET',
|
||||
headers=headers)
|
||||
|
||||
if response.status != 200:
|
||||
print('Failed to reach the dashboard to get full upload info.')
|
||||
return False
|
||||
|
||||
resp_json = json.loads(content)
|
||||
print('Full upload info: %s.' % json.dumps(resp_json, indent=4))
|
||||
|
||||
if 'measurements' in resp_json:
|
||||
measurements_cnt = len(resp_json['measurements'])
|
||||
not_completed_state_cnt = len(
|
||||
[m for m in resp_json['measurements'] if m['state'] != 'COMPLETED'])
|
||||
|
||||
if (measurements_cnt >= min_measurements_amount
|
||||
and (not_completed_state_cnt /
|
||||
(measurements_cnt * 1.0) <= max_failed_measurements_percent)):
|
||||
print(('Not all measurements were confirmed to upload. '
|
||||
'Measurements count: %d, failed to upload or timed out: %d' %
|
||||
(measurements_cnt, not_completed_state_cnt)))
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# TODO(https://crbug.com/1029452): HACKHACK
|
||||
# Remove once we have doubles in the proto and handle -infinity correctly.
|
||||
def _ApplyHacks(dicts):
|
||||
def _NoInf(value):
|
||||
if value == float('inf'):
|
||||
return histogram.JS_MAX_VALUE
|
||||
if value == float('-inf'):
|
||||
return -histogram.JS_MAX_VALUE
|
||||
return value
|
||||
def _NoInf(value):
|
||||
if value == float('inf'):
|
||||
return histogram.JS_MAX_VALUE
|
||||
if value == float('-inf'):
|
||||
return -histogram.JS_MAX_VALUE
|
||||
return value
|
||||
|
||||
for d in dicts:
|
||||
if 'running' in d:
|
||||
d['running'] = [_NoInf(value) for value in d['running']]
|
||||
if 'sampleValues' in d:
|
||||
d['sampleValues'] = [_NoInf(value) for value in d['sampleValues']]
|
||||
for d in dicts:
|
||||
if 'running' in d:
|
||||
d['running'] = [_NoInf(value) for value in d['running']]
|
||||
if 'sampleValues' in d:
|
||||
d['sampleValues'] = [_NoInf(value) for value in d['sampleValues']]
|
||||
|
||||
return dicts
|
||||
return dicts
|
||||
|
||||
|
||||
def _LoadHistogramSetFromProto(options):
|
||||
hs = histogram_set.HistogramSet()
|
||||
with options.input_results_file as f:
|
||||
hs.ImportProto(f.read())
|
||||
hs = histogram_set.HistogramSet()
|
||||
with options.input_results_file as f:
|
||||
hs.ImportProto(f.read())
|
||||
|
||||
return hs
|
||||
return hs
|
||||
|
||||
|
||||
def _AddBuildInfo(histograms, options):
|
||||
common_diagnostics = {
|
||||
reserved_infos.MASTERS: options.perf_dashboard_machine_group,
|
||||
reserved_infos.BOTS: options.bot,
|
||||
reserved_infos.POINT_ID: options.commit_position,
|
||||
reserved_infos.BENCHMARKS: options.test_suite,
|
||||
reserved_infos.WEBRTC_REVISIONS: str(options.webrtc_git_hash),
|
||||
reserved_infos.BUILD_URLS: options.build_page_url,
|
||||
}
|
||||
common_diagnostics = {
|
||||
reserved_infos.MASTERS: options.perf_dashboard_machine_group,
|
||||
reserved_infos.BOTS: options.bot,
|
||||
reserved_infos.POINT_ID: options.commit_position,
|
||||
reserved_infos.BENCHMARKS: options.test_suite,
|
||||
reserved_infos.WEBRTC_REVISIONS: str(options.webrtc_git_hash),
|
||||
reserved_infos.BUILD_URLS: options.build_page_url,
|
||||
}
|
||||
|
||||
for k, v in common_diagnostics.items():
|
||||
histograms.AddSharedDiagnosticToAllHistograms(
|
||||
k.name, generic_set.GenericSet([v]))
|
||||
for k, v in list(common_diagnostics.items()):
|
||||
histograms.AddSharedDiagnosticToAllHistograms(k.name,
|
||||
generic_set.GenericSet([v]))
|
||||
|
||||
|
||||
def _DumpOutput(histograms, output_file):
|
||||
with output_file:
|
||||
json.dump(_ApplyHacks(histograms.AsDicts()), output_file, indent=4)
|
||||
with output_file:
|
||||
json.dump(_ApplyHacks(histograms.AsDicts()), output_file, indent=4)
|
||||
|
||||
|
||||
def UploadToDashboard(options):
|
||||
histograms = _LoadHistogramSetFromProto(options)
|
||||
_AddBuildInfo(histograms, options)
|
||||
histograms = _LoadHistogramSetFromProto(options)
|
||||
_AddBuildInfo(histograms, options)
|
||||
|
||||
if options.output_json_file:
|
||||
_DumpOutput(histograms, options.output_json_file)
|
||||
if options.output_json_file:
|
||||
_DumpOutput(histograms, options.output_json_file)
|
||||
|
||||
response, content = _SendHistogramSet(options.dashboard_url, histograms)
|
||||
response, content = _SendHistogramSet(options.dashboard_url, histograms)
|
||||
|
||||
if response.status != 200:
|
||||
print('Upload failed with %d: %s\n\n%s' % (response.status,
|
||||
response.reason, content))
|
||||
return 1
|
||||
|
||||
upload_token = json.loads(content).get('token')
|
||||
if not options.wait_for_upload or not upload_token:
|
||||
print('Received 200 from dashboard. ',
|
||||
'Not waiting for the upload status confirmation.')
|
||||
return 0
|
||||
|
||||
response, resp_json = _WaitForUploadConfirmation(
|
||||
options.dashboard_url,
|
||||
upload_token,
|
||||
datetime.timedelta(seconds=options.wait_timeout_sec),
|
||||
datetime.timedelta(seconds=options.wait_polling_period_sec))
|
||||
|
||||
if ((resp_json and resp_json['state'] == 'COMPLETED') or
|
||||
_CheckFullUploadInfo(options.dashboard_url, upload_token)):
|
||||
print 'Upload completed.'
|
||||
return 0
|
||||
|
||||
if response.status != 200:
|
||||
print('Upload status poll failed with %d: %s' % (response.status,
|
||||
response.reason))
|
||||
return 1
|
||||
|
||||
if resp_json['state'] == 'FAILED':
|
||||
print 'Upload failed.'
|
||||
return 1
|
||||
|
||||
print('Upload wasn\'t completed in a given time: %d seconds.' %
|
||||
options.wait_timeout_sec)
|
||||
if response.status != 200:
|
||||
print(('Upload failed with %d: %s\n\n%s' %
|
||||
(response.status, response.reason, content)))
|
||||
return 1
|
||||
|
||||
upload_token = json.loads(content).get('token')
|
||||
if not options.wait_for_upload or not upload_token:
|
||||
print(('Received 200 from dashboard. ',
|
||||
'Not waiting for the upload status confirmation.'))
|
||||
return 0
|
||||
|
||||
response, resp_json = _WaitForUploadConfirmation(
|
||||
options.dashboard_url, upload_token,
|
||||
datetime.timedelta(seconds=options.wait_timeout_sec),
|
||||
datetime.timedelta(seconds=options.wait_polling_period_sec))
|
||||
|
||||
if ((resp_json and resp_json['state'] == 'COMPLETED')
|
||||
or _CheckFullUploadInfo(options.dashboard_url, upload_token)):
|
||||
print('Upload completed.')
|
||||
return 0
|
||||
|
||||
if response.status != 200:
|
||||
print(('Upload status poll failed with %d: %s' %
|
||||
(response.status, response.reason)))
|
||||
return 1
|
||||
|
||||
if resp_json['state'] == 'FAILED':
|
||||
print('Upload failed.')
|
||||
return 1
|
||||
|
||||
print(('Upload wasn\'t completed in a given time: %d seconds.' %
|
||||
options.wait_timeout_sec))
|
||||
return 1
|
||||
|
|
|
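Taken together, the uploader above loads a HistogramSet proto, stamps it with build diagnostics, POSTs it to the dashboard, and optionally polls for confirmation. A hedged sketch of driving `UploadToDashboard` directly; in the real flow the options object comes from the argparse parser in the next file, so this hand-built namespace and all its values are illustrative only:

```python
import argparse

import catapult_uploader

options = argparse.Namespace(
    dashboard_url='http://localhost:8080',             # assumed local dashboard
    input_results_file=open('perf_results.pb', 'rb'),  # assumed proto file name
    output_json_file=None,
    perf_dashboard_machine_group='client.webrtc.example',  # illustrative values
    bot='webrtc-linux-tests',
    test_suite='example_tests',
    webrtc_git_hash='deadbeef',
    commit_position=12345,
    build_page_url='https://example.org/build/1',
    wait_for_upload=False,  # skip the confirmation-polling path
    wait_timeout_sec=1200,
    wait_polling_period_sec=120)

exit_code = catapult_uploader.UploadToDashboard(options)  # 0 on success, 1 on failure
```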
@@ -1,4 +1,5 @@
#!/usr/bin/env vpython
#!/usr/bin/env vpython3

# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -28,95 +29,110 @@ import google.protobuf  # pylint: disable=unused-import


def _CreateParser():
    parser = argparse.ArgumentParser()
    parser.add_argument('--perf-dashboard-machine-group', required=True,
                        help='The "master" the bots are grouped under. This '
                        'string is the group in the the perf dashboard path '
                        'group/bot/perf_id/metric/subtest.')
    parser.add_argument('--bot', required=True,
                        help='The bot running the test (e.g. '
                        'webrtc-win-large-tests).')
    parser.add_argument('--test-suite', required=True,
                        help='The key for the test in the dashboard (i.e. what '
                        'you select in the top-level test suite selector in '
                        'the dashboard')
    parser.add_argument('--webrtc-git-hash', required=True,
                        help='webrtc.googlesource.com commit hash.')
    parser.add_argument('--commit-position', type=int, required=True,
                        help='Commit pos corresponding to the git hash.')
    parser.add_argument('--build-page-url', required=True,
                        help='URL to the build page for this build.')
    parser.add_argument('--dashboard-url', required=True,
                        help='Which dashboard to use.')
    parser.add_argument('--input-results-file', type=argparse.FileType(),
                        required=True,
                        help='A HistogramSet proto file with output from '
                        'WebRTC tests.')
    parser.add_argument('--output-json-file', type=argparse.FileType('w'),
                        help='Where to write the output (for debugging).')
    parser.add_argument('--outdir', required=True,
                        help='Path to the local out/ dir (usually out/Default)')
    parser.add_argument('--wait-for-upload', action='store_true',
                        help='If specified, script will wait untill Chrome '
                        'perf dashboard confirms that the data was succesfully '
                        'proccessed and uploaded')
    parser.add_argument('--wait-timeout-sec', type=int, default=1200,
                        help='Used only if wait-for-upload is True. Maximum '
                        'amount of time in seconds that the script will wait '
                        'for the confirmation.')
    parser.add_argument('--wait-polling-period-sec', type=int, default=120,
                        help='Used only if wait-for-upload is True. Status '
                        'will be requested from the Dashboard every '
                        'wait-polling-period-sec seconds.')
    return parser
  parser = argparse.ArgumentParser()
  parser.add_argument('--perf-dashboard-machine-group',
                      required=True,
                      help='The "master" the bots are grouped under. This '
                      'string is the group in the the perf dashboard path '
                      'group/bot/perf_id/metric/subtest.')
  parser.add_argument('--bot',
                      required=True,
                      help='The bot running the test (e.g. '
                      'webrtc-win-large-tests).')
  parser.add_argument('--test-suite',
                      required=True,
                      help='The key for the test in the dashboard (i.e. what '
                      'you select in the top-level test suite selector in '
                      'the dashboard')
  parser.add_argument('--webrtc-git-hash',
                      required=True,
                      help='webrtc.googlesource.com commit hash.')
  parser.add_argument('--commit-position',
                      type=int,
                      required=True,
                      help='Commit pos corresponding to the git hash.')
  parser.add_argument('--build-page-url',
                      required=True,
                      help='URL to the build page for this build.')
  parser.add_argument('--dashboard-url',
                      required=True,
                      help='Which dashboard to use.')
  parser.add_argument('--input-results-file',
                      type=argparse.FileType(),
                      required=True,
                      help='A HistogramSet proto file with output from '
                      'WebRTC tests.')
  parser.add_argument('--output-json-file',
                      type=argparse.FileType('w'),
                      help='Where to write the output (for debugging).')
  parser.add_argument('--outdir',
                      required=True,
                      help='Path to the local out/ dir (usually out/Default)')
  parser.add_argument('--wait-for-upload',
                      action='store_true',
                      help='If specified, script will wait untill Chrome '
                      'perf dashboard confirms that the data was succesfully '
                      'proccessed and uploaded')
  parser.add_argument('--wait-timeout-sec',
                      type=int,
                      default=1200,
                      help='Used only if wait-for-upload is True. Maximum '
                      'amount of time in seconds that the script will wait '
                      'for the confirmation.')
  parser.add_argument('--wait-polling-period-sec',
                      type=int,
                      default=120,
                      help='Used only if wait-for-upload is True. Status '
                      'will be requested from the Dashboard every '
                      'wait-polling-period-sec seconds.')
  return parser


def _ConfigurePythonPath(options):
    # We just yank the python scripts we require into the PYTHONPATH. You could
    # also imagine a solution where we use for instance
    # protobuf:py_proto_runtime to copy catapult and protobuf code to out/.
    # This is the convention in Chromium and WebRTC python scripts. We do need
    # to build histogram_pb2 however, so that's why we add out/ to sys.path
    # below.
    #
    # It would be better if there was an equivalent to py_binary in GN, but
    # there's not.
    script_dir = os.path.dirname(os.path.realpath(__file__))
    checkout_root = os.path.abspath(
        os.path.join(script_dir, os.pardir, os.pardir))
  # We just yank the python scripts we require into the PYTHONPATH. You could
  # also imagine a solution where we use for instance
  # protobuf:py_proto_runtime to copy catapult and protobuf code to out/.
  # This is the convention in Chromium and WebRTC python scripts. We do need
  # to build histogram_pb2 however, so that's why we add out/ to sys.path
  # below.
  #
  # It would be better if there was an equivalent to py_binary in GN, but
  # there's not.
  script_dir = os.path.dirname(os.path.realpath(__file__))
  checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir,
                                               os.pardir))

    sys.path.insert(
        0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing'))
    sys.path.insert(
        0, os.path.join(checkout_root, 'third_party', 'protobuf', 'python'))
  sys.path.insert(
      0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing'))
  sys.path.insert(
      0, os.path.join(checkout_root, 'third_party', 'protobuf', 'python'))

    # The webrtc_dashboard_upload gn rule will build the protobuf stub for
    # python, so put it in the path for this script before we attempt to import
    # it.
    histogram_proto_path = os.path.join(options.outdir, 'pyproto', 'tracing',
                                        'tracing', 'proto')
    sys.path.insert(0, histogram_proto_path)
  # The webrtc_dashboard_upload gn rule will build the protobuf stub for
  # python, so put it in the path for this script before we attempt to import
  # it.
  histogram_proto_path = os.path.join(options.outdir, 'pyproto', 'tracing',
                                      'tracing', 'proto')
  sys.path.insert(0, histogram_proto_path)

    # Fail early in case the proto hasn't been built.
    from tracing.proto import histogram_proto
    if not histogram_proto.HAS_PROTO:
        raise ImportError(
            'Could not find histogram_pb2. You need to build the '
            'webrtc_dashboard_upload target before invoking this '
            'script. Expected to find '
            'histogram_pb2.py in %s.' % histogram_proto_path)
  # Fail early in case the proto hasn't been built.
  from tracing.proto import histogram_proto
  if not histogram_proto.HAS_PROTO:
    raise ImportError('Could not find histogram_pb2. You need to build the '
                      'webrtc_dashboard_upload target before invoking this '
                      'script. Expected to find '
                      'histogram_pb2.py in %s.' % histogram_proto_path)


def main(args):
    parser = _CreateParser()
    options = parser.parse_args(args)
  parser = _CreateParser()
  options = parser.parse_args(args)

    _ConfigurePythonPath(options)
  _ConfigurePythonPath(options)

    import catapult_uploader
  import catapult_uploader

    return catapult_uploader.UploadToDashboard(options)
  return catapult_uploader.UploadToDashboard(options)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
  sys.exit(main(sys.argv[1:]))
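A sketch of invoking this entry point programmatically with the flags defined in `_CreateParser`; every value is a placeholder, the module name is assumed from context, and the `webrtc_dashboard_upload` GN target must have been built so the histogram proto stub exists under `--outdir`:

```python
import sys

import webrtc_dashboard_upload  # assumed module name for the script above

sys.exit(webrtc_dashboard_upload.main([
    '--perf-dashboard-machine-group', 'client.webrtc.example',
    '--bot', 'webrtc-linux-tests',
    '--test-suite', 'example_tests',
    '--webrtc-git-hash', 'deadbeef',
    '--commit-position', '12345',
    '--build-page-url', 'https://example.org/build/1',
    '--dashboard-url', 'http://localhost:8080',
    '--input-results-file', 'out/Default/perf_results.pb',
    '--outdir', 'out/Default',
]))
```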
@@ -1,3 +1,5 @@
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -18,11 +20,11 @@ import tempfile


def FindSrcDirPath():
    """Returns the abs path to the src/ dir of the project."""
    src_dir = os.path.dirname(os.path.abspath(__file__))
    while os.path.basename(src_dir) != 'src':
        src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
    return src_dir
  """Returns the abs path to the src/ dir of the project."""
  src_dir = os.path.dirname(os.path.abspath(__file__))
  while os.path.basename(src_dir) != 'src':
    src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
  return src_dir


SRC_DIR = FindSrcDirPath()

@@ -31,16 +33,16 @@ import find_depot_tools


def RunGnCommand(args, root_dir=None):
    """Runs `gn` with provided args and return error if any."""
    try:
        command = [
            sys.executable,
            os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py')
        ] + args
        subprocess.check_output(command, cwd=root_dir)
    except subprocess.CalledProcessError as err:
        return err.output
    return None
  """Runs `gn` with provided args and return error if any."""
  try:
    command = [
        sys.executable,
        os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'gn.py')
    ] + args
    subprocess.check_output(command, cwd=root_dir)
  except subprocess.CalledProcessError as err:
    return err.output
  return None


# GN_ERROR_RE matches the summary of an error output by `gn check`.

@@ -50,49 +52,49 @@ GN_ERROR_RE = re.compile(r'^ERROR .+(?:\n.*[^_\n].*$)+', re.MULTILINE)


def RunGnCheck(root_dir=None):
    """Runs `gn gen --check` with default args to detect mismatches between
  """Runs `gn gen --check` with default args to detect mismatches between
  #includes and dependencies in the BUILD.gn files, as well as general build
  errors.

  Returns a list of error summary strings.
  """
    out_dir = tempfile.mkdtemp('gn')
    try:
        error = RunGnCommand(['gen', '--check', out_dir], root_dir)
    finally:
        shutil.rmtree(out_dir, ignore_errors=True)
    return GN_ERROR_RE.findall(error) if error else []
  out_dir = tempfile.mkdtemp('gn')
  try:
    error = RunGnCommand(['gen', '--check', out_dir], root_dir)
  finally:
    shutil.rmtree(out_dir, ignore_errors=True)
  return GN_ERROR_RE.findall(error.decode('utf-8')) if error else []


def RunNinjaCommand(args, root_dir=None):
    """Runs ninja quietly. Any failure (e.g. clang not found) is
  """Runs ninja quietly. Any failure (e.g. clang not found) is
  silently discarded, since this is unlikely an error in submitted CL."""
    command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args
    p = subprocess.Popen(command,
                         cwd=root_dir,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    out, _ = p.communicate()
    return out
  command = [os.path.join(find_depot_tools.DEPOT_TOOLS_PATH, 'ninja')] + args
  p = subprocess.Popen(command,
                       cwd=root_dir,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
  out, _ = p.communicate()
  return out


def GetClangTidyPath():
    """POC/WIP! Use the one we have, even it doesn't match clang's version."""
    tidy = ('third_party/android_ndk/toolchains/'
            'llvm/prebuilt/linux-x86_64/bin/clang-tidy')
    return os.path.join(SRC_DIR, tidy)
  """POC/WIP! Use the one we have, even it doesn't match clang's version."""
  tidy = ('third_party/android_ndk/toolchains/'
          'llvm/prebuilt/linux-x86_64/bin/clang-tidy')
  return os.path.join(SRC_DIR, tidy)


def GetCompilationDb(root_dir=None):
    """Run ninja compdb tool to get proper flags, defines and include paths."""
    # The compdb tool expect a rule.
    commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir))
    # Turns 'file' field into a key.
    return {v['file']: v for v in commands}
  """Run ninja compdb tool to get proper flags, defines and include paths."""
  # The compdb tool expect a rule.
  commands = json.loads(RunNinjaCommand(['-t', 'compdb', 'cxx'], root_dir))
  # Turns 'file' field into a key.
  return {v['file']: v for v in commands}


def GetCompilationCommand(filepath, gn_args, work_dir):
    """Get the whole command used to compile one cc file.
  """Get the whole command used to compile one cc file.
  Typically, clang++ with flags, defines and include paths.

  Args:
@@ -103,30 +105,30 @@ def GetCompilationCommand(filepath, gn_args, work_dir):
  Returns:
    Command as a list, ready to be consumed by subprocess.Popen.
  """
    gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir])
    if gn_errors:
        raise (RuntimeError('FYI, cannot complete check due to gn error:\n%s\n'
                            'Please open a bug.' % gn_errors))
  gn_errors = RunGnCommand(['gen'] + gn_args + [work_dir])
  if gn_errors:
    raise RuntimeError('FYI, cannot complete check due to gn error:\n%s\n'
                       'Please open a bug.' % gn_errors)

    # Needed for single file compilation.
    commands = GetCompilationDb(work_dir)
  # Needed for single file compilation.
  commands = GetCompilationDb(work_dir)

    # Path as referenced by ninja.
    rel_path = os.path.relpath(os.path.abspath(filepath), work_dir)
  # Path as referenced by ninja.
  rel_path = os.path.relpath(os.path.abspath(filepath), work_dir)

    # Gather defines, include path and flags (such as -std=c++11).
    try:
        compilation_entry = commands[rel_path]
    except KeyError:
        raise ValueError('%s: Not found in compilation database.\n'
                         'Please check the path.' % filepath)
    command = compilation_entry['command'].split()
  # Gather defines, include path and flags (such as -std=c++11).
  try:
    compilation_entry = commands[rel_path]
  except KeyError as not_found:
    raise ValueError('%s: Not found in compilation database.\n'
                     'Please check the path.' % filepath) from not_found
  command = compilation_entry['command'].split()

    # Remove troublesome flags. May trigger an error otherwise.
    if '-MMD' in command:
        command.remove('-MMD')
    if '-MF' in command:
        index = command.index('-MF')
        del command[index:index + 2]  # Remove filename as well.
  # Remove troublesome flags. May trigger an error otherwise.
  if '-MMD' in command:
    command.remove('-MMD')
  if '-MF' in command:
    index = command.index('-MF')
    del command[index:index + 2]  # Remove filename as well.

    return command
  return command
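These helpers chain together: `RunGnCommand` generates a build directory, `GetCompilationDb` asks ninja for the compile database, and `GetCompilationCommand` extracts and sanitizes the exact command for one translation unit, for instance to feed clang-tidy via `GetClangTidyPath`. A hedged usage sketch; the scratch directory, gn args, and file path are placeholders:

```python
import build_helpers  # the module shown above

work_dir = '/tmp/gn-scratch'       # assumed scratch out/ directory
gn_args = []                       # illustrative; real callers may pass gn args
cc_file = 'pc/peer_connection.cc'  # example translation unit

# Surface `gn gen --check` problems first, as RunGnCheck returns summaries.
errors = build_helpers.RunGnCheck()
if errors:
  print('\n'.join(errors))

# Full compiler invocation (flags, defines, include paths) for cc_file.
command = build_helpers.GetCompilationCommand(cc_file, gn_args, work_dir)
print('tidy binary : %s' % build_helpers.GetClangTidyPath())
print('compile cmd : %s' % ' '.join(command))
```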
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -12,7 +12,6 @@ import re
import os
import unittest

#pylint: disable=relative-import
import build_helpers

TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),

@@ -26,7 +25,7 @@ class GnCheckTest(unittest.TestCase):
    expected_error = re.compile('ERROR Dependency cycle')
    gn_output = build_helpers.RunGnCheck(test_dir)
    self.assertEqual(1, len(gn_output))
    self.assertRegexpMatches(gn_output[0], expected_error)
    self.assertRegex(gn_output[0], expected_error)


if __name__ == '__main__':
@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -9,7 +10,6 @@

import os
import re
import string

# TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile(

@@ -26,27 +26,27 @@ SOURCE_FILE_RE = re.compile(r'.*\"(?P<source_file>.*)\"')


class NoBuildGnFoundError(Exception):
    pass
  pass


class WrongFileTypeError(Exception):
    pass
  pass


def _ReadFile(file_path):
    """Returns the content of file_path in a string.
  """Returns the content of file_path in a string.

  Args:
    file_path: the path of the file to read.
  Returns:
    A string with the content of the file.
  """
    with open(file_path) as f:
        return f.read()
  with open(file_path) as f:
    return f.read()


def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path):
    """Returns the BUILD.gn file responsible for file_path.
  """Returns the BUILD.gn file responsible for file_path.

  Args:
    file_path: the absolute path to the .h file to check.

@@ -58,23 +58,21 @@ def GetBuildGnPathFromFilePath(file_path, file_exists_check, root_dir_path):
    A string with the absolute path to the BUILD.gn file responsible to include
    file_path in a target.
  """
    if not file_path.endswith('.h'):
        raise WrongFileTypeError(
            'File {} is not an header file (.h)'.format(file_path))
    candidate_dir = os.path.dirname(file_path)
    while candidate_dir.startswith(root_dir_path):
        candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn')
        if file_exists_check(candidate_build_gn_path):
            return candidate_build_gn_path
        else:
            candidate_dir = os.path.abspath(
                os.path.join(candidate_dir, os.pardir))
    raise NoBuildGnFoundError(
        'No BUILD.gn file found for file: `{}`'.format(file_path))
  if not file_path.endswith('.h'):
    raise WrongFileTypeError(
        'File {} is not an header file (.h)'.format(file_path))
  candidate_dir = os.path.dirname(file_path)
  while candidate_dir.startswith(root_dir_path):
    candidate_build_gn_path = os.path.join(candidate_dir, 'BUILD.gn')
    if file_exists_check(candidate_build_gn_path):
      return candidate_build_gn_path
    candidate_dir = os.path.abspath(os.path.join(candidate_dir, os.pardir))
  raise NoBuildGnFoundError(
      'No BUILD.gn file found for file: `{}`'.format(file_path))


def IsHeaderInBuildGn(header_path, build_gn_path):
    """Returns True if the header is listed in the BUILD.gn file.
  """Returns True if the header is listed in the BUILD.gn file.

  Args:
    header_path: the absolute path to the header to check.

@@ -85,15 +83,15 @@ def IsHeaderInBuildGn(header_path, build_gn_path):
    at least one GN target in the BUILD.gn file specified by
    the argument build_gn_path.
  """
    target_abs_path = os.path.dirname(build_gn_path)
    build_gn_content = _ReadFile(build_gn_path)
    headers_in_build_gn = GetHeadersInBuildGnFileSources(
        build_gn_content, target_abs_path)
    return header_path in headers_in_build_gn
  target_abs_path = os.path.dirname(build_gn_path)
  build_gn_content = _ReadFile(build_gn_path)
  headers_in_build_gn = GetHeadersInBuildGnFileSources(build_gn_content,
                                                       target_abs_path)
  return header_path in headers_in_build_gn


def GetHeadersInBuildGnFileSources(file_content, target_abs_path):
    """Returns a set with all the .h files in the file_content.
  """Returns a set with all the .h files in the file_content.

  Args:
    file_content: a string with the content of the BUILD.gn file.

@@ -104,15 +102,15 @@ def GetHeadersInBuildGnFileSources(file_content, target_abs_path):
    A set with all the headers (.h file) in the file_content.
    The set contains absolute paths.
  """
    headers_in_sources = set([])
    for target_match in TARGET_RE.finditer(file_content):
        target_contents = target_match.group('target_contents')
        for sources_match in SOURCES_RE.finditer(target_contents):
            sources = sources_match.group('sources')
            for source_file_match in SOURCE_FILE_RE.finditer(sources):
                source_file = source_file_match.group('source_file')
                if source_file.endswith('.h'):
                    source_file_tokens = string.split(source_file, '/')
                    headers_in_sources.add(
                        os.path.join(target_abs_path, *source_file_tokens))
    return headers_in_sources
  headers_in_sources = set([])
  for target_match in TARGET_RE.finditer(file_content):
    target_contents = target_match.group('target_contents')
    for sources_match in SOURCES_RE.finditer(target_contents):
      sources = sources_match.group('sources')
      for source_file_match in SOURCE_FILE_RE.finditer(sources):
        source_file = source_file_match.group('source_file')
        if source_file.endswith('.h'):
          source_file_tokens = source_file.split('/')
          headers_in_sources.add(
              os.path.join(target_abs_path, *source_file_tokens))
  return headers_in_sources


@@ -1,4 +1,5 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license

@@ -11,72 +12,67 @@ import os
import sys
import unittest

#pylint: disable=relative-import
import check_orphan_headers


def _GetRootBasedOnPlatform():
    if sys.platform.startswith('win'):
        return 'C:\\'
    else:
        return '/'
  if sys.platform.startswith('win'):
    return 'C:\\'
  return '/'


def _GetPath(*path_chunks):
    return os.path.join(_GetRootBasedOnPlatform(), *path_chunks)
  return os.path.join(_GetRootBasedOnPlatform(), *path_chunks)


class GetBuildGnPathFromFilePathTest(unittest.TestCase):
    def testGetBuildGnFromSameDirectory(self):
        file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
        expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base',
                                       'BUILD.gn')
        file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
                                              'base', 'BUILD.gn')
        src_dir_path = _GetPath('home', 'projects', 'webrtc')
        self.assertEqual(
            expected_build_path,
            check_orphan_headers.GetBuildGnPathFromFilePath(
                file_path, file_exists, src_dir_path))
  def testGetBuildGnFromSameDirectory(self):
    file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
    expected_build_path = _GetPath('home', 'projects', 'webrtc', 'base',
                                   'BUILD.gn')
    file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc', 'base',
                                          'BUILD.gn')
    src_dir_path = _GetPath('home', 'projects', 'webrtc')
    self.assertEqual(
        expected_build_path,
        check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
                                                        src_dir_path))

    def testGetBuildPathFromParentDirectory(self):
        file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
        expected_build_path = _GetPath('home', 'projects', 'webrtc',
                                       'BUILD.gn')
        file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
                                              'BUILD.gn')
        src_dir_path = _GetPath('home', 'projects', 'webrtc')
        self.assertEqual(
            expected_build_path,
            check_orphan_headers.GetBuildGnPathFromFilePath(
                file_path, file_exists, src_dir_path))
  def testGetBuildPathFromParentDirectory(self):
    file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
    expected_build_path = _GetPath('home', 'projects', 'webrtc', 'BUILD.gn')
    file_exists = lambda p: p == _GetPath('home', 'projects', 'webrtc',
                                          'BUILD.gn')
    src_dir_path = _GetPath('home', 'projects', 'webrtc')
    self.assertEqual(
        expected_build_path,
        check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
                                                        src_dir_path))

    def testExceptionIfNoBuildGnFilesAreFound(self):
        with self.assertRaises(check_orphan_headers.NoBuildGnFoundError):
            file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
            file_exists = lambda p: False
            src_dir_path = _GetPath('home', 'projects', 'webrtc')
            check_orphan_headers.GetBuildGnPathFromFilePath(
                file_path, file_exists, src_dir_path)
  def testExceptionIfNoBuildGnFilesAreFound(self):
    with self.assertRaises(check_orphan_headers.NoBuildGnFoundError):
      file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.h')
      file_exists = lambda p: False
      src_dir_path = _GetPath('home', 'projects', 'webrtc')
      check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
                                                      src_dir_path)

    def testExceptionIfFilePathIsNotAnHeader(self):
        with self.assertRaises(check_orphan_headers.WrongFileTypeError):
            file_path = _GetPath('home', 'projects', 'webrtc', 'base',
                                 'foo.cc')
            file_exists = lambda p: False
            src_dir_path = _GetPath('home', 'projects', 'webrtc')
            check_orphan_headers.GetBuildGnPathFromFilePath(
                file_path, file_exists, src_dir_path)
  def testExceptionIfFilePathIsNotAnHeader(self):
    with self.assertRaises(check_orphan_headers.WrongFileTypeError):
      file_path = _GetPath('home', 'projects', 'webrtc', 'base', 'foo.cc')
      file_exists = lambda p: False
      src_dir_path = _GetPath('home', 'projects', 'webrtc')
      check_orphan_headers.GetBuildGnPathFromFilePath(file_path, file_exists,
                                                      src_dir_path)


class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase):
    def testEmptyFileReturnsEmptySet(self):
        self.assertEqual(
            set([]),
            check_orphan_headers.GetHeadersInBuildGnFileSources('', '/a/b'))
  def testEmptyFileReturnsEmptySet(self):
    self.assertEqual(
        set([]),
        check_orphan_headers.GetHeadersInBuildGnFileSources('', '/a/b'))

    def testReturnsSetOfHeadersFromFileContent(self):
        file_content = """
  def testReturnsSetOfHeadersFromFileContent(self):
    file_content = """
    # Some comments
    if (is_android) {
      import("//a/b/c.gni")

@@ -101,17 +97,17 @@ class GetHeadersInBuildGnFileSourcesTest(unittest.TestCase):
      sources = ["baz/foo.h"]
    }
    """
        target_abs_path = _GetPath('a', 'b')
        self.assertEqual(
            set([
                _GetPath('a', 'b', 'foo.h'),
                _GetPath('a', 'b', 'bar.h'),
                _GetPath('a', 'b', 'public_foo.h'),
                _GetPath('a', 'b', 'baz', 'foo.h'),
            ]),
            check_orphan_headers.GetHeadersInBuildGnFileSources(
                file_content, target_abs_path))
    target_abs_path = _GetPath('a', 'b')
    self.assertEqual(
        set([
            _GetPath('a', 'b', 'foo.h'),
            _GetPath('a', 'b', 'bar.h'),
            _GetPath('a', 'b', 'public_foo.h'),
            _GetPath('a', 'b', 'baz', 'foo.h'),
        ]),
        check_orphan_headers.GetHeadersInBuildGnFileSources(
            file_content, target_abs_path))


if __name__ == '__main__':
    unittest.main()
  unittest.main()
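The checker and its test above suggest the intended flow for spotting orphan headers: walk up from a header to the nearest BUILD.gn, then verify the header is listed in some target's sources. A small hedged sketch; the checkout root and header path are placeholders:

```python
import os.path

import check_orphan_headers

root_dir = '/home/projects/webrtc'                # assumed checkout root
header = os.path.join(root_dir, 'base', 'foo.h')  # example header

# Find the closest BUILD.gn above the header, then check its sources lists.
build_gn = check_orphan_headers.GetBuildGnPathFromFilePath(
    header, os.path.isfile, root_dir)
if not check_orphan_headers.IsHeaderInBuildGn(header, build_gn):
  print('Orphan header: %s (expected in %s)' % (header, build_gn))
```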
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -33,104 +33,101 @@ class PackageBoundaryViolation(
        collections.namedtuple(
            'PackageBoundaryViolation',
            'build_file_path target_name source_file subpackage')):
    def __str__(self):
        return ERROR_MESSAGE.format(**self._asdict())
  def __str__(self):
    return ERROR_MESSAGE.format(**self._asdict())


def _BuildSubpackagesPattern(packages, query):
    """Returns a regular expression that matches source files inside subpackages
  """Returns a regular expression that matches source files inside subpackages
  of the given query."""
    query += os.path.sep
    length = len(query)
    pattern = r'\s*"(?P<source_file>(?P<subpackage>'
    pattern += '|'.join(
        re.escape(package[length:].replace(os.path.sep, '/'))
        for package in packages if package.startswith(query))
    pattern += r')/[\w\./]*)"'
    return re.compile(pattern)
  query += os.path.sep
  length = len(query)
  pattern = r'\s*"(?P<source_file>(?P<subpackage>'
  pattern += '|'.join(
      re.escape(package[length:].replace(os.path.sep, '/'))
      for package in packages if package.startswith(query))
  pattern += r')/[\w\./]*)"'
  return re.compile(pattern)


def _ReadFileAndPrependLines(file_path):
    """Reads the contents of a file."""
    with open(file_path) as f:
        return "".join(f.readlines())
  """Reads the contents of a file."""
  with open(file_path) as f:
    return "".join(f.readlines())


def _CheckBuildFile(build_file_path, packages):
    """Iterates over all the targets of the given BUILD.gn file, and verifies that
  """Iterates over all the targets of the given BUILD.gn file, and verifies that
  the source files referenced by it don't belong to any of it's subpackages.
  Returns an iterator over PackageBoundaryViolations for this package.
  """
    package = os.path.dirname(build_file_path)
    subpackages_re = _BuildSubpackagesPattern(packages, package)
  package = os.path.dirname(build_file_path)
  subpackages_re = _BuildSubpackagesPattern(packages, package)

    build_file_contents = _ReadFileAndPrependLines(build_file_path)
    for target_match in TARGET_RE.finditer(build_file_contents):
        target_name = target_match.group('target_name')
        target_contents = target_match.group('target_contents')
        for sources_match in SOURCES_RE.finditer(target_contents):
            sources = sources_match.group('sources')
            for subpackages_match in subpackages_re.finditer(sources):
                subpackage = subpackages_match.group('subpackage')
                source_file = subpackages_match.group('source_file')
                if subpackage:
                    yield PackageBoundaryViolation(build_file_path,
                                                   target_name, source_file,
                                                   subpackage)
  build_file_contents = _ReadFileAndPrependLines(build_file_path)
  for target_match in TARGET_RE.finditer(build_file_contents):
    target_name = target_match.group('target_name')
    target_contents = target_match.group('target_contents')
    for sources_match in SOURCES_RE.finditer(target_contents):
      sources = sources_match.group('sources')
      for subpackages_match in subpackages_re.finditer(sources):
        subpackage = subpackages_match.group('subpackage')
        source_file = subpackages_match.group('source_file')
        if subpackage:
          yield PackageBoundaryViolation(build_file_path, target_name,
                                         source_file, subpackage)


def CheckPackageBoundaries(root_dir, build_files=None):
    packages = [
        root for root, _, files in os.walk(root_dir) if 'BUILD.gn' in files
    ]
  packages = [
      root for root, _, files in os.walk(root_dir) if 'BUILD.gn' in files
  ]

    if build_files is not None:
        for build_file_path in build_files:
            assert build_file_path.startswith(root_dir)
    else:
        build_files = [
            os.path.join(package, 'BUILD.gn') for package in packages
        ]

    messages = []
    for build_file_path in build_files:
        messages.extend(_CheckBuildFile(build_file_path, packages))
    return messages
  if build_files is not None:
    for build_file_path in build_files:
      assert build_file_path.startswith(root_dir)
  else:
    build_files = [os.path.join(package, 'BUILD.gn') for package in packages]

  messages = []
  for build_file_path in build_files:
    messages.extend(_CheckBuildFile(build_file_path, packages))
  return messages


def main(argv):
    parser = argparse.ArgumentParser(
        description='Script that checks package boundary violations in GN '
        'build files.')

    parser.add_argument('root_dir',
                        metavar='ROOT_DIR',
                        help='The root directory that contains all BUILD.gn '
                        'files to be processed.')
    parser.add_argument('build_files',
                        metavar='BUILD_FILE',
                        nargs='*',
                        help='A list of BUILD.gn files to be processed. If no '
                        'files are given, all BUILD.gn files under ROOT_DIR '
                        'will be processed.')
    parser.add_argument('--max_messages',
                        type=int,
                        default=None,
                        help='If set, the maximum number of violations to be '
                        'displayed.')
  parser = argparse.ArgumentParser(
      description='Script that checks package boundary violations in GN '
      'build files.')

  parser.add_argument('root_dir',
                      metavar='ROOT_DIR',
                      help='The root directory that contains all BUILD.gn '
                      'files to be processed.')
  parser.add_argument('build_files',
                      metavar='BUILD_FILE',
                      nargs='*',
                      help='A list of BUILD.gn files to be processed. If no '
                      'files are given, all BUILD.gn files under ROOT_DIR '
                      'will be processed.')
  parser.add_argument('--max_messages',
                      type=int,
                      default=None,
                      help='If set, the maximum number of violations to be '
                      'displayed.')

    args = parser.parse_args(argv)
  args = parser.parse_args(argv)

    messages = CheckPackageBoundaries(args.root_dir, args.build_files)
    messages = messages[:args.max_messages]
  messages = CheckPackageBoundaries(args.root_dir, args.build_files)
  messages = messages[:args.max_messages]

    for i, message in enumerate(messages):
        if i > 0:
            print
        print message
  for i, message in enumerate(messages):
    if i > 0:
      print()
    print(message)

    return bool(messages)
  return bool(messages)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
  sys.exit(main(sys.argv[1:]))
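Besides the CLI in `main`, `CheckPackageBoundaries` can be called directly; it returns a list of `PackageBoundaryViolation` namedtuples whose `__str__` renders the `ERROR_MESSAGE` template. A hedged sketch with a placeholder root directory:

```python
import check_package_boundaries

root_dir = '/home/projects/webrtc'  # assumed checkout root
# Print each violation, blank-line separated, mirroring main() above.
for i, violation in enumerate(
    check_package_boundaries.CheckPackageBoundaries(root_dir)):
  if i > 0:
    print()
  print(violation)
```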
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#

@@ -12,8 +12,7 @@ import ast
import os
import unittest

#pylint: disable=relative-import
from check_package_boundaries import CheckPackageBoundaries
import check_package_boundaries

MSG_FORMAT = 'ERROR:check_package_boundaries.py: Unexpected %s.'
TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),

@@ -21,54 +20,52 @@ TESTDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),


def ReadPylFile(file_path):
    with open(file_path) as f:
        return ast.literal_eval(f.read())
  with open(file_path) as f:
    return ast.literal_eval(f.read())


class UnitTest(unittest.TestCase):
    def _RunTest(self, test_dir, check_all_build_files=False):
        build_files = [os.path.join(test_dir, 'BUILD.gn')]
        if check_all_build_files:
            build_files = None
  def _RunTest(self, test_dir, check_all_build_files=False):
    build_files = [os.path.join(test_dir, 'BUILD.gn')]
    if check_all_build_files:
      build_files = None

        messages = []
        for violation in CheckPackageBoundaries(test_dir, build_files):
            build_file_path = os.path.relpath(violation.build_file_path,
                                              test_dir)
            build_file_path = build_file_path.replace(os.path.sep, '/')
            messages.append(
                violation._replace(build_file_path=build_file_path))
    messages = []
    for violation in check_package_boundaries.CheckPackageBoundaries(
        test_dir, build_files):
      build_file_path = os.path.relpath(violation.build_file_path, test_dir)
      build_file_path = build_file_path.replace(os.path.sep, '/')
      messages.append(violation._replace(build_file_path=build_file_path))

        expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl'))
        self.assertListEqual(sorted(expected_messages), sorted(messages))
    expected_messages = ReadPylFile(os.path.join(test_dir, 'expected.pyl'))
    self.assertListEqual(sorted(expected_messages), sorted(messages))

    def testNoErrors(self):
        self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors'))
  def testNoErrors(self):
    self._RunTest(os.path.join(TESTDATA_DIR, 'no_errors'))

    def testMultipleErrorsSingleTarget(self):
        self._RunTest(
            os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))
  def testMultipleErrorsSingleTarget(self):
    self._RunTest(os.path.join(TESTDATA_DIR, 'multiple_errors_single_target'))

    def testMultipleErrorsMultipleTargets(self):
        self._RunTest(
            os.path.join(TESTDATA_DIR, 'multiple_errors_multiple_targets'))
  def testMultipleErrorsMultipleTargets(self):
    self._RunTest(os.path.join(TESTDATA_DIR,
                               'multiple_errors_multiple_targets'))

    def testCommonPrefix(self):
        self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix'))
  def testCommonPrefix(self):
    self._RunTest(os.path.join(TESTDATA_DIR, 'common_prefix'))

    def testAllBuildFiles(self):
        self._RunTest(os.path.join(TESTDATA_DIR, 'all_build_files'), True)
  def testAllBuildFiles(self):
    self._RunTest(os.path.join(TESTDATA_DIR, 'all_build_files'), True)

    def testSanitizeFilename(self):
        # The `dangerous_filename` test case contains a directory with '++' in its
        # name. If it's not properly escaped, a regex error would be raised.
        self._RunTest(os.path.join(TESTDATA_DIR, 'dangerous_filename'), True)
  def testSanitizeFilename(self):
    # The `dangerous_filename` test case contains a directory with '++' in its
    # name. If it's not properly escaped, a regex error would be raised.
    self._RunTest(os.path.join(TESTDATA_DIR, 'dangerous_filename'), True)

    def testRelativeFilename(self):
        test_dir = os.path.join(TESTDATA_DIR, 'all_build_files')
        with self.assertRaises(AssertionError):
            CheckPackageBoundaries(test_dir, ["BUILD.gn"])
  def testRelativeFilename(self):
    test_dir = os.path.join(TESTDATA_DIR, 'all_build_files')
    with self.assertRaises(AssertionError):
      check_package_boundaries.CheckPackageBoundaries(test_dir, ["BUILD.gn"])


if __name__ == '__main__':
    unittest.main()
  unittest.main()
@@ -9,7 +9,7 @@ by WebRTC follow this instructions:
2. Launch the script:

```
$ python tools_webrtc/sslroots/generate_sslroots.py roots.pem
$ vpython3 tools_webrtc/sslroots/generate_sslroots.py roots.pem
```

3. Step 2 should have generated an ssl_roots.h file right next to roots.pem.
@ -1,3 +1,5 @@
|
|||
#!/usr/bin/env vpython3
|
||||
|
||||
# -*- coding:utf-8 -*-
|
||||
# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
|
@ -17,7 +19,7 @@ Arguments:
|
|||
generated file size.
|
||||
"""
|
||||
|
||||
import commands
|
||||
import subprocess
|
||||
from optparse import OptionParser
|
||||
import os
|
||||
import re
|
||||
|
@ -39,180 +41,174 @@ _VERBOSE = 'verbose'
|
|||
|
||||
|
||||
def main():
|
||||
"""The main entrypoint."""
|
||||
parser = OptionParser('usage %prog FILE')
|
||||
parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
|
||||
parser.add_option('-f',
|
||||
'--full_cert',
|
||||
dest='full_cert',
|
||||
action='store_true')
|
||||
options, args = parser.parse_args()
|
||||
if len(args) < 1:
|
||||
parser.error('No crt file specified.')
|
||||
return
|
||||
root_dir = _SplitCrt(args[0], options)
|
||||
_GenCFiles(root_dir, options)
|
||||
_Cleanup(root_dir)
|
||||
"""The main entrypoint."""
|
||||
parser = OptionParser('usage %prog FILE')
|
||||
parser.add_option('-v', '--verbose', dest='verbose', action='store_true')
|
||||
parser.add_option('-f', '--full_cert', dest='full_cert', action='store_true')
|
||||
options, args = parser.parse_args()
|
||||
if len(args) < 1:
|
||||
parser.error('No crt file specified.')
|
||||
return
|
||||
root_dir = _SplitCrt(args[0], options)
|
||||
_GenCFiles(root_dir, options)
|
||||
_Cleanup(root_dir)
|
||||
|
||||
|
||||
def _SplitCrt(source_file, options):
|
||||
sub_file_blocks = []
|
||||
label_name = ''
|
||||
root_dir = os.path.dirname(os.path.abspath(source_file)) + '/'
|
||||
_PrintOutput(root_dir, options)
|
||||
f = open(source_file)
|
||||
for line in f:
|
||||
if line.startswith('# Label: '):
|
||||
sub_file_blocks.append(line)
|
||||
label = re.search(r'\".*\"', line)
|
||||
temp_label = label.group(0)
|
||||
end = len(temp_label) - 1
|
||||
label_name = _SafeName(temp_label[1:end])
|
||||
elif line.startswith('-----END CERTIFICATE-----'):
|
||||
sub_file_blocks.append(line)
|
||||
new_file_name = root_dir + _PREFIX + label_name + _EXTENSION
|
||||
_PrintOutput('Generating: ' + new_file_name, options)
|
||||
new_file = open(new_file_name, 'w')
|
||||
for out_line in sub_file_blocks:
|
||||
new_file.write(out_line)
|
||||
new_file.close()
|
||||
sub_file_blocks = []
|
||||
else:
|
||||
sub_file_blocks.append(line)
|
||||
f.close()
|
||||
return root_dir
|
||||
sub_file_blocks = []
|
||||
label_name = ''
|
||||
root_dir = os.path.dirname(os.path.abspath(source_file)) + '/'
|
||||
_PrintOutput(root_dir, options)
|
||||
f = open(source_file)
|
||||
for line in f:
|
||||
if line.startswith('# Label: '):
|
||||
sub_file_blocks.append(line)
|
||||
label = re.search(r'\".*\"', line)
|
||||
temp_label = label.group(0)
|
||||
end = len(temp_label) - 1
|
||||
label_name = _SafeName(temp_label[1:end])
|
||||
elif line.startswith('-----END CERTIFICATE-----'):
|
||||
sub_file_blocks.append(line)
|
||||
new_file_name = root_dir + _PREFIX + label_name + _EXTENSION
|
||||
_PrintOutput('Generating: ' + new_file_name, options)
|
||||
new_file = open(new_file_name, 'w')
|
||||
for out_line in sub_file_blocks:
|
||||
new_file.write(out_line)
|
||||
new_file.close()
|
||||
sub_file_blocks = []
|
||||
else:
|
||||
sub_file_blocks.append(line)
|
||||
f.close()
|
||||
return root_dir


def _GenCFiles(root_dir, options):
  output_header_file = open(root_dir + _GENERATED_FILE, 'w')
  output_header_file.write(_CreateOutputHeader())
  if options.full_cert:
    subject_name_list = _CreateArraySectionHeader(_SUBJECT_NAME_VARIABLE,
                                                  _CHAR_TYPE, options)
    public_key_list = _CreateArraySectionHeader(_PUBLIC_KEY_VARIABLE,
                                                _CHAR_TYPE, options)
    certificate_list = _CreateArraySectionHeader(_CERTIFICATE_VARIABLE,
                                                 _CHAR_TYPE, options)
    certificate_size_list = _CreateArraySectionHeader(_CERTIFICATE_SIZE_VARIABLE,
                                                      _INT_TYPE, options)

  for _, _, files in os.walk(root_dir):
    for current_file in files:
      if current_file.startswith(_PREFIX):
        prefix_length = len(_PREFIX)
        length = len(current_file) - len(_EXTENSION)
        label = current_file[prefix_length:length]
        filtered_output, cert_size = _CreateCertSection(root_dir, current_file,
                                                        label, options)
        output_header_file.write(filtered_output + '\n\n\n')
        if options.full_cert:
          subject_name_list += _AddLabelToArray(label, _SUBJECT_NAME_ARRAY)
          public_key_list += _AddLabelToArray(label, _PUBLIC_KEY_ARRAY)
          certificate_list += _AddLabelToArray(label, _CERTIFICATE_ARRAY)
          certificate_size_list += ('  %s,\n') % (cert_size)

  if options.full_cert:
    subject_name_list += _CreateArraySectionFooter()
    output_header_file.write(subject_name_list)
    public_key_list += _CreateArraySectionFooter()
    output_header_file.write(public_key_list)
    certificate_list += _CreateArraySectionFooter()
    output_header_file.write(certificate_list)
    certificate_size_list += _CreateArraySectionFooter()
    output_header_file.write(certificate_size_list)
  output_header_file.write(_CreateOutputFooter())
  output_header_file.close()


def _Cleanup(root_dir):
  for f in os.listdir(root_dir):
    if f.startswith(_PREFIX):
      os.remove(root_dir + f)


def _CreateCertSection(root_dir, source_file, label, options):
  command = 'openssl x509 -in %s%s -noout -C' % (root_dir, source_file)
  _PrintOutput(command, options)
  output = subprocess.getstatusoutput(command)[1]
  renamed_output = output.replace('unsigned char XXX_',
                                  'const unsigned char ' + label + '_')
  filtered_output = ''
  cert_block = '^const unsigned char.*?};$'
  prog = re.compile(cert_block, re.IGNORECASE | re.MULTILINE | re.DOTALL)
  if not options.full_cert:
    filtered_output = prog.sub('', renamed_output, count=2)
  else:
    filtered_output = renamed_output

  cert_size_block = r'\d\d\d+'
  prog2 = re.compile(cert_size_block, re.MULTILINE | re.VERBOSE)
  result = prog2.findall(renamed_output)
  cert_size = result[len(result) - 1]

  return filtered_output, cert_size
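
One py3 migration detail worth calling out: the py2 commands.getstatusoutput used before this CL is gone, and subprocess.getstatusoutput is its replacement. A minimal sketch of the call pattern, with an illustrative command:

import subprocess

# Runs the command in a shell and returns (exit_status, combined_output).
status, output = subprocess.getstatusoutput('openssl version')
if status == 0:
  print(output)  # e.g. 'OpenSSL 1.1.1f  31 Mar 2020'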


def _CreateOutputHeader():
  output = ('/*\n'
            ' * Copyright 2004 The WebRTC Project Authors. All rights '
            'reserved.\n'
            ' *\n'
            ' * Use of this source code is governed by a BSD-style license\n'
            ' * that can be found in the LICENSE file in the root of the '
            'source\n'
            ' * tree. An additional intellectual property rights grant can be '
            'found\n'
            ' * in the file PATENTS. All contributing project authors may\n'
            ' * be found in the AUTHORS file in the root of the source tree.\n'
            ' */\n\n'
            '#ifndef RTC_BASE_SSL_ROOTS_H_\n'
            '#define RTC_BASE_SSL_ROOTS_H_\n\n'
            '// This file is the root certificates in C form that are needed to'
            ' connect to\n// Google.\n\n'
            '// It was generated with the following command line:\n'
            '// > vpython3 tools_webrtc/sslroots/generate_sslroots.py'
            '\n// https://pki.goog/roots.pem\n\n'
            '// clang-format off\n'
            '// Don\'t bother formatting generated code,\n'
            '// also it would break subject/issuer lines.\n\n')
  return output


def _CreateOutputFooter():
  output = ('// clang-format on\n\n#endif  // RTC_BASE_SSL_ROOTS_H_\n')
  return output


def _CreateArraySectionHeader(type_name, type_type, options):
  output = ('const %s kSSLCert%sList[] = {\n') % (type_type, type_name)
  _PrintOutput(output, options)
  return output


def _AddLabelToArray(label, type_name):
  return '  %s_%s,\n' % (label, type_name)


def _CreateArraySectionFooter():
  return '};\n\n'


def _SafeName(original_file_name):
  bad_chars = ' -./\\()áéíőú'
  replacement_chars = ''
  for _ in bad_chars:
    replacement_chars += '_'
  # string.maketrans was removed in Python 3; str.maketrans is the replacement.
  translation_table = str.maketrans(bad_chars, replacement_chars)
  return original_file_name.translate(translation_table)
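
The Python 3 translation step used above, in isolation (sample name invented):

table = str.maketrans(' -./', '____')
print('Example Root CA - R2'.translate(table))  # Example_Root_CA___R2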


def _PrintOutput(output, options):
  if options.verbose:
    print(output)


if __name__ == '__main__':
  main()

@ -1,4 +1,5 @@
#!/usr/bin/env vpython3

# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -19,11 +20,11 @@ import sys


def FindSrcDirPath():
  """Returns the abs path to the src/ dir of the project."""
  src_dir = os.path.dirname(os.path.abspath(__file__))
  while os.path.basename(src_dir) != 'src':
    src_dir = os.path.normpath(os.path.join(src_dir, os.pardir))
  return src_dir


UPDATE_BRANCH_NAME = 'webrtc_version_update'
@ -33,140 +34,132 @@ NOTIFY_EMAIL = 'mbonadei@webrtc.org'


def _RemovePreviousUpdateBranch():
  active_branch, branches = _GetBranches()
  if active_branch == UPDATE_BRANCH_NAME:
    active_branch = 'master'
  if UPDATE_BRANCH_NAME in branches:
    logging.info('Removing previous update branch (%s)', UPDATE_BRANCH_NAME)
    subprocess.check_call(['git', 'checkout', active_branch])
    subprocess.check_call(['git', 'branch', '-D', UPDATE_BRANCH_NAME])
  logging.info('No branch to remove')


def _GetLastAuthor():
  """Returns a string with the author of the last commit."""
  author = subprocess.check_output(
      ['git', 'log', '-1', '--pretty=format:"%an"']).splitlines()
  return author
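
Note that under Python 3 subprocess.check_output returns bytes, and .splitlines() then yields a list of bytes, so the == 'webrtc-version-updater' comparison in main() below only matches after decoding (the same caveat applies to _GetBranches). A hedged sketch of a text-mode variant, not the script's actual code:

import subprocess

# universal_newlines=True (text=True on Python >= 3.7) makes check_output
# return str instead of bytes, so string comparisons behave as expected.
author = subprocess.check_output(
    ['git', 'log', '-1', '--pretty=format:%an'],
    universal_newlines=True).strip()
if author == 'webrtc-version-updater':
  print('Last commit was made by the version updater.')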


def _GetBranches():
  """Returns a tuple (active, branches).

  'active' is a string with the name of the currently active branch, while
  'branches' is the list of all branches.
  """
  lines = subprocess.check_output(['git', 'branch']).splitlines()
  branches = []
  active = ''
  for line in lines:
    if '*' in line:
      # The assumption is that the first char will always be the '*'.
      active = line[1:].strip()
      branches.append(active)
    else:
      branch = line.strip()
      if branch:
        branches.append(branch)
  return active, branches


def _CreateUpdateBranch():
  logging.info('Creating update branch: %s', UPDATE_BRANCH_NAME)
  subprocess.check_call(['git', 'checkout', '-b', UPDATE_BRANCH_NAME])


def _UpdateWebRTCVersion(filename):
  with open(filename) as f:
    content = f.read()
  d = datetime.datetime.utcnow()
  # pylint: disable=line-too-long
  new_content = re.sub(
      r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}',
      r'WebRTC source stamp %02d-%02d-%02dT%02d:%02d:%02d' %
      (d.year, d.month, d.day, d.hour, d.minute, d.second),
      content,
      flags=re.MULTILINE)
  # pylint: enable=line-too-long
  with open(filename, 'w') as f:
    f.write(new_content)
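
To make the substitution concrete, here is the same regex applied to one sample line (the surrounding C++ is paraphrased and both timestamps are invented):

import re

line = 'const char kStamp[] = "WebRTC source stamp 2022-01-01T00:00:00";'
print(re.sub(
    r'WebRTC source stamp [0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}',
    'WebRTC source stamp 2022-02-03T04:05:06', line))
# const char kStamp[] = "WebRTC source stamp 2022-02-03T04:05:06";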


def _IsTreeClean():
  stdout = subprocess.check_output(['git', 'status', '--porcelain'])
  if len(stdout) == 0:
    return True
  return False


def _LocalCommit():
  logging.info('Committing changes locally.')
  d = datetime.datetime.utcnow()

  commit_msg = ('Update WebRTC code version (%02d-%02d-%02dT%02d:%02d:%02d).'
                '\n\nBug: None')
  commit_msg = commit_msg % (d.year, d.month, d.day, d.hour, d.minute, d.second)
  subprocess.check_call(['git', 'add', '--update', '.'])
  subprocess.check_call(['git', 'commit', '-m', commit_msg])


def _UploadCL(commit_queue_mode):
  """Upload the committed changes as a changelist to Gerrit.

  commit_queue_mode:
    - 2: Submit to commit queue.
    - 1: Run trybots but do not submit to CQ.
    - 0: Skip CQ, upload only.
  """
  cmd = [
      'git', 'cl', 'upload', '--force', '--bypass-hooks', '--bypass-watchlist'
  ]
  if commit_queue_mode >= 2:
    logging.info('Sending the CL to the CQ...')
    cmd.extend(['-o', 'label=Bot-Commit+1'])
    cmd.extend(['-o', 'label=Commit-Queue+2'])
    cmd.extend(['--send-mail', '--cc', NOTIFY_EMAIL])
  elif commit_queue_mode >= 1:
    logging.info('Starting CQ dry run...')
    cmd.extend(['-o', 'label=Commit-Queue+1'])
  subprocess.check_call(cmd)
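
For instance, a dry run (_UploadCL(1)) ends up executing a command equivalent to this list, reconstructed from the branches above:

cmd = ['git', 'cl', 'upload', '--force', '--bypass-hooks',
       '--bypass-watchlist', '-o', 'label=Commit-Queue+1']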


def main():
  logging.basicConfig(level=logging.INFO)
  p = argparse.ArgumentParser()
  p.add_argument('--clean',
                 action='store_true',
                 default=False,
                 help='Removes any previous local update branch.')
  opts = p.parse_args()

  if opts.clean:
    _RemovePreviousUpdateBranch()

  if _GetLastAuthor() == 'webrtc-version-updater':
    logging.info('Last commit is a version change, skipping CL.')
    return 0

  version_filename = os.path.join(CHECKOUT_SRC_DIR, 'call', 'version.cc')
  _CreateUpdateBranch()
  _UpdateWebRTCVersion(version_filename)
  if _IsTreeClean():
    logging.info('No WebRTC version change detected, skipping CL.')
  else:
    _LocalCommit()
    logging.info('Uploading CL...')
    _UploadCL(2)
  return 0


if __name__ == '__main__':
  sys.exit(main())

@ -1,3 +1,5 @@
#!/usr/bin/env vpython3

# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
@ -75,12 +77,8 @@ _EXTENSION_FLAGS = {
}


def FindWebrtcSrcFromFilename(filename):
  """Searches for the root of the WebRTC checkout.

  Simply checks parent directories until it finds .gclient and src/.

@ -90,20 +88,20 @@ def FindWebrtcSrcFromFilename(filename):
  Returns:
    (String) Path of 'src/', or None if unable to find.
  """
  curdir = os.path.normpath(os.path.dirname(filename))
  while not (os.path.basename(curdir) == 'src'
             and os.path.exists(os.path.join(curdir, 'DEPS')) and
             (os.path.exists(os.path.join(curdir, '..', '.gclient'))
              or os.path.exists(os.path.join(curdir, '.git')))):
    nextdir = os.path.normpath(os.path.join(curdir, '..'))
    if nextdir == curdir:
      return None
    curdir = nextdir
  return curdir


def GetDefaultSourceFile(webrtc_root, filename):
  """Returns the default source file to use as an alternative to `filename`.

  Compile flags used to build the default source file are assumed to be a
  close-enough approximation for building `filename`.

@ -115,13 +113,13 @@ def GetDefaultSourceFile(webrtc_root, filename):
  Returns:
    (String) Absolute path to substitute source file.
  """
  if 'test.' in filename:
    return os.path.join(webrtc_root, 'base', 'logging_unittest.cc')
  return os.path.join(webrtc_root, 'base', 'logging.cc')


def GetNinjaBuildOutputsForSourceFile(out_dir, filename):
  """Returns a list of build outputs for filename.

  The list is generated by invoking the 'ninja -t query' tool to retrieve a
  list of inputs and outputs of `filename`. This list is then filtered to only
  include

@ -135,35 +133,35 @@ def GetNinjaBuildOutputsForSourceFile(out_dir, filename):
    (List of Strings) List of target names. Will return [] if `filename`
    doesn't yield any .o or .obj outputs.
  """
  # Ninja needs the path to the source file relative to the output build
  # directory.
  rel_filename = os.path.relpath(filename, out_dir)

  p = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename],
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       universal_newlines=True)
  stdout, _ = p.communicate()
  if p.returncode != 0:
    return []

  # The output looks like:
  # ../../relative/path/to/source.cc:
  #   outputs:
  #     obj/relative/path/to/target.source.o
  #     obj/some/other/target2.source.o
  #     another/target.txt
  #
  outputs_text = stdout.partition('\n  outputs:\n')[2]
  output_lines = [line.strip() for line in outputs_text.split('\n')]
  return [
      target for target in output_lines
      if target and (target.endswith('.o') or target.endswith('.obj'))
  ]
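
The parsing trick in isolation, run on a canned sample of that output (paths invented):

sample = ('../../a/b/source.cc:\n'
          '  outputs:\n'
          '    obj/a/b/target.source.o\n'
          '    another/target.txt\n')
outputs_text = sample.partition('\n  outputs:\n')[2]
targets = [line.strip() for line in outputs_text.split('\n')]
print([t for t in targets if t.endswith(('.o', '.obj'))])
# ['obj/a/b/target.source.o']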


def GetClangCommandLineForNinjaOutput(out_dir, build_target):
  """Returns the Clang command line for building `build_target`

  Asks ninja for the list of commands used to build `filename` and returns the
  final Clang invocation.

@ -176,25 +174,25 @@ def GetClangCommandLineForNinjaOutput(out_dir, build_target):
    (String or None) Clang command line or None if a Clang command line
    couldn't be determined.
  """
  p = subprocess.Popen(
      ['ninja', '-v', '-C', out_dir, '-t', 'commands', build_target],
      stdout=subprocess.PIPE,
      universal_newlines=True)
  stdout, _ = p.communicate()
  if p.returncode != 0:
    return None

  # Ninja will return multiple build steps for all dependencies up to
  # `build_target`. The build step we want is the last Clang invocation, which
  # is expected to be the one that outputs `build_target`.
  for line in reversed(stdout.split('\n')):
    if 'clang' in line:
      return line
  return None


def GetClangCommandLineFromNinjaForSource(out_dir, filename):
  """Returns a Clang command line used to build `filename`.

  The same source file could be built multiple times using different tool
  chains. In such cases, this command returns the first Clang invocation. We

@ -210,17 +208,17 @@ def GetClangCommandLineFromNinjaForSource(out_dir, filename):
    (String or None): Command line for Clang invocation using `filename` as a
    source. Returns None if no such command line could be found.
  """
  build_targets = GetNinjaBuildOutputsForSourceFile(out_dir, filename)
  for build_target in build_targets:
    command_line = GetClangCommandLineForNinjaOutput(out_dir, build_target)
    if command_line:
      return command_line
  return None


def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
                                   additional_flags):
  """Extracts relevant command line options from `clang_commandline`

  Args:
    clang_commandline: (String) Full Clang invocation.

@ -232,48 +230,46 @@ def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
    (List of Strings) The list of command line flags for this source file. Can
    be empty.
  """
  clang_flags = [] + additional_flags

  # Parse flags that are important for YCM's purposes.
  clang_tokens = shlex.split(clang_commandline)
  for flag_index, flag in enumerate(clang_tokens):
    if flag.startswith('-I'):
      # Relative paths need to be resolved, because they're relative to
      # the output dir, not the source.
      if flag[2] == '/':
        clang_flags.append(flag)
      else:
        abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
        clang_flags.append('-I' + abs_path)
    elif flag.startswith('-std'):
      clang_flags.append(flag)
    elif flag.startswith('-') and flag[1] in 'DWFfmO':
      if flag in ['-Wno-deprecated-register', '-Wno-header-guard']:
        # These flags cause libclang (3.3) to crash. Remove them until
        # things are fixed.
        continue
      clang_flags.append(flag)
    elif flag == '-isysroot':
      # On Mac -isysroot <path> is used to find the system headers.
      # Copy over both flags.
      if flag_index + 1 < len(clang_tokens):
        clang_flags.append(flag)
        clang_flags.append(clang_tokens[flag_index + 1])
    elif flag.startswith('--sysroot='):
      # On Linux we use a sysroot image.
      sysroot_path = flag.lstrip('--sysroot=')
      if sysroot_path.startswith('/'):
        clang_flags.append(flag)
      else:
        abs_path = os.path.normpath(os.path.join(out_dir, sysroot_path))
        clang_flags.append('--sysroot=' + abs_path)
  return clang_flags
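
As an aside, the tokenization relies on shlex.split; a toy run on an abbreviated, made-up clang command shows which flags survive the filter:

import shlex

cmdline = 'clang++ -Iinclude -DNDEBUG -std=c++17 -o obj/foo.o ../../foo.cc'
tokens = shlex.split(cmdline)
print([t for t in tokens if t.startswith(('-I', '-D', '-std'))])
# ['-Iinclude', '-DNDEBUG', '-std=c++17']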


def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
  """Returns the Clang command line options needed for building `filename`.

  Command line options are based on the command used by ninja for building
  `filename`. If `filename` is a .h file, uses its companion .cc or .cpp file.

@ -289,55 +285,54 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
    (List of Strings) The list of command line flags for this source file. Can
    be empty.
  """
  if not webrtc_root:
    return []

  # Generally, everyone benefits from including WebRTC's src/, because all of
  # WebRTC's includes are relative to that.
  additional_flags = ['-I' + os.path.join(webrtc_root)]

  # The version of Clang used to compile WebRTC can be newer than the version
  # of libclang that YCM uses for completion. So it's possible that YCM's
  # libclang doesn't know about some used warning options, which causes
  # compilation warnings (and errors, because of '-Werror');
  additional_flags.append('-Wno-unknown-warning-option')

  sys.path.append(os.path.join(webrtc_root, 'tools', 'vim'))
  from ninja_output import GetNinjaOutputDirectory
  out_dir = GetNinjaOutputDirectory(webrtc_root)

  basename, extension = os.path.splitext(filename)
  if extension == '.h':
    candidates = [basename + ext for ext in _HEADER_ALTERNATES]
  else:
    candidates = [filename]

  clang_line = None
  buildable_extension = extension
  for candidate in candidates:
    clang_line = GetClangCommandLineFromNinjaForSource(out_dir, candidate)
    if clang_line:
      buildable_extension = os.path.splitext(candidate)[1]
      break

  additional_flags += _EXTENSION_FLAGS.get(buildable_extension, [])

  if not clang_line:
    # If ninja didn't know about filename or its companion files, then try
    # a default build target. It is possible that the file is new, or
    # build.ninja is stale.
    clang_line = GetClangCommandLineFromNinjaForSource(
        out_dir, GetDefaultSourceFile(webrtc_root, filename))

  if not clang_line:
    return additional_flags

  return GetClangOptionsFromCommandLine(clang_line, out_dir, additional_flags)


def FlagsForFile(filename):
  """This is the main entry point for YCM. Its interface is fixed.

  Args:
    filename: (String) Path to source file being edited.

@ -347,16 +342,15 @@ def FlagsForFile(filename):
    'flags': (List of Strings) Command line flags.
    'do_cache': (Boolean) True if the result should be cached.
  """
  abs_filename = os.path.abspath(filename)
  webrtc_root = FindWebrtcSrcFromFilename(abs_filename)
  clang_flags = GetClangOptionsFromNinjaForFilename(webrtc_root, abs_filename)

  # If clang_flags could not be determined, then assume that was due to a
  # transient failure. Preventing YCM from caching the flags allows us to
  # try to determine the flags again.
  should_cache_flags_for_file = bool(clang_flags)

  final_flags = _DEFAULT_FLAGS + clang_flags

  return {'flags': final_flags, 'do_cache': should_cache_flags_for_file}
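
A rough sketch of how YCM consumes this entry point (the path is invented; real output depends on an actual checkout and a populated ninja out directory):

result = FlagsForFile('/path/to/src/rtc_base/logging.cc')  # hypothetical path
print(result['do_cache'])  # False if no ninja-derived flags were found
print(result['flags'])     # _DEFAULT_FLAGS plus '-I<src>' and warning options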