Use 'et format' in CI. Check formatting of all files in CI (flutter/engine#50810)

This PR changes the format check on CI to use the command added in
https://github.com/flutter/engine/pull/50747.

Additionally, while making this change, I noticed that the CI check was
not checking the formatting of all files, and that as a result, files
were present in the repo with incorrect formatting. I have fixed the
formatting and fixed the check to always check all files.
This commit is contained in:
Zachary Anderson 2024-02-21 17:38:08 +00:00 committed by GitHub
parent 9d7ba5629a
commit f6629ffe5c
93 changed files with 566 additions and 1235 deletions

View File

@ -23,9 +23,7 @@ import git_revision
def get_clang_version():
clang_executable = str(
os.path.join(
'..', '..', 'buildtools', 'mac-x64', 'clang', 'bin', 'clang++'
)
os.path.join('..', '..', 'buildtools', 'mac-x64', 'clang', 'bin', 'clang++')
)
version = subprocess.check_output([clang_executable, '--version'])
return version.splitlines()[0]
@ -39,20 +37,12 @@ def main():
)
parser.add_argument(
'--source',
help='Path to Info.plist source template',
type=str,
required=True
'--source', help='Path to Info.plist source template', type=str, required=True
)
parser.add_argument(
'--destination',
help='Path to destination Info.plist',
type=str,
required=True
)
parser.add_argument(
'--minversion', help='Minimum device OS version like "9.0"', type=str
'--destination', help='Path to destination Info.plist', type=str, required=True
)
parser.add_argument('--minversion', help='Minimum device OS version like "9.0"', type=str)
args = parser.parse_args()
@ -60,11 +50,7 @@ def main():
engine_path = os.path.join(os.getcwd(), '..', '..', 'flutter')
revision = git_revision.get_repository_version(engine_path)
clang_version = get_clang_version()
text = text.format(
revision=revision,
clang_version=clang_version,
min_version=args.minversion
)
text = text.format(revision=revision, clang_version=clang_version, min_version=args.minversion)
with open(args.destination, 'w') as outfile:
outfile.write(text)

View File

@ -16,9 +16,7 @@ import sys
USE_LINKS = sys.platform != 'win32'
DART_ANALYZE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'dart_analyze.py'
)
DART_ANALYZE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'dart_analyze.py')
def dart_filter(path):
@ -164,10 +162,7 @@ def analyze_entrypoints(dart_sdk, package_root, entrypoints):
def main():
parser = argparse.ArgumentParser(description='Generate a dart-pkg')
parser.add_argument(
'--dart-sdk',
action='store',
metavar='dart_sdk',
help='Path to the Dart SDK.'
'--dart-sdk', action='store', metavar='dart_sdk', help='Path to the Dart SDK.'
)
parser.add_argument(
'--package-name',
@ -183,28 +178,14 @@ def main():
required=True
)
parser.add_argument(
'--package-root',
metavar='package_root',
help='packages/ directory',
required=True
'--package-root', metavar='package_root', help='packages/ directory', required=True
)
parser.add_argument('--stamp-file', metavar='stamp_file', help='timestamp file', required=True)
parser.add_argument(
'--entries-file', metavar='entries_file', help='script entries file', required=True
)
parser.add_argument(
'--stamp-file',
metavar='stamp_file',
help='timestamp file',
required=True
)
parser.add_argument(
'--entries-file',
metavar='entries_file',
help='script entries file',
required=True
)
parser.add_argument(
'--package-sources',
metavar='package_sources',
help='Package sources',
nargs='+'
'--package-sources', metavar='package_sources', help='Package sources', nargs='+'
)
parser.add_argument(
'--package-entrypoints',
@ -259,16 +240,12 @@ def main():
sdkext_path = os.path.join(lib_path, '_sdkext')
if mappings:
with open(sdkext_path, 'w') as stream:
json.dump(
mappings, stream, sort_keys=True, indent=2, separators=(',', ': ')
)
json.dump(mappings, stream, sort_keys=True, indent=2, separators=(',', ': '))
else:
remove_if_exists(sdkext_path)
# Copy or symlink package sources into pkg directory.
common_source_prefix = os.path.dirname(
os.path.commonprefix(args.package_sources)
)
common_source_prefix = os.path.dirname(os.path.commonprefix(args.package_sources))
for source in args.package_sources:
relative_source = os.path.relpath(source, common_source_prefix)
target = os.path.join(target_dir, relative_source)
@ -291,9 +268,7 @@ def main():
target = os.path.join(sdk_ext_dir, relative_source)
copy_or_link(source, target)
common_source_prefix = os.path.dirname(
os.path.commonprefix(args.sdk_ext_files)
)
common_source_prefix = os.path.dirname(os.path.commonprefix(args.sdk_ext_files))
for source in args.sdk_ext_files:
relative_source = os.path.relpath(source, common_source_prefix)
target = os.path.join(sdk_ext_dir, relative_source)

View File

@ -13,9 +13,7 @@ import shutil
def get_llvm_bin_directory():
buildtool_dir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), '../../buildtools'
)
buildtool_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../buildtools')
platform_dir = ''
if sys.platform.startswith('linux'):
platform_dir = 'linux-x64'
@ -23,9 +21,7 @@ def get_llvm_bin_directory():
platform_dir = 'mac-x64'
else:
raise Exception('Unknown/Unsupported platform.')
llvm_bin_dir = os.path.abspath(
os.path.join(buildtool_dir, platform_dir, 'clang/bin')
)
llvm_bin_dir = os.path.abspath(os.path.join(buildtool_dir, platform_dir, 'clang/bin'))
if not os.path.exists(llvm_bin_dir):
raise Exception('LLVM directory %s could not be located.' % llvm_bin_dir)
return llvm_bin_dir
@ -61,9 +57,7 @@ def collect_profiles(args):
print('Path %s does not exist.' % absolute_test_path)
return -1
unstripped_test_path = os.path.join(
absolute_test_dir, 'exe.unstripped', test_name
)
unstripped_test_path = os.path.join(absolute_test_dir, 'exe.unstripped', test_name)
if os.path.exists(unstripped_test_path):
binaries.append(unstripped_test_path)
@ -74,10 +68,7 @@ def collect_profiles(args):
remove_if_exists(raw_profile)
print(
'Running test %s to gather profile.' %
os.path.basename(absolute_test_path)
)
print('Running test %s to gather profile.' % os.path.basename(absolute_test_path))
test_command = [absolute_test_path]
@ -105,8 +96,7 @@ def merge_profiles(llvm_bin_dir, raw_profiles, output):
print('Merging %d raw profile(s) into single profile.' % len(raw_profiles))
merged_profile_path = os.path.join(output, 'all.profile')
remove_if_exists(merged_profile_path)
merge_command = [profdata_binary, 'merge', '-sparse'
] + raw_profiles + ['-o', merged_profile_path]
merge_command = [profdata_binary, 'merge', '-sparse'] + raw_profiles + ['-o', merged_profile_path]
subprocess.check_call(merge_command)
print('Done.')
return merged_profile_path

View File

@ -40,10 +40,7 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--repository',
action='store',
help='Path to the Git repository.',
required=True
'--repository', action='store', help='Path to the Git repository.', required=True
)
args = parser.parse_args()

View File

@ -25,14 +25,10 @@ def _zip_dir(path, zip_file, prefix):
for file in files:
if os.path.islink(os.path.join(root, file)):
add_symlink(
zip_file, os.path.join(root, file),
os.path.join(root.replace(path, prefix), file)
zip_file, os.path.join(root, file), os.path.join(root.replace(path, prefix), file)
)
continue
zip_file.write(
os.path.join(root, file),
os.path.join(root.replace(path, prefix), file)
)
zip_file.write(os.path.join(root, file), os.path.join(root.replace(path, prefix), file))
def add_symlink(zip_file, source, target):
@ -80,12 +76,7 @@ def main(args):
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='This script creates zip files.')
parser.add_argument(
'-o',
dest='output',
action='store',
help='The name of the output zip file.'
)
parser.add_argument('-o', dest='output', action='store', help='The name of the output zip file.')
parser.add_argument(
'-i',
dest='input_pairs',
@ -94,9 +85,6 @@ if __name__ == '__main__':
help='The input file and its destination location in the zip archive.'
)
parser.add_argument(
'-f',
dest='source_file',
action='store',
help='The path to the file list to zip.'
'-f', dest='source_file', action='store', help='The path to the file list to zip.'
)
sys.exit(main(parser.parse_args()))

View File

@ -37,8 +37,13 @@
},
"tests": [
{
"name": "test:format_and_dart_test",
"script": "flutter/ci/format.sh"
"name": "test: Check formatting",
"script": "flutter/bin/et",
"parameters": [
"format",
"--dry-run",
"--all"
]
},
{
"name": "test:GeneratedPluginRegistrant.java omitted",

View File

@ -21,5 +21,4 @@ def byte_str_decode(str_or_bytes):
the possible value changes depending on the version of python
used.
"""
return str_or_bytes if isinstance(str_or_bytes,
str) else str_or_bytes.decode(ENCODING)
return str_or_bytes if isinstance(str_or_bytes, str) else str_or_bytes.decode(ENCODING)

View File

@ -15,9 +15,7 @@ import sys
# out/impeller-cmake-example, so the build can then be performed with
# e.g. ninja -C out/impeller-cmake-example-out.
SRC_ROOT = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
)
SRC_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def parse_args(argv):
@ -122,9 +120,7 @@ def main(argv):
return 0
if args.cmake:
cmake_path = os.path.join(
SRC_ROOT, 'buildtools', 'mac-x64', 'cmake', 'bin', 'cmake'
)
cmake_path = os.path.join(SRC_ROOT, 'buildtools', 'mac-x64', 'cmake', 'bin', 'cmake')
cmake_command = [
cmake_path,
'--preset',

View File

@ -79,8 +79,7 @@ def extract_deps(deps_file):
continue
dep_split = dep.rsplit('@', 1)
ancestor_result = get_common_ancestor([dep_split[0], dep_split[1]],
deps_list)
ancestor_result = get_common_ancestor([dep_split[0], dep_split[1]], deps_list)
if ancestor_result:
filtered_osv_deps.append({
'package': {'name': ancestor_result[1], 'commit': ancestor_result[0]}
@ -88,18 +87,12 @@ def extract_deps(deps_file):
try:
# Clean up cloned upstream dependency directory.
shutil.rmtree(
DEP_CLONE_DIR
) # Use shutil.rmtree since dir could be non-empty.
shutil.rmtree(DEP_CLONE_DIR) # Use shutil.rmtree since dir could be non-empty.
except OSError as clone_dir_error:
print(
'Error cleaning up clone directory: %s : %s' %
(DEP_CLONE_DIR, clone_dir_error.strerror)
)
print('Error cleaning up clone directory: %s : %s' % (DEP_CLONE_DIR, clone_dir_error.strerror))
osv_result = {
'packageSource': {'path': deps_file, 'type': 'lockfile'},
'packages': filtered_osv_deps
'packageSource': {'path': deps_file, 'type': 'lockfile'}, 'packages': filtered_osv_deps
}
return osv_result
@ -150,19 +143,12 @@ def get_common_ancestor(dep, deps_list):
upstream = deps_list.get(UPSTREAM_PREFIX + dep_name)
temp_dep_dir = DEP_CLONE_DIR + '/' + dep_name
# Clone dependency from mirror.
subprocess.check_output(['git', 'clone', '--quiet', '--', dep[0], dep_name],
cwd=DEP_CLONE_DIR)
subprocess.check_output(['git', 'clone', '--quiet', '--', dep[0], dep_name], cwd=DEP_CLONE_DIR)
# Create branch that will track the upstream dep.
print(
'attempting to add upstream remote from: {upstream}'.format(
upstream=upstream
)
)
subprocess.check_output(['git', 'remote', 'add', 'upstream', upstream],
cwd=temp_dep_dir)
subprocess.check_output(['git', 'fetch', '--quiet', 'upstream'],
cwd=temp_dep_dir)
print('attempting to add upstream remote from: {upstream}'.format(upstream=upstream))
subprocess.check_output(['git', 'remote', 'add', 'upstream', upstream], cwd=temp_dep_dir)
subprocess.check_output(['git', 'fetch', '--quiet', 'upstream'], cwd=temp_dep_dir)
# Get name of the default branch for upstream (e.g. main/master/etc.).
default_branch = subprocess.check_output(
'git remote show upstream ' + "| sed -n \'/HEAD branch/s/.*: //p\'",
@ -174,14 +160,12 @@ def get_common_ancestor(dep, deps_list):
# Make upstream branch track the upstream dep.
subprocess.check_output([
'git', 'checkout', '--force', '-b', 'upstream', '--track',
'upstream/' + default_branch
'git', 'checkout', '--force', '-b', 'upstream', '--track', 'upstream/' + default_branch
],
cwd=temp_dep_dir)
# Get the most recent commit from default branch of upstream.
commit = subprocess.check_output(
'git for-each-ref ' +
"--format=\'%(objectname:short)\' refs/heads/upstream",
'git for-each-ref ' + "--format=\'%(objectname:short)\' refs/heads/upstream",
cwd=temp_dep_dir,
shell=True
)
@ -211,9 +195,7 @@ def get_common_ancestor(dep, deps_list):
def parse_args(args):
args = args[1:]
parser = argparse.ArgumentParser(
description='A script to find common ancestor commit SHAs'
)
parser = argparse.ArgumentParser(description='A script to find common ancestor commit SHAs')
parser.add_argument(
'--deps',

View File

@ -142,9 +142,9 @@ TEST(DisplayListSkConversions, ToSkSamplingOptions) {
FUNC(kLastSeparableMode) \
FUNC(kLastMode)
TEST(DisplayListSkConversions, ToSkBlendMode) {
TEST(DisplayListSkConversions, ToSkBlendMode){
#define CHECK_TO_SKENUM(V) ASSERT_EQ(ToSk(DlBlendMode::V), SkBlendMode::V);
FOR_EACH_BLEND_MODE_ENUM(CHECK_TO_SKENUM)
FOR_EACH_BLEND_MODE_ENUM(CHECK_TO_SKENUM)
#undef CHECK_TO_SKENUM
}

View File

@ -2232,8 +2232,8 @@ class CanvasCompareTester {
return DirectoryStatus::kCreated;
}
FML_LOG(ERROR) << "Could not create directory (" << dir
<< ") for impeller failure images"
<< ", ret = " << ret.get() << ", errno = " << errno;
<< ") for impeller failure images" << ", ret = " << ret.get()
<< ", errno = " << errno;
return DirectoryStatus::kFailed;
}

View File

@ -95,8 +95,7 @@ sk_sp<SkTextBlob> PerformanceOverlayLayer::MakeStatisticsText(
std::stringstream stream;
stream.setf(std::ios::fixed | std::ios::showpoint);
stream << std::setprecision(1);
stream << label_prefix << " "
<< "max " << max_ms_per_frame << " ms/frame, "
stream << label_prefix << " " << "max " << max_ms_per_frame << " ms/frame, "
<< "avg " << average_ms_per_frame << " ms/frame";
auto text = stream.str();
return SkTextBlob::MakeFromText(text.c_str(), text.size(), font,

View File

@ -38,9 +38,8 @@ static std::string GetGoldenFilePath(int refresh_rate, bool is_new) {
std::stringstream ss;
// This unit test should only be run on Linux (not even on Mac since it's a
// golden test). Hence we don't have to worry about the "/" vs. "\".
ss << flutter::GetGoldenDir() << "/"
<< "performance_overlay_gold_" << refresh_rate << "fps"
<< (is_new ? "_new" : "") << ".png";
ss << flutter::GetGoldenDir() << "/" << "performance_overlay_gold_"
<< refresh_rate << "fps" << (is_new ? "_new" : "") << ".png";
return ss.str();
}
@ -118,9 +117,9 @@ static void TestPerformanceOverlayLayerGold(int refresh_rate) {
b64_char[b64_size] = 0; // make it null terminated for printing
EXPECT_TRUE(golden_data_matches)
<< "Golden file mismatch. Please check "
<< "the difference between " << golden_file_path << " and "
<< new_golden_file_path << ", and replace the former "
<< "Golden file mismatch. Please check " << "the difference between "
<< golden_file_path << " and " << new_golden_file_path
<< ", and replace the former "
<< "with the latter if the difference looks good.\nS\n"
<< "See also the base64 encoded " << new_golden_file_path << ":\n"
<< b64_char;

View File

@ -31,7 +31,7 @@ class MockRasterCacheResult : public RasterCacheResult {
void draw(DlCanvas& canvas,
const DlPaint* paint = nullptr,
bool preserve_rtree = false) const override{};
bool preserve_rtree = false) const override {};
SkISize image_dimensions() const override {
return SkSize::Make(device_rect_.width(), device_rect_.height()).toCeil();

View File

@ -54,13 +54,11 @@ TEST(MessageLoopTaskQueueMergeUnmerge,
auto queue_id_1 = task_queue->CreateTaskQueue();
auto queue_id_2 = task_queue->CreateTaskQueue();
task_queue->RegisterTask(
queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_1, []() {}, ChronoTicksSinceEpoch());
ASSERT_EQ(1u, task_queue->GetNumPendingTasks(queue_id_1));
task_queue->Merge(queue_id_1, queue_id_2);
task_queue->RegisterTask(
queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_1, []() {}, ChronoTicksSinceEpoch());
ASSERT_EQ(2u, task_queue->GetNumPendingTasks(queue_id_1));
ASSERT_EQ(0u, task_queue->GetNumPendingTasks(queue_id_2));
@ -73,8 +71,7 @@ TEST(MessageLoopTaskQueueMergeUnmerge,
auto queue_id_1 = task_queue->CreateTaskQueue();
auto queue_id_2 = task_queue->CreateTaskQueue();
task_queue->RegisterTask(
queue_id_2, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_2, []() {}, ChronoTicksSinceEpoch());
ASSERT_EQ(1u, task_queue->GetNumPendingTasks(queue_id_2));
task_queue->Merge(queue_id_1, queue_id_2);
@ -88,10 +85,8 @@ TEST(MessageLoopTaskQueueMergeUnmerge, MergeUnmergeTasksPreserved) {
auto queue_id_1 = task_queue->CreateTaskQueue();
auto queue_id_2 = task_queue->CreateTaskQueue();
task_queue->RegisterTask(
queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(
queue_id_2, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_2, []() {}, ChronoTicksSinceEpoch());
ASSERT_EQ(1u, task_queue->GetNumPendingTasks(queue_id_1));
ASSERT_EQ(1u, task_queue->GetNumPendingTasks(queue_id_2));
@ -200,8 +195,7 @@ TEST(MessageLoopTaskQueueMergeUnmerge, MergeInvokesBothWakeables) {
task_queue->SetWakeable(queue_id_1, wakeable1.get());
task_queue->SetWakeable(queue_id_2, wakeable2.get());
task_queue->RegisterTask(
queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->Merge(queue_id_1, queue_id_2);
@ -227,10 +221,8 @@ TEST(MessageLoopTaskQueueMergeUnmerge,
task_queue->SetWakeable(queue_id_1, wakeable1.get());
task_queue->SetWakeable(queue_id_2, wakeable2.get());
task_queue->RegisterTask(
queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(
queue_id_2, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_2, []() {}, ChronoTicksSinceEpoch());
task_queue->Merge(queue_id_1, queue_id_2);
task_queue->Unmerge(queue_id_1, queue_id_2);
@ -258,8 +250,7 @@ TEST(MessageLoopTaskQueueMergeUnmerge, GetTasksToRunNowBlocksMerge) {
wake_up_end.Wait();
});
task_queue->RegisterTask(
queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_1, []() {}, ChronoTicksSinceEpoch());
task_queue->SetWakeable(queue_id_1, wakeable.get());
std::thread tasks_to_run_now_thread(
@ -307,8 +298,7 @@ TEST(MessageLoopTaskQueueMergeUnmerge,
queue_id_2, [&]() { task_queue->Merge(queue_id_1, queue_id_2); },
ChronoTicksSinceEpoch());
task_queue->RegisterTask(
queue_id_2, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id_2, []() {}, ChronoTicksSinceEpoch());
ASSERT_EQ(CountRemainingTasks(task_queue, queue_id_2, true), 1);
ASSERT_EQ(CountRemainingTasks(task_queue, queue_id_1, true), 1);

View File

@ -46,8 +46,7 @@ TEST(MessageLoopTaskQueue, RegisterOneTask) {
[&time](fml::TimePoint wake_time) { ASSERT_TRUE(wake_time == time); });
task_queue->SetWakeable(queue_id, wakeable.get());
task_queue->RegisterTask(
queue_id, [] {}, time);
task_queue->RegisterTask(queue_id, [] {}, time);
ASSERT_TRUE(task_queue->HasPendingTasks(queue_id));
ASSERT_TRUE(task_queue->GetNumPendingTasks(queue_id) == 1);
}
@ -55,10 +54,8 @@ TEST(MessageLoopTaskQueue, RegisterOneTask) {
TEST(MessageLoopTaskQueue, RegisterTwoTasksAndCount) {
auto task_queue = fml::MessageLoopTaskQueues::GetInstance();
auto queue_id = task_queue->CreateTaskQueue();
task_queue->RegisterTask(
queue_id, [] {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(
queue_id, [] {}, fml::TimePoint::Max());
task_queue->RegisterTask(queue_id, [] {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id, [] {}, fml::TimePoint::Max());
ASSERT_TRUE(task_queue->HasPendingTasks(queue_id));
ASSERT_TRUE(task_queue->GetNumPendingTasks(queue_id) == 2);
}
@ -68,11 +65,9 @@ TEST(MessageLoopTaskQueue, RegisterTasksOnMergedQueuesAndCount) {
auto platform_queue = task_queue->CreateTaskQueue();
auto raster_queue = task_queue->CreateTaskQueue();
// A task in platform_queue
task_queue->RegisterTask(
platform_queue, []() {}, fml::TimePoint::Now());
task_queue->RegisterTask(platform_queue, []() {}, fml::TimePoint::Now());
// A task in raster_queue
task_queue->RegisterTask(
raster_queue, []() {}, fml::TimePoint::Now());
task_queue->RegisterTask(raster_queue, []() {}, fml::TimePoint::Now());
ASSERT_TRUE(task_queue->GetNumPendingTasks(platform_queue) == 1);
ASSERT_TRUE(task_queue->GetNumPendingTasks(raster_queue) == 1);
@ -270,10 +265,8 @@ TEST(MessageLoopTaskQueue, WakeUpIndependentOfTime) {
[&num_wakes](fml::TimePoint wake_time) { ++num_wakes; });
task_queue->SetWakeable(queue_id, wakeable.get());
task_queue->RegisterTask(
queue_id, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(
queue_id, []() {}, fml::TimePoint::Max());
task_queue->RegisterTask(queue_id, []() {}, ChronoTicksSinceEpoch());
task_queue->RegisterTask(queue_id, []() {}, fml::TimePoint::Max());
ASSERT_TRUE(num_wakes == 2);
}
@ -293,13 +286,11 @@ TEST(MessageLoopTaskQueue, WokenUpWithNewerTime) {
task_queue->SetWakeable(queue_id, wakeable.get());
task_queue->RegisterTask(
queue_id, []() {}, fml::TimePoint::Max());
task_queue->RegisterTask(queue_id, []() {}, fml::TimePoint::Max());
const auto now = ChronoTicksSinceEpoch();
expected = now;
task_queue->RegisterTask(
queue_id, []() {}, now);
task_queue->RegisterTask(queue_id, []() {}, now);
latch.Wait();
}
@ -435,16 +426,14 @@ TEST(MessageLoopTaskQueue, RegisterTaskWakesUpOwnerQueue) {
ASSERT_EQ(0UL, wakes.size());
task_queue->RegisterTask(
platform_queue, []() {}, time1);
task_queue->RegisterTask(platform_queue, []() {}, time1);
ASSERT_EQ(1UL, wakes.size());
ASSERT_EQ(time1, wakes[0]);
task_queue->Merge(platform_queue, raster_queue);
task_queue->RegisterTask(
raster_queue, []() {}, time2);
task_queue->RegisterTask(raster_queue, []() {}, time2);
ASSERT_EQ(3UL, wakes.size());
ASSERT_EQ(time1, wakes[1]);

View File

@ -67,8 +67,7 @@ bool GoldenDigest::Write(WorkingDirectory* working_directory) {
}
is_first = false;
fout << " { "
<< "\"testName\" : \"" << entry.test_name << "\", "
fout << " { " << "\"testName\" : \"" << entry.test_name << "\", "
<< "\"filename\" : \"" << entry.filename << "\", "
<< "\"width\" : " << entry.width << ", "
<< "\"height\" : " << entry.height << ", ";

View File

@ -80,8 +80,7 @@ ProcTableGLES::ProcTableGLES(Resolver resolver) {
auto error_fn = reinterpret_cast<PFNGLGETERRORPROC>(resolver("glGetError"));
if (!error_fn) {
VALIDATION_LOG << "Could not resolve "
<< "glGetError";
VALIDATION_LOG << "Could not resolve " << "glGetError";
return;
}

View File

@ -37,12 +37,10 @@ struct AutoErrorCheck {
}
if (GLErrorIsFatal(error)) {
FML_LOG(FATAL) << "Fatal GL Error " << GLErrorToString(error) << "("
<< error << ")"
<< " encountered on call to " << name;
<< error << ")" << " encountered on call to " << name;
} else {
FML_LOG(ERROR) << "GL Error " << GLErrorToString(error) << "(" << error
<< ")"
<< " encountered on call to " << name;
<< ")" << " encountered on call to " << name;
}
}
}

View File

@ -248,8 +248,8 @@ class ReactorGLES {
LiveHandles handles_ IPLR_GUARDED_BY(handles_mutex_);
mutable Mutex workers_mutex_;
mutable std::map<WorkerID, std::weak_ptr<Worker>> workers_
IPLR_GUARDED_BY(workers_mutex_);
mutable std::map<WorkerID, std::weak_ptr<Worker>> workers_ IPLR_GUARDED_BY(
workers_mutex_);
bool can_set_debug_labels_ = false;
bool is_valid_ = false;

View File

@ -45,8 +45,8 @@ class Context {
EGLDisplay display_ = EGL_NO_DISPLAY;
EGLContext context_ = EGL_NO_CONTEXT;
mutable RWMutex listeners_mutex_;
std::map<UniqueID, LifecycleListener> listeners_
IPLR_GUARDED_BY(listeners_mutex_);
std::map<UniqueID, LifecycleListener> listeners_ IPLR_GUARDED_BY(
listeners_mutex_);
void DispatchLifecyleEvent(LifecycleEvent event) const;

View File

@ -23,14 +23,9 @@ def make_directories(path):
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--output',
type=str,
required=True,
help='The location to generate the Metal library to.'
)
parser.add_argument(
'--depfile', type=str, required=True, help='The location of the depfile.'
'--output', type=str, required=True, help='The location to generate the Metal library to.'
)
parser.add_argument('--depfile', type=str, required=True, help='The location of the depfile.')
parser.add_argument(
'--source',
type=str,
@ -45,9 +40,7 @@ def main():
help='Select the platform.'
)
parser.add_argument(
'--metal-version',
required=True,
help='The language standard version to compile for.'
'--metal-version', required=True, help='The language standard version to compile for.'
)
args = parser.parse_args()

View File

@ -54,9 +54,7 @@ def is_source_file(path):
# Checks that all source files have the same license preamble.
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--source-root', type=str, required=True, help='The source root.'
)
parser.add_argument('--source-root', type=str, required=True, help='The source root.')
args = parser.parse_args()
assert os.path.exists(args.source_root)
@ -71,9 +69,7 @@ def main():
for source_file in source_files:
if not contains_license_block(source_file):
raise Exception(
'Could not find valid license block in source ', source_file
)
raise Exception('Could not find valid license block in source ', source_file)
if __name__ == '__main__':

View File

@ -34,9 +34,7 @@ import sys
# negative, the exit code for this script will be 1, and 0 otherwise.
SRC_ROOT = os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
)
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
)
CORES = [
@ -46,9 +44,7 @@ CORES = [
# Path to the engine root checkout. This is used to calculate absolute
# paths if relative ones are passed to the script.
BUILD_ROOT_DIR = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
BUILD_ROOT_DIR = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
def parse_args(argv):
@ -118,12 +114,8 @@ def validate_args(args):
# Generate full paths if relative ones are provided with before and
# after taking precedence.
args.before = (
args.before or os.path.join(BUILD_ROOT_DIR, args.before_relative_to_src)
)
args.after = (
args.after or os.path.join(BUILD_ROOT_DIR, args.after_relative_to_src)
)
args.before = (args.before or os.path.join(BUILD_ROOT_DIR, args.before_relative_to_src))
args.after = (args.after or os.path.join(BUILD_ROOT_DIR, args.after_relative_to_src))
if not args.after or not os.path.isdir(args.after):
print('The --after argument must refer to a directory.')
@ -141,13 +133,11 @@ def read_malioc_file_performance(performance_json):
longest_path_cycles = performance_json['longest_path_cycles']
performance['longest_path_cycles'] = longest_path_cycles['cycle_count']
performance['longest_path_bound_pipelines'] = longest_path_cycles[
'bound_pipelines']
performance['longest_path_bound_pipelines'] = longest_path_cycles['bound_pipelines']
shortest_path_cycles = performance_json['shortest_path_cycles']
performance['shortest_path_cycles'] = shortest_path_cycles['cycle_count']
performance['shortest_path_bound_pipelines'] = shortest_path_cycles[
'bound_pipelines']
performance['shortest_path_bound_pipelines'] = shortest_path_cycles['bound_pipelines']
total_cycles = performance_json['total_cycles']
performance['total_cycles'] = total_cycles['cycle_count']
@ -219,13 +209,9 @@ def read_malioc_tree(malioc_tree):
# a space of `width` characters, and separated by `sep`. The separator does not
# count against the `width`. If `width` is 0, then the width is unconstrained.
def pretty_list(lst, fmt='s', sep='', width=12):
formats = [
'{:<{width}{fmt}}' if ele is not None else '{:<{width}s}' for ele in lst
]
formats = ['{:<{width}{fmt}}' if ele is not None else '{:<{width}s}' for ele in lst]
sanitized_list = [x if x is not None else 'null' for x in lst]
return (sep.join(formats)).format(
width='' if width == 0 else width, fmt=fmt, *sanitized_list
)
return (sep.join(formats)).format(width='' if width == 0 else width, fmt=fmt, *sanitized_list)
def compare_performance(variant, before, after):
@ -264,13 +250,10 @@ def compare_variants(befores, afters):
for variant_key, before_variant_val in before_variant.items():
after_variant_val = after_variant[variant_key]
if variant_key == 'performance':
differences += compare_performance(
variant_name, before_variant_val, after_variant_val
)
differences += compare_performance(variant_name, before_variant_val, after_variant_val)
elif before_variant_val != after_variant_val:
differences += [
'In variant {}:\n {vkey}: {} <- before\n {vkey}: {} <- after'
.format(
'In variant {}:\n {vkey}: {} <- before\n {vkey}: {} <- after'.format(
variant_name,
before_variant_val,
after_variant_val,
@ -291,11 +274,7 @@ def compare_shaders(malioc_tree, before_shader, after_shader):
elif key == 'performance':
differences += compare_performance('Default', before_val, after_val)
elif before_val != after_val:
differences += [
'{}:\n {} <- before\n {} <- after'.format(
key, before_val, after_val
)
]
differences += ['{}:\n {} <- before\n {} <- after'.format(key, before_val, after_val)]
if bool(differences):
build_gen_dir = os.path.dirname(malioc_tree)
@ -357,22 +336,15 @@ def main(argv):
'changes to existing shaders. The golden file must be updated after a '
'build of android_debug_unopt using the --malioc-path flag to the '
'flutter/tools/gn script.\n\n'
'$ ./flutter/impeller/tools/malioc_diff.py --before {} --after {} --update'
.format(args.before, args.after)
'$ ./flutter/impeller/tools/malioc_diff.py --before {} --after {} --update'.format(
args.before, args.after
)
)
if args.print_diff:
before_lines = json.dumps(
before_json, sort_keys=True, indent=2
).splitlines(keepends=True)
after_lines = json.dumps(
after_json, sort_keys=True, indent=2
).splitlines(keepends=True)
before_path = os.path.relpath(
os.path.abspath(args.before), start=SRC_ROOT
)
diff = difflib.unified_diff(
before_lines, after_lines, fromfile=before_path
)
before_lines = json.dumps(before_json, sort_keys=True, indent=2).splitlines(keepends=True)
after_lines = json.dumps(after_json, sort_keys=True, indent=2).splitlines(keepends=True)
before_path = os.path.relpath(os.path.abspath(args.before), start=SRC_ROOT)
diff = difflib.unified_diff(before_lines, after_lines, fromfile=before_path)
print('\nYou can alternately apply the diff below:')
print('patch -p0 <<DONE')
print(*diff, sep='')

View File

@ -22,10 +22,7 @@ def make_directories(path):
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--symbol-name',
type=str,
required=True,
help='The name of the symbol referencing the data.'
'--symbol-name', type=str, required=True, help='The name of the symbol referencing the data.'
)
parser.add_argument(
'--output-header',
@ -34,10 +31,7 @@ def main():
help='The header file containing the symbol reference.'
)
parser.add_argument(
'--output-source',
type=str,
required=True,
help='The source file containing the file bytes.'
'--output-source', type=str, required=True, help='The source file containing the file bytes.'
)
parser.add_argument(
'--source',
@ -72,9 +66,7 @@ def main():
data_len += 1
output.write(f'{ord(byte)},')
output.write('};\n')
output.write(
f'const unsigned long impeller_{args.symbol_name}_length = {data_len};\n'
)
output.write(f'const unsigned long impeller_{args.symbol_name}_length = {data_len};\n')
with open(output_header, 'w') as output:
output.write('#pragma once\n')
@ -82,12 +74,8 @@ def main():
output.write('extern "C" {\n')
output.write('#endif\n\n')
output.write(
f'extern const unsigned char impeller_{args.symbol_name}_data[];\n'
)
output.write(
f'extern const unsigned long impeller_{args.symbol_name}_length;\n\n'
)
output.write(f'extern const unsigned char impeller_{args.symbol_name}_data[];\n')
output.write(f'extern const unsigned long impeller_{args.symbol_name}_length;\n\n')
output.write('#ifdef __cplusplus\n')
output.write('}\n')

View File

@ -10,21 +10,19 @@ layout(location = 0) out vec4 fragColor;
layout(location = 0) uniform float a;
// For a given incident vector I and surface normal N reflect returns the reflection direction calculated as I - 2.0 * dot(N, I) * N.
// For a given incident vector I and surface normal N reflect returns the
// reflection direction calculated as I - 2.0 * dot(N, I) * N.
void main() {
// To get [0.0, 1.0] as the output, choose [0.6, 0.8] as N, and solve for I.
// Since the reflection is symmetric:
// I = reflect(I)
// I = I - 2 dot(N, I) N
// I = I - 2 dot(N, I) N
// N = [0.6, 0.8]
// I = [0, 1]
// I = [0, 1] - 2 * 0.8 [0.6, 0.8]
// I = [-0.96, -0.28]
fragColor = vec4(
reflect(vec2(a * -0.96, -0.28), vec2(0.6, 0.8))[0],
reflect(vec2(a * -0.96, -0.28), vec2(0.6, 0.8))[1],
0.0,
1.0
);
// To get [0.0, 1.0] as the output, choose [0.6, 0.8] as N, and solve for I.
// Since the reflection is symmetric:
// I = reflect(I)
// I = I - 2 dot(N, I) N
// I = I - 2 dot(N, I) N
// N = [0.6, 0.8]
// I = [0, 1]
// I = [0, 1] - 2 * 0.8 [0.6, 0.8]
// I = [-0.96, -0.28]
fragColor =
vec4(reflect(vec2(a * -0.96, -0.28), vec2(0.6, 0.8))[0],
reflect(vec2(a * -0.96, -0.28), vec2(0.6, 0.8))[1], 0.0, 1.0);
}

View File

@ -16,7 +16,7 @@
namespace flutter {
class Fixture : public testing::FixtureTest {
void TestBody() override{};
void TestBody() override {};
};
static void BM_PlatformMessageResponseDartComplete(benchmark::State& state) {

View File

@ -95,15 +95,14 @@ bool operator==(const ViewportMetrics& a, const ViewportMetrics& b) {
}
std::ostream& operator<<(std::ostream& os, const ViewportMetrics& a) {
os << "DPR: " << a.device_pixel_ratio << " "
<< "Size: [" << a.physical_width << "W " << a.physical_height << "H] "
<< "Padding: [" << a.physical_padding_top << "T "
<< a.physical_padding_right << "R " << a.physical_padding_bottom << "B "
<< a.physical_padding_left << "L] "
os << "DPR: " << a.device_pixel_ratio << " " << "Size: [" << a.physical_width
<< "W " << a.physical_height << "H] " << "Padding: ["
<< a.physical_padding_top << "T " << a.physical_padding_right << "R "
<< a.physical_padding_bottom << "B " << a.physical_padding_left << "L] "
<< "Insets: [" << a.physical_view_inset_top << "T "
<< a.physical_view_inset_right << "R " << a.physical_view_inset_bottom
<< "B " << a.physical_view_inset_left << "L] "
<< "Gesture Insets: [" << a.physical_system_gesture_inset_top << "T "
<< "B " << a.physical_view_inset_left << "L] " << "Gesture Insets: ["
<< a.physical_system_gesture_inset_top << "T "
<< a.physical_system_gesture_inset_right << "R "
<< a.physical_system_gesture_inset_bottom << "B "
<< a.physical_system_gesture_inset_left << "L] "

View File

@ -95,8 +95,8 @@ FLUTTER_EXPORT void FlutterDesktopMessengerSetCallback(
// Operation is thread-safe.
//
// See also: |FlutterDesktopMessengerRelease|
FLUTTER_EXPORT FlutterDesktopMessengerRef
FlutterDesktopMessengerAddRef(FlutterDesktopMessengerRef messenger);
FLUTTER_EXPORT FlutterDesktopMessengerRef FlutterDesktopMessengerAddRef(
FlutterDesktopMessengerRef messenger);
// Decrements the reference count for the |messenger|.
//
@ -126,8 +126,8 @@ FLUTTER_EXPORT bool FlutterDesktopMessengerIsAvailable(
// Returns the |messenger| value.
//
// See also: |FlutterDesktopMessengerUnlock|
FLUTTER_EXPORT FlutterDesktopMessengerRef
FlutterDesktopMessengerLock(FlutterDesktopMessengerRef messenger);
FLUTTER_EXPORT FlutterDesktopMessengerRef FlutterDesktopMessengerLock(
FlutterDesktopMessengerRef messenger);
// Unlocks the `FlutterDesktopMessengerRef`.
//

View File

@ -206,8 +206,9 @@ static NSString* const kRestorationStateAppModificationKey = @"mod-date";
- (BOOL)application:(UIApplication*)application
continueUserActivity:(NSUserActivity*)userActivity
restorationHandler:(void (^)(NSArray<id<UIUserActivityRestoring>>* __nullable
restorableObjects))restorationHandler {
restorationHandler:
(void (^)(NSArray<id<UIUserActivityRestoring>>* __nullable restorableObjects))
restorationHandler {
if ([_lifeCycleDelegate application:application
continueUserActivity:userActivity
restorationHandler:restorationHandler]) {

View File

@ -129,8 +129,7 @@
<< "as hot reload and DevTools. To make your Flutter app or module "
<< "attachable and debuggable, add a '" << registrationType << "' value "
<< "to the 'NSBonjourServices' key in your Info.plist for the Debug/"
<< "Profile configurations. "
<< "For more information, see "
<< "Profile configurations. " << "For more information, see "
<< "https://flutter.dev/docs/development/add-to-app/ios/"
"project-setup#local-network-privacy-permissions";
}

View File

@ -91,13 +91,13 @@ void IOIteratorReset(io_iterator_t it);
CFMutableDictionaryRef IOServiceMatching(const char* name) CF_RETURNS_RETAINED;
CFMutableDictionaryRef IOServiceNameMatching(const char* name)
CF_RETURNS_RETAINED;
io_service_t IOServiceGetMatchingService(mach_port_t master,
CFDictionaryRef matching
CF_RELEASES_ARGUMENT);
kern_return_t IOServiceGetMatchingServices(mach_port_t master,
CFDictionaryRef matching
CF_RELEASES_ARGUMENT,
io_iterator_t* it);
io_service_t IOServiceGetMatchingService(
mach_port_t master,
CFDictionaryRef matching CF_RELEASES_ARGUMENT);
kern_return_t IOServiceGetMatchingServices(
mach_port_t master,
CFDictionaryRef matching CF_RELEASES_ARGUMENT,
io_iterator_t* it);
#if __cplusplus
}

View File

@ -18,10 +18,10 @@ extern "C" {
ZIRCON_FFI_EXPORT zircon_dart_handle_pair_t* zircon_dart_channel_create(
uint32_t options);
ZIRCON_FFI_EXPORT int32_t
zircon_dart_channel_write(zircon_dart_handle_t* handle,
zircon_dart_byte_array_t* bytes,
zircon_dart_handle_list_t* handles);
ZIRCON_FFI_EXPORT int32_t zircon_dart_channel_write(
zircon_dart_handle_t* handle,
zircon_dart_byte_array_t* bytes,
zircon_dart_handle_list_t* handles);
#ifdef __cplusplus
}

View File

@ -43,13 +43,13 @@ ZIRCON_FFI_EXPORT void zircon_dart_handle_list_free(
zircon_dart_handle_list_t* list);
// Returns 1 if the handle is valid.
ZIRCON_FFI_EXPORT int32_t
zircon_dart_handle_is_valid(zircon_dart_handle_t* handle);
ZIRCON_FFI_EXPORT int32_t zircon_dart_handle_is_valid(
zircon_dart_handle_t* handle);
// Closes the handle, but doesn't release any ffi-associated memory. Returns 1
// on success.
ZIRCON_FFI_EXPORT int32_t
zircon_dart_handle_close(zircon_dart_handle_t* handle);
ZIRCON_FFI_EXPORT int32_t zircon_dart_handle_close(
zircon_dart_handle_t* handle);
// Closes the zircon handle if valid and frees the memory.
ZIRCON_FFI_EXPORT void zircon_dart_handle_free(zircon_dart_handle_t* handle);

View File

@ -13,34 +13,17 @@ import sys
def main():
parser = argparse.ArgumentParser(description='Package a Flutter application')
parser.add_argument('--flutter-root', type=str, required=True, help='The root of the Flutter SDK')
parser.add_argument(
'--flutter-root',
type=str,
required=True,
help='The root of the Flutter SDK'
'--flutter-tools', type=str, required=True, help='The executable for the Flutter tool'
)
parser.add_argument(
'--flutter-tools',
type=str,
required=True,
help='The executable for the Flutter tool'
)
parser.add_argument(
'--asset-dir',
type=str,
required=True,
help='The directory where to put intermediate files'
)
parser.add_argument(
'--app-dir', type=str, required=True, help='The root of the app'
)
parser.add_argument(
'--packages', type=str, required=True, help='The package map to use'
'--asset-dir', type=str, required=True, help='The directory where to put intermediate files'
)
parser.add_argument('--app-dir', type=str, required=True, help='The root of the app')
parser.add_argument('--packages', type=str, required=True, help='The package map to use')
parser.add_argument('--manifest', type=str, help='The application manifest')
parser.add_argument(
'--component-name', type=str, help='The name of the component'
)
parser.add_argument('--component-name', type=str, help='The name of the component')
parser.add_argument(
'--asset-manifest-out',
type=str,

View File

@ -13,12 +13,8 @@ def main():
parser = argparse.ArgumentParser(
sys.argv[0], description="Generate main file for Fuchsia dart test"
)
parser.add_argument(
"--out", help="Path to .dart file to generate", required=True
)
parser.add_argument(
"--main-dart", help="Path to main.dart file to import", required=True
)
parser.add_argument("--out", help="Path to .dart file to generate", required=True)
parser.add_argument("--main-dart", help="Path to main.dart file to import", required=True)
args = parser.parse_args()
out_dir = os.path.dirname(args.out)
assert os.path.isfile(os.path.join(os.path.dirname(args.out), args.main_dart))

View File

@ -143,8 +143,7 @@ TEST_F(FocusDelegateTest, RequestFocusTest) {
// Create the platform message request.
std::ostringstream message;
message << "{"
<< " \"method\":\"View.focus.request\","
message << "{" << " \"method\":\"View.focus.request\","
<< " \"args\": {"
<< " \"viewRef\":" << view_ref.reference.get() << " }"
<< "}";
@ -176,8 +175,7 @@ TEST_F(FocusDelegateTest, RequestFocusFailTest) {
focuser_->fail_request_focus();
// Create the platform message request.
std::ostringstream message;
message << "{"
<< " \"method\":\"View.focus.request\","
message << "{" << " \"method\":\"View.focus.request\","
<< " \"args\": {"
<< " \"viewRef\":" << view_ref.reference.get() << " }"
<< "}";

View File

@ -381,15 +381,12 @@ void PlatformView::OnChildViewStatus(
FML_DCHECK(child_view_info_.count(content_id) == 1);
std::ostringstream out;
out << "{"
<< "\"method\":\"View.viewStateChanged\","
<< "\"args\":{"
out << "{" << "\"method\":\"View.viewStateChanged\"," << "\"args\":{"
<< " \"viewId\":" << child_view_info_.at(content_id).view_id
<< "," // ViewId
<< " \"is_rendering\":true," // IsViewRendering
<< " \"state\":true" // IsViewRendering
<< " }"
<< "}";
<< " }" << "}";
auto call = out.str();
std::unique_ptr<flutter::PlatformMessage> message =
@ -518,9 +515,7 @@ void PlatformView::OnDisposeView(int64_t view_id_raw) {
void PlatformView::OnChildViewConnected(uint64_t content_id) {
FML_CHECK(child_view_info_.count(content_id) == 1);
std::ostringstream out;
out << "{"
<< "\"method\":\"View.viewConnected\","
<< "\"args\":{"
out << "{" << "\"method\":\"View.viewConnected\"," << "\"args\":{"
<< " \"viewId\":" << child_view_info_.at(content_id).view_id << " }"
<< "}";
auto call = out.str();
@ -535,9 +530,7 @@ void PlatformView::OnChildViewConnected(uint64_t content_id) {
void PlatformView::OnChildViewDisconnected(uint64_t content_id) {
FML_CHECK(child_view_info_.count(content_id) == 1);
std::ostringstream out;
out << "{"
<< "\"method\":\"View.viewDisconnected\","
<< "\"args\":{"
out << "{" << "\"method\":\"View.viewDisconnected\"," << "\"args\":{"
<< " \"viewId\":" << child_view_info_.at(content_id).view_id << " }"
<< "}";
auto call = out.str();

View File

@ -108,11 +108,9 @@ class PlatformMessageBuilder {
rapidjson::Value Build() {
std::ostringstream message;
message << "{"
<< " \"method\":\""
message << "{" << " \"method\":\""
<< PointerInjectorDelegate::kPointerInjectorMethodPrefix << "\","
<< " \"args\": {"
<< " \"viewId\":" << view_id_ << ","
<< " \"args\": {" << " \"viewId\":" << view_id_ << ","
<< " \"x\":" << pointer_x_ << ","
<< " \"y\":" << pointer_y_ << ","
<< " \"phase\":" << phase_ << ","
@ -121,8 +119,7 @@ class PlatformMessageBuilder {
<< " \"viewRef\":" << view_ref_.reference.get() << ","
<< " \"logicalWidth\":" << width_ << ","
<< " \"logicalHeight\":" << height_ << ","
<< " \"timestamp\":" << timestamp_ << " }"
<< "}";
<< " \"timestamp\":" << timestamp_ << " }" << "}";
return ParsePlatformMessage(message.str());
}

View File

@ -44,8 +44,7 @@ Screenshot::Screenshot(const zx::vmo& screenshot_vmo,
}
std::ostream& operator<<(std::ostream& stream, const Pixel& pixel) {
return stream << "{Pixel:"
<< " r:" << static_cast<unsigned int>(pixel.red)
return stream << "{Pixel:" << " r:" << static_cast<unsigned int>(pixel.red)
<< " g:" << static_cast<unsigned int>(pixel.green)
<< " b:" << static_cast<unsigned int>(pixel.blue)
<< " a:" << static_cast<unsigned int>(pixel.alpha) << "}";

View File

@ -803,14 +803,10 @@ TEST_F(PlatformViewTests, CreateViewTest) {
// JSON for the message to be passed into the PlatformView.
std::ostringstream create_view_message;
create_view_message << "{"
<< " \"method\":\"View.create\","
<< " \"args\":{"
<< " \"viewId\":" << view_id << ","
<< " \"hitTestable\":true,"
<< " \"focusable\":true"
<< " }"
<< "}";
create_view_message << "{" << " \"method\":\"View.create\","
<< " \"args\":{" << " \"viewId\":" << view_id << ","
<< " \"hitTestable\":true," << " \"focusable\":true"
<< " }" << "}";
std::string create_view_call = create_view_message.str();
std::unique_ptr<flutter::PlatformMessage> message =
@ -828,11 +824,9 @@ TEST_F(PlatformViewTests, CreateViewTest) {
// Platform view forwards the 'View.viewConnected' message on the
// 'flutter/platform_views' channel when a view gets created.
std::ostringstream view_connected_expected_out;
view_connected_expected_out << "{"
<< "\"method\":\"View.viewConnected\","
<< "\"args\":{"
<< " \"viewId\":" << view_id << " }"
<< "}";
view_connected_expected_out << "{" << "\"method\":\"View.viewConnected\","
<< "\"args\":{" << " \"viewId\":" << view_id
<< " }" << "}";
ASSERT_NE(delegate.message(), nullptr);
EXPECT_EQ(view_connected_expected_out.str(),
@ -1002,14 +996,10 @@ TEST_F(PlatformViewTests, DestroyViewTest) {
EXPECT_TRUE(base_view);
std::ostringstream create_message;
create_message << "{"
<< " \"method\":\"View.create\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << ","
create_message << "{" << " \"method\":\"View.create\","
<< " \"args\": {" << " \"viewId\":" << view_id << ","
<< " \"hitTestable\":true,"
<< " \"focusable\":true"
<< " }"
<< "}";
<< " \"focusable\":true" << " }" << "}";
auto create_response = FakePlatformMessageResponse::Create();
base_view->HandlePlatformMessage(create_response->WithMessage(
@ -1020,11 +1010,9 @@ TEST_F(PlatformViewTests, DestroyViewTest) {
// JSON for the message to be passed into the PlatformView.
std::ostringstream dispose_message;
dispose_message << "{"
<< " \"method\":\"View.dispose\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << " }"
<< "}";
dispose_message << "{" << " \"method\":\"View.dispose\","
<< " \"args\": {" << " \"viewId\":" << view_id
<< " }" << "}";
std::string dispose_view_call = dispose_message.str();
std::unique_ptr<flutter::PlatformMessage> message =
@ -1042,11 +1030,9 @@ TEST_F(PlatformViewTests, DestroyViewTest) {
// Platform view forwards the 'View.viewDisconnected' message on the
// 'flutter/platform_views' channel when a view gets destroyed.
std::ostringstream view_disconnected_expected_out;
view_disconnected_expected_out << "{"
<< "\"method\":\"View.viewDisconnected\","
<< "\"args\":{"
<< " \"viewId\":" << view_id << " }"
<< "}";
view_disconnected_expected_out
<< "{" << "\"method\":\"View.viewDisconnected\"," << "\"args\":{"
<< " \"viewId\":" << view_id << " }" << "}";
ASSERT_NE(delegate.message(), nullptr);
EXPECT_EQ(view_disconnected_expected_out.str(),
@ -1155,14 +1141,10 @@ TEST_F(PlatformViewTests, RequestFocusTest) {
uint64_t view_id = 42;
std::ostringstream create_message;
create_message << "{"
<< " \"method\":\"View.create\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << ","
create_message << "{" << " \"method\":\"View.create\","
<< " \"args\": {" << " \"viewId\":" << view_id << ","
<< " \"hitTestable\":true,"
<< " \"focusable\":true"
<< " }"
<< "}";
<< " \"focusable\":true" << " }" << "}";
// Dispatch the plaform message request.
auto create_response = FakePlatformMessageResponse::Create();
@ -1173,11 +1155,9 @@ TEST_F(PlatformViewTests, RequestFocusTest) {
// JSON for the message to be passed into the PlatformView.
std::ostringstream focus_message;
focus_message << "{"
<< " \"method\":\"View.focus.requestById\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << " }"
<< "}";
focus_message << "{" << " \"method\":\"View.focus.requestById\","
<< " \"args\": {" << " \"viewId\":" << view_id
<< " }" << "}";
// Dispatch the plaform message request.
auto focus_response = FakePlatformMessageResponse::Create();
@ -1228,11 +1208,9 @@ TEST_F(PlatformViewTests, RequestFocusNeverCreatedTest) {
uint64_t view_id = 42;
std::ostringstream focus_message;
focus_message << "{"
<< " \"method\":\"View.focus.requestById\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << " }"
<< "}";
focus_message << "{" << " \"method\":\"View.focus.requestById\","
<< " \"args\": {" << " \"viewId\":" << view_id
<< " }" << "}";
// Dispatch the plaform message request.
auto focus_response = FakePlatformMessageResponse::Create();
@ -1294,14 +1272,10 @@ TEST_F(PlatformViewTests, RequestFocusDisposedTest) {
// Create a new view
std::ostringstream create_message;
create_message << "{"
<< " \"method\":\"View.create\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << ","
create_message << "{" << " \"method\":\"View.create\","
<< " \"args\": {" << " \"viewId\":" << view_id << ","
<< " \"hitTestable\":true,"
<< " \"focusable\":true"
<< " }"
<< "}";
<< " \"focusable\":true" << " }" << "}";
auto create_response = FakePlatformMessageResponse::Create();
base_view->HandlePlatformMessage(create_response->WithMessage(
@ -1311,11 +1285,9 @@ TEST_F(PlatformViewTests, RequestFocusDisposedTest) {
EXPECT_FALSE(destroy_view_called);
// Dispose of the view
std::ostringstream dispose_message;
dispose_message << "{"
<< " \"method\":\"View.dispose\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << " }"
<< "}";
dispose_message << "{" << " \"method\":\"View.dispose\","
<< " \"args\": {" << " \"viewId\":" << view_id
<< " }" << "}";
auto dispose_response = FakePlatformMessageResponse::Create();
base_view->HandlePlatformMessage(dispose_response->WithMessage(
@ -1325,11 +1297,9 @@ TEST_F(PlatformViewTests, RequestFocusDisposedTest) {
// Request focus on newly disposed view
std::ostringstream focus_message;
focus_message << "{"
<< " \"method\":\"View.focus.requestById\","
<< " \"args\": {"
<< " \"viewId\":" << view_id << " }"
<< "}";
focus_message << "{" << " \"method\":\"View.focus.requestById\","
<< " \"args\": {" << " \"viewId\":" << view_id
<< " }" << "}";
auto focus_response = FakePlatformMessageResponse::Create();
base_view->HandlePlatformMessage(focus_response->WithMessage(
@ -1468,13 +1438,11 @@ TEST_F(PlatformViewTests, OnShaderWarmup) {
// Create initial view for testing.
std::ostringstream warmup_shaders_ostream;
warmup_shaders_ostream << "{"
<< " \"method\":\"WarmupSkps\","
warmup_shaders_ostream << "{" << " \"method\":\"WarmupSkps\","
<< " \"args\":{"
<< " \"shaders\":" << shaders_array_string << ","
<< " \"width\":" << width << ","
<< " \"height\":" << height << " }"
<< "}\n";
<< " \"height\":" << height << " }" << "}\n";
std::string warmup_shaders_string = warmup_shaders_ostream.str();
fml::RefPtr<TestPlatformMessageResponse> response(

View File

@ -120,15 +120,15 @@ FLUTTER_EXPORT bool FlutterDesktopRunWindowEventLoopWithTimeout(
// FlutterDesktopWindowControllerRef.
//
// Its lifetime is the same as the |controller|'s.
FLUTTER_EXPORT FlutterDesktopWindowRef
FlutterDesktopGetWindow(FlutterDesktopWindowControllerRef controller);
FLUTTER_EXPORT FlutterDesktopWindowRef FlutterDesktopGetWindow(
FlutterDesktopWindowControllerRef controller);
// Returns the handle for the engine running in
// FlutterDesktopWindowControllerRef.
//
// Its lifetime is the same as the |controller|'s.
FLUTTER_EXPORT FlutterDesktopEngineRef
FlutterDesktopGetEngine(FlutterDesktopWindowControllerRef controller);
FLUTTER_EXPORT FlutterDesktopEngineRef FlutterDesktopGetEngine(
FlutterDesktopWindowControllerRef controller);
// Returns the plugin registrar handle for the plugin with the given name.
//
@ -201,8 +201,8 @@ FLUTTER_EXPORT void FlutterDesktopWindowSetSizeLimits(
// Runs an instance of a headless Flutter engine.
//
// Returns a null pointer in the event of an error.
FLUTTER_EXPORT FlutterDesktopEngineRef
FlutterDesktopRunEngine(const FlutterDesktopEngineProperties& properties);
FLUTTER_EXPORT FlutterDesktopEngineRef FlutterDesktopRunEngine(
const FlutterDesktopEngineProperties& properties);
// Waits for and processes the next event before |timeout_milliseconds|.
//

View File

@ -234,10 +234,10 @@ G_MODULE_EXPORT gboolean fl_event_channel_send_error(FlEventChannel* self,
return TRUE;
}
G_MODULE_EXPORT gboolean
fl_event_channel_send_end_of_stream(FlEventChannel* self,
GCancellable* cancellable,
GError** error) {
G_MODULE_EXPORT gboolean fl_event_channel_send_end_of_stream(
FlEventChannel* self,
GCancellable* cancellable,
GError** error) {
g_return_val_if_fail(FL_IS_EVENT_CHANNEL(self), FALSE);
fl_binary_messenger_send_on_channel(self->messenger, self->name, nullptr,
cancellable, nullptr, nullptr);

View File

@ -122,8 +122,9 @@ G_MODULE_EXPORT gboolean fl_method_call_respond_error(FlMethodCall* self,
response, error);
}
G_MODULE_EXPORT gboolean
fl_method_call_respond_not_implemented(FlMethodCall* self, GError** error) {
G_MODULE_EXPORT gboolean fl_method_call_respond_not_implemented(
FlMethodCall* self,
GError** error) {
g_return_val_if_fail(FL_IS_METHOD_CALL(self), FALSE);
g_autoptr(FlMethodResponse) response =

View File

@ -179,9 +179,9 @@ static void fl_texture_registrar_impl_init(FlTextureRegistrarImpl* self) {
g_mutex_init(&self->textures_mutex);
}
G_MODULE_EXPORT gboolean
fl_texture_registrar_register_texture(FlTextureRegistrar* self,
FlTexture* texture) {
G_MODULE_EXPORT gboolean fl_texture_registrar_register_texture(
FlTextureRegistrar* self,
FlTexture* texture) {
g_return_val_if_fail(FL_IS_TEXTURE_REGISTRAR(self), FALSE);
g_return_val_if_fail(FL_IS_TEXTURE(texture), FALSE);
@ -195,18 +195,18 @@ FlTexture* fl_texture_registrar_lookup_texture(FlTextureRegistrar* self,
return FL_TEXTURE_REGISTRAR_GET_IFACE(self)->lookup_texture(self, texture_id);
}
G_MODULE_EXPORT gboolean
fl_texture_registrar_mark_texture_frame_available(FlTextureRegistrar* self,
FlTexture* texture) {
G_MODULE_EXPORT gboolean fl_texture_registrar_mark_texture_frame_available(
FlTextureRegistrar* self,
FlTexture* texture) {
g_return_val_if_fail(FL_IS_TEXTURE_REGISTRAR(self), FALSE);
return FL_TEXTURE_REGISTRAR_GET_IFACE(self)->mark_texture_frame_available(
self, texture);
}
G_MODULE_EXPORT gboolean
fl_texture_registrar_unregister_texture(FlTextureRegistrar* self,
FlTexture* texture) {
G_MODULE_EXPORT gboolean fl_texture_registrar_unregister_texture(
FlTextureRegistrar* self,
FlTexture* texture) {
g_return_val_if_fail(FL_IS_TEXTURE_REGISTRAR(self), FALSE);
return FL_TEXTURE_REGISTRAR_GET_IFACE(self)->unregister_texture(self,

View File

@ -394,14 +394,15 @@ int epoxy_gl_version(void) {
#define CONSTRUCT(_func) static void _func(void) __attribute__((constructor));
#define DESTRUCT(_func) static void _func(void) __attribute__((destructor));
#elif defined(_MSC_VER) && (_MSC_VER >= 1500)
#define CONSTRUCT(_func) \
static void _func(void); \
static int _func##_wrapper(void) { \
_func(); \
return 0; \
} \
__pragma(section(".CRT$XCU", read)) __declspec(allocate( \
".CRT$XCU")) static int (*_array##_func)(void) = _func##_wrapper;
#define CONSTRUCT(_func) \
static void _func(void); \
static int _func##_wrapper(void) { \
_func(); \
return 0; \
} \
__pragma(section(".CRT$XCU", read)) \
__declspec(allocate(".CRT$XCU")) static int (*_array##_func)(void) = \
_func##_wrapper;
#else
#error "You will need constructor support for your compiler"

View File

@ -86,24 +86,24 @@ class DirectManipulationEventHandler
ULONG STDMETHODCALLTYPE Release() override;
// |IDirectManipulationViewportEventHandler|
HRESULT STDMETHODCALLTYPE
OnViewportStatusChanged(IDirectManipulationViewport* viewport,
DIRECTMANIPULATION_STATUS current,
DIRECTMANIPULATION_STATUS previous) override;
HRESULT STDMETHODCALLTYPE OnViewportStatusChanged(
IDirectManipulationViewport* viewport,
DIRECTMANIPULATION_STATUS current,
DIRECTMANIPULATION_STATUS previous) override;
// |IDirectManipulationViewportEventHandler|
HRESULT STDMETHODCALLTYPE
OnViewportUpdated(IDirectManipulationViewport* viewport) override;
HRESULT STDMETHODCALLTYPE OnViewportUpdated(
IDirectManipulationViewport* viewport) override;
// |IDirectManipulationViewportEventHandler|
HRESULT STDMETHODCALLTYPE
OnContentUpdated(IDirectManipulationViewport* viewport,
IDirectManipulationContent* content) override;
HRESULT STDMETHODCALLTYPE OnContentUpdated(
IDirectManipulationViewport* viewport,
IDirectManipulationContent* content) override;
// |IDirectManipulationInteractionEventHandler|
HRESULT STDMETHODCALLTYPE
OnInteraction(IDirectManipulationViewport2* viewport,
DIRECTMANIPULATION_INTERACTION_TYPE interaction) override;
HRESULT STDMETHODCALLTYPE OnInteraction(
IDirectManipulationViewport2* viewport,
DIRECTMANIPULATION_INTERACTION_TYPE interaction) override;
private:
struct GestureData {

View File

@ -104,8 +104,8 @@ FLUTTER_EXPORT FlutterDesktopEngineRef FlutterDesktopViewControllerGetEngine(
FlutterDesktopViewControllerRef controller);
// Returns the view managed by the given controller.
FLUTTER_EXPORT FlutterDesktopViewRef
FlutterDesktopViewControllerGetView(FlutterDesktopViewControllerRef controller);
FLUTTER_EXPORT FlutterDesktopViewRef FlutterDesktopViewControllerGetView(
FlutterDesktopViewControllerRef controller);
// Requests new frame from the engine and repaints the view.
FLUTTER_EXPORT void FlutterDesktopViewControllerForceRedraw(
@ -167,8 +167,8 @@ FLUTTER_EXPORT bool FlutterDesktopEngineRun(FlutterDesktopEngineRef engine,
// This should be called on every run of the application-level runloop, and
// a wait for native events in the runloop should never be longer than the
// last return value from this function.
FLUTTER_EXPORT uint64_t
FlutterDesktopEngineProcessMessages(FlutterDesktopEngineRef engine);
FLUTTER_EXPORT uint64_t FlutterDesktopEngineProcessMessages(
FlutterDesktopEngineRef engine);
FLUTTER_EXPORT void FlutterDesktopEngineReloadSystemFonts(
FlutterDesktopEngineRef engine);
@ -187,8 +187,8 @@ FlutterDesktopEngineGetPluginRegistrar(FlutterDesktopEngineRef engine,
//
// Callers should use |FlutterDesktopMessengerAddRef| if the returned pointer
// will potentially outlive 'engine', such as when passing it to another thread.
FLUTTER_EXPORT FlutterDesktopMessengerRef
FlutterDesktopEngineGetMessenger(FlutterDesktopEngineRef engine);
FLUTTER_EXPORT FlutterDesktopMessengerRef FlutterDesktopEngineGetMessenger(
FlutterDesktopEngineRef engine);
// Returns the texture registrar associated with the engine.
FLUTTER_EXPORT FlutterDesktopTextureRegistrarRef

View File

@ -11,23 +11,19 @@ import shutil
import sys
import os
buildroot_dir = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
ARCH_SUBPATH = 'mac-arm64' if platform.processor() == 'arm' else 'mac-x64'
DSYMUTIL = os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH,
'clang', 'bin', 'dsymutil'
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH, 'clang', 'bin',
'dsymutil'
)
out_dir = os.path.join(buildroot_dir, 'out')
def main():
parser = argparse.ArgumentParser(
description='Creates FlutterEmbedder.framework for macOS'
)
parser = argparse.ArgumentParser(description='Creates FlutterEmbedder.framework for macOS')
parser.add_argument('--dst', type=str, required=True)
parser.add_argument('--arm64-out-dir', type=str, required=True)
@ -39,17 +35,14 @@ def main():
args = parser.parse_args()
dst = (
args.dst
if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst)
)
dst = (args.dst if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst))
arm64_out_dir = (
args.arm64_out_dir if os.path.isabs(args.arm64_out_dir) else
os.path.join(buildroot_dir, args.arm64_out_dir)
args.arm64_out_dir
if os.path.isabs(args.arm64_out_dir) else os.path.join(buildroot_dir, args.arm64_out_dir)
)
x64_out_dir = (
args.x64_out_dir if os.path.isabs(args.x64_out_dir) else
os.path.join(buildroot_dir, args.x64_out_dir)
args.x64_out_dir
if os.path.isabs(args.x64_out_dir) else os.path.join(buildroot_dir, args.x64_out_dir)
)
fat_framework = os.path.join(dst, 'FlutterEmbedder.framework')
@ -83,9 +76,7 @@ def main():
shutil.copytree(arm64_framework, fat_framework, symlinks=True)
regenerate_symlinks(fat_framework)
fat_framework_binary = os.path.join(
fat_framework, 'Versions', 'A', 'FlutterEmbedder'
)
fat_framework_binary = os.path.join(fat_framework, 'Versions', 'A', 'FlutterEmbedder')
# Create the arm64/x64 fat framework.
subprocess.check_call([
@ -115,17 +106,10 @@ def regenerate_symlinks(fat_framework):
os.path.join('Versions', 'Current', 'FlutterEmbedder'),
os.path.join(fat_framework, 'FlutterEmbedder')
)
os.symlink(os.path.join('Versions', 'Current', 'Headers'), os.path.join(fat_framework, 'Headers'))
os.symlink(os.path.join('Versions', 'Current', 'Modules'), os.path.join(fat_framework, 'Modules'))
os.symlink(
os.path.join('Versions', 'Current', 'Headers'),
os.path.join(fat_framework, 'Headers')
)
os.symlink(
os.path.join('Versions', 'Current', 'Modules'),
os.path.join(fat_framework, 'Modules')
)
os.symlink(
os.path.join('Versions', 'Current', 'Resources'),
os.path.join(fat_framework, 'Resources')
os.path.join('Versions', 'Current', 'Resources'), os.path.join(fat_framework, 'Resources')
)
@ -135,10 +119,7 @@ def process_framework(dst, args, fat_framework, fat_framework_binary):
subprocess.check_call([DSYMUTIL, '-o', dsym_out, fat_framework_binary])
if args.zip:
dsym_dst = os.path.join(dst, 'FlutterEmbedder.dSYM')
subprocess.check_call([
'zip', '-r', '-y', 'FlutterEmbedder.dSYM.zip', '.'
],
cwd=dsym_dst)
subprocess.check_call(['zip', '-r', '-y', 'FlutterEmbedder.dSYM.zip', '.'], cwd=dsym_dst)
dsym_final_src_path = os.path.join(dsym_dst, 'FlutterEmbedder.dSYM.zip')
dsym_final_dst_path = os.path.join(dst, 'FlutterEmbedder.dSYM.zip')
shutil.move(dsym_final_src_path, dsym_final_dst_path)
@ -160,9 +141,7 @@ def process_framework(dst, args, fat_framework, fat_framework_binary):
'.',
],
cwd=framework_dst)
final_src_path = os.path.join(
framework_dst, 'FlutterEmbedder.framework.zip'
)
final_src_path = os.path.join(framework_dst, 'FlutterEmbedder.framework.zip')
final_dst_path = os.path.join(dst, 'FlutterEmbedder.framework.zip')
shutil.move(final_src_path, final_dst_path)

View File

@ -18,13 +18,11 @@ from create_xcframework import create_xcframework # pylint: disable=import-erro
ARCH_SUBPATH = 'mac-arm64' if platform.processor() == 'arm' else 'mac-x64'
DSYMUTIL = os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH,
'clang', 'bin', 'dsymutil'
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH, 'clang', 'bin',
'dsymutil'
)
buildroot_dir = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
def main():
@ -46,49 +44,41 @@ def main():
args = parser.parse_args()
dst = (
args.dst
if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst)
)
dst = (args.dst if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst))
arm64_out_dir = (
args.arm64_out_dir if os.path.isabs(args.arm64_out_dir) else
os.path.join(buildroot_dir, args.arm64_out_dir)
args.arm64_out_dir
if os.path.isabs(args.arm64_out_dir) else os.path.join(buildroot_dir, args.arm64_out_dir)
)
x64_out_dir = None
if args.x64_out_dir:
x64_out_dir = (
args.x64_out_dir if os.path.isabs(args.x64_out_dir) else
os.path.join(buildroot_dir, args.x64_out_dir)
args.x64_out_dir
if os.path.isabs(args.x64_out_dir) else os.path.join(buildroot_dir, args.x64_out_dir)
)
simulator_x64_out_dir = None
if args.simulator_x64_out_dir:
simulator_x64_out_dir = (
args.simulator_x64_out_dir if os.path.isabs(args.simulator_x64_out_dir)
else os.path.join(buildroot_dir, args.simulator_x64_out_dir)
args.simulator_x64_out_dir if os.path.isabs(args.simulator_x64_out_dir) else
os.path.join(buildroot_dir, args.simulator_x64_out_dir)
)
framework = os.path.join(dst, 'Flutter.framework')
simulator_framework = os.path.join(dst, 'sim', 'Flutter.framework')
arm64_framework = os.path.join(arm64_out_dir, 'Flutter.framework')
simulator_x64_framework = os.path.join(
simulator_x64_out_dir, 'Flutter.framework'
)
simulator_x64_framework = os.path.join(simulator_x64_out_dir, 'Flutter.framework')
simulator_arm64_out_dir = None
if args.simulator_arm64_out_dir:
simulator_arm64_out_dir = (
args.simulator_arm64_out_dir if os.path.isabs(
args.simulator_arm64_out_dir
) else os.path.join(buildroot_dir, args.simulator_arm64_out_dir)
args.simulator_arm64_out_dir if os.path.isabs(args.simulator_arm64_out_dir) else
os.path.join(buildroot_dir, args.simulator_arm64_out_dir)
)
if args.simulator_arm64_out_dir is not None:
simulator_arm64_framework = os.path.join(
simulator_arm64_out_dir, 'Flutter.framework'
)
simulator_arm64_framework = os.path.join(simulator_arm64_out_dir, 'Flutter.framework')
if not os.path.isdir(arm64_framework):
print('Cannot find iOS arm64 Framework at %s' % arm64_framework)
@ -103,15 +93,14 @@ def main():
return 1
create_framework(
args, dst, framework, arm64_framework, simulator_framework,
simulator_x64_framework, simulator_arm64_framework
args, dst, framework, arm64_framework, simulator_framework, simulator_x64_framework,
simulator_arm64_framework
)
extension_safe_dst = os.path.join(dst, 'extension_safe')
create_extension_safe_framework(
args, extension_safe_dst, '%s_extension_safe' % arm64_out_dir,
'%s_extension_safe' % simulator_x64_out_dir,
'%s_extension_safe' % simulator_arm64_out_dir
'%s_extension_safe' % simulator_x64_out_dir, '%s_extension_safe' % simulator_arm64_out_dir
)
generate_gen_snapshot(args, dst, x64_out_dir, arm64_out_dir)
@ -124,29 +113,20 @@ def create_extension_safe_framework( # pylint: disable=too-many-arguments
framework = os.path.join(dst, 'Flutter.framework')
simulator_framework = os.path.join(dst, 'sim', 'Flutter.framework')
arm64_framework = os.path.join(arm64_out_dir, 'Flutter.framework')
simulator_x64_framework = os.path.join(
simulator_x64_out_dir, 'Flutter.framework'
)
simulator_arm64_framework = os.path.join(
simulator_arm64_out_dir, 'Flutter.framework'
)
simulator_x64_framework = os.path.join(simulator_x64_out_dir, 'Flutter.framework')
simulator_arm64_framework = os.path.join(simulator_arm64_out_dir, 'Flutter.framework')
if not os.path.isdir(arm64_framework):
print(
'Cannot find extension safe iOS arm64 Framework at %s' % arm64_framework
)
print('Cannot find extension safe iOS arm64 Framework at %s' % arm64_framework)
return 1
if not os.path.isdir(simulator_x64_framework):
print(
'Cannot find extension safe iOS x64 simulator Framework at %s' %
simulator_x64_framework
)
print('Cannot find extension safe iOS x64 simulator Framework at %s' % simulator_x64_framework)
return 1
create_framework(
args, dst, framework, arm64_framework, simulator_framework,
simulator_x64_framework, simulator_arm64_framework
args, dst, framework, arm64_framework, simulator_framework, simulator_x64_framework,
simulator_arm64_framework
)
return 0
@ -178,12 +158,10 @@ def create_framework( # pylint: disable=too-many-arguments
# Create the arm64/x64 simulator fat framework.
subprocess.check_call([
'lipo', simulator_x64_dylib, simulator_arm64_dylib, '-create',
'-output', simulator_framework_binary
'lipo', simulator_x64_dylib, simulator_arm64_dylib, '-create', '-output',
simulator_framework_binary
])
process_framework(
args, dst, simulator_framework, simulator_framework_binary
)
process_framework(args, dst, simulator_framework, simulator_framework_binary)
else:
simulator_framework = simulator_x64_framework
@ -195,8 +173,7 @@ def create_framework( # pylint: disable=too-many-arguments
# Add the x64 simulator into the fat framework
subprocess.check_call([
'lipo', arm64_dylib, simulator_x64_dylib, '-create', '-output',
framework_binary
'lipo', arm64_dylib, simulator_x64_dylib, '-create', '-output', framework_binary
])
process_framework(args, dst, framework, framework_binary)
@ -216,13 +193,10 @@ def zip_archive(dst):
'extension_safe/Flutter.xcframework/ios-arm64/Flutter.framework/Flutter',
'extension_safe/Flutter.xcframework/ios-arm64_x86_64-simulator/Flutter.framework/Flutter'
]
embed_codesign_configuration(
os.path.join(dst, 'entitlements.txt'), ios_file_with_entitlements
)
embed_codesign_configuration(os.path.join(dst, 'entitlements.txt'), ios_file_with_entitlements)
embed_codesign_configuration(
os.path.join(dst, 'without_entitlements.txt'),
ios_file_without_entitlements
os.path.join(dst, 'without_entitlements.txt'), ios_file_without_entitlements
)
subprocess.check_call([
@ -237,14 +211,10 @@ def zip_archive(dst):
],
cwd=dst)
if os.path.exists(os.path.join(dst, 'Flutter.dSYM')):
subprocess.check_call(['zip', '-r', 'Flutter.dSYM.zip', 'Flutter.dSYM'],
cwd=dst)
subprocess.check_call(['zip', '-r', 'Flutter.dSYM.zip', 'Flutter.dSYM'], cwd=dst)
if os.path.exists(os.path.join(dst, 'extension_safe', 'Flutter.dSYM')):
subprocess.check_call([
'zip', '-r', 'extension_safe_Flutter.dSYM.zip', 'Flutter.dSYM'
],
cwd=dst)
subprocess.check_call(['zip', '-r', 'extension_safe_Flutter.dSYM.zip', 'Flutter.dSYM'], cwd=dst)
def process_framework(args, dst, framework, framework_binary):
@ -266,8 +236,7 @@ def generate_gen_snapshot(args, dst, x64_out_dir, arm64_out_dir):
if arm64_out_dir:
_generate_gen_snapshot(
os.path.join(arm64_out_dir, args.clang_dir),
os.path.join(dst, 'gen_snapshot_arm64')
os.path.join(arm64_out_dir, args.clang_dir), os.path.join(dst, 'gen_snapshot_arm64')
)
@ -277,9 +246,7 @@ def _generate_gen_snapshot(directory, destination):
print('Cannot find gen_snapshot at %s' % gen_snapshot_dir)
sys.exit(1)
subprocess.check_call([
'xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination
])
subprocess.check_call(['xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination])
if __name__ == '__main__':

View File

@ -15,23 +15,19 @@ from create_xcframework import create_xcframework # pylint: disable=import-erro
ARCH_SUBPATH = 'mac-arm64' if platform.processor() == 'arm' else 'mac-x64'
DSYMUTIL = os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH,
'clang', 'bin', 'dsymutil'
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH, 'clang', 'bin',
'dsymutil'
)
def main():
parser = argparse.ArgumentParser(
description='Creates Flutter.framework and Flutter.xcframework'
)
parser = argparse.ArgumentParser(description='Creates Flutter.framework and Flutter.xcframework')
parser.add_argument('--dst', type=str, required=True)
parser.add_argument('--arm64-out-dir', type=str, required=True)
parser.add_argument('--armv7-out-dir', type=str, required=False)
# TODO(gw280): Remove --simulator-out-dir alias when all recipes are updated
parser.add_argument(
'--simulator-x64-out-dir', '--simulator-out-dir', type=str, required=True
)
parser.add_argument('--simulator-x64-out-dir', '--simulator-out-dir', type=str, required=True)
parser.add_argument('--simulator-arm64-out-dir', type=str, required=False)
parser.add_argument('--strip', action='store_true', default=False)
parser.add_argument('--dsym', action='store_true', default=False)
@ -41,13 +37,9 @@ def main():
framework = os.path.join(args.dst, 'Flutter.framework')
simulator_framework = os.path.join(args.dst, 'sim', 'Flutter.framework')
arm64_framework = os.path.join(args.arm64_out_dir, 'Flutter.framework')
simulator_x64_framework = os.path.join(
args.simulator_x64_out_dir, 'Flutter.framework'
)
simulator_x64_framework = os.path.join(args.simulator_x64_out_dir, 'Flutter.framework')
if args.simulator_arm64_out_dir is not None:
simulator_arm64_framework = os.path.join(
args.simulator_arm64_out_dir, 'Flutter.framework'
)
simulator_arm64_framework = os.path.join(args.simulator_arm64_out_dir, 'Flutter.framework')
simulator_arm64_dylib = os.path.join(simulator_arm64_framework, 'Flutter')
arm64_dylib = os.path.join(arm64_framework, 'Flutter')
@ -86,8 +78,8 @@ def main():
# Create the arm64/x64 simulator fat framework.
subprocess.check_call([
'lipo', simulator_x64_dylib, simulator_arm64_dylib, '-create',
'-output', simulator_framework_binary
'lipo', simulator_x64_dylib, simulator_arm64_dylib, '-create', '-output',
simulator_framework_binary
])
process_framework(args, simulator_framework, simulator_framework_binary)
else:
@ -101,8 +93,7 @@ def main():
# Add the x64 simulator into the fat framework
subprocess.check_call([
'lipo', arm64_dylib, simulator_x64_dylib, '-create', '-output',
framework_binary
'lipo', arm64_dylib, simulator_x64_dylib, '-create', '-output', framework_binary
])
process_framework(args, framework, framework_binary)

View File

@ -11,23 +11,19 @@ import shutil
import sys
import os
buildroot_dir = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
ARCH_SUBPATH = 'mac-arm64' if platform.processor() == 'arm' else 'mac-x64'
DSYMUTIL = os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH,
'clang', 'bin', 'dsymutil'
os.path.dirname(__file__), '..', '..', '..', 'buildtools', ARCH_SUBPATH, 'clang', 'bin',
'dsymutil'
)
out_dir = os.path.join(buildroot_dir, 'out')
def main():
parser = argparse.ArgumentParser(
description='Creates FlutterMacOS.framework for macOS'
)
parser = argparse.ArgumentParser(description='Creates FlutterMacOS.framework for macOS')
parser.add_argument('--dst', type=str, required=True)
parser.add_argument('--arm64-out-dir', type=str, required=True)
@ -39,17 +35,14 @@ def main():
args = parser.parse_args()
dst = (
args.dst
if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst)
)
dst = (args.dst if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst))
arm64_out_dir = (
args.arm64_out_dir if os.path.isabs(args.arm64_out_dir) else
os.path.join(buildroot_dir, args.arm64_out_dir)
args.arm64_out_dir
if os.path.isabs(args.arm64_out_dir) else os.path.join(buildroot_dir, args.arm64_out_dir)
)
x64_out_dir = (
args.x64_out_dir if os.path.isabs(args.x64_out_dir) else
os.path.join(buildroot_dir, args.x64_out_dir)
args.x64_out_dir
if os.path.isabs(args.x64_out_dir) else os.path.join(buildroot_dir, args.x64_out_dir)
)
fat_framework = os.path.join(dst, 'FlutterMacOS.framework')
@ -84,9 +77,7 @@ def main():
regenerate_symlinks(fat_framework)
fat_framework_binary = os.path.join(
fat_framework, 'Versions', 'A', 'FlutterMacOS'
)
fat_framework_binary = os.path.join(fat_framework, 'Versions', 'A', 'FlutterMacOS')
# Create the arm64/x64 fat framework.
subprocess.check_call([
@ -97,9 +88,7 @@ def main():
versions_path = os.path.join(fat_framework, 'Versions')
subprocess.check_call(['chmod', '-R', 'og+r', versions_path])
# Find all the files below the target dir with owner execute permission
find_subprocess = subprocess.Popen([
'find', versions_path, '-perm', '-100', '-print0'
],
find_subprocess = subprocess.Popen(['find', versions_path, '-perm', '-100', '-print0'],
stdout=subprocess.PIPE)
# Add execute permission for other and group for all files that had it for owner.
xargs_subprocess = subprocess.Popen(['xargs', '-0', 'chmod', 'og+x'],
@ -131,17 +120,10 @@ def regenerate_symlinks(fat_framework):
os.path.join('Versions', 'Current', 'FlutterMacOS'),
os.path.join(fat_framework, 'FlutterMacOS')
)
os.symlink(os.path.join('Versions', 'Current', 'Headers'), os.path.join(fat_framework, 'Headers'))
os.symlink(os.path.join('Versions', 'Current', 'Modules'), os.path.join(fat_framework, 'Modules'))
os.symlink(
os.path.join('Versions', 'Current', 'Headers'),
os.path.join(fat_framework, 'Headers')
)
os.symlink(
os.path.join('Versions', 'Current', 'Modules'),
os.path.join(fat_framework, 'Modules')
)
os.symlink(
os.path.join('Versions', 'Current', 'Resources'),
os.path.join(fat_framework, 'Resources')
os.path.join('Versions', 'Current', 'Resources'), os.path.join(fat_framework, 'Resources')
)
@ -156,8 +138,7 @@ def process_framework(dst, args, fat_framework, fat_framework_binary):
subprocess.check_call([DSYMUTIL, '-o', dsym_out, fat_framework_binary])
if args.zip:
dsym_dst = os.path.join(dst, 'FlutterMacOS.dSYM')
subprocess.check_call(['zip', '-r', '-y', 'FlutterMacOS.dSYM.zip', '.'],
cwd=dsym_dst)
subprocess.check_call(['zip', '-r', '-y', 'FlutterMacOS.dSYM.zip', '.'], cwd=dsym_dst)
# Double zip to make it consistent with legacy artifacts.
# TODO(fujino): remove this once https://github.com/flutter/flutter/issues/125067 is resolved
subprocess.check_call([
@ -188,13 +169,11 @@ def process_framework(dst, args, fat_framework, fat_framework_binary):
filepath_without_entitlements = 'FlutterMacOS.framework.zip/Versions/A/FlutterMacOS'
embed_codesign_configuration(
os.path.join(framework_dst, 'entitlements.txt'),
filepath_with_entitlements
os.path.join(framework_dst, 'entitlements.txt'), filepath_with_entitlements
)
embed_codesign_configuration(
os.path.join(framework_dst, 'without_entitlements.txt'),
filepath_without_entitlements
os.path.join(framework_dst, 'without_entitlements.txt'), filepath_without_entitlements
)
subprocess.check_call([
'zip',

View File

@ -9,9 +9,7 @@ import subprocess
import sys
import os
buildroot_dir = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
def main():
@ -28,10 +26,7 @@ def main():
args = parser.parse_args()
dst = (
args.dst
if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst)
)
dst = (args.dst if os.path.isabs(args.dst) else os.path.join(buildroot_dir, args.dst))
# if dst folder does not exist create it.
if not os.path.exists(dst):
@ -39,29 +34,27 @@ def main():
if args.x64_out_dir:
x64_out_dir = (
args.x64_out_dir if os.path.isabs(args.x64_out_dir) else
os.path.join(buildroot_dir, args.x64_out_dir)
args.x64_out_dir
if os.path.isabs(args.x64_out_dir) else os.path.join(buildroot_dir, args.x64_out_dir)
)
generate_gen_snapshot(x64_out_dir, os.path.join(dst, 'gen_snapshot_x64'))
if args.arm64_out_dir:
arm64_out_dir = (
args.arm64_out_dir if os.path.isabs(args.arm64_out_dir) else
os.path.join(buildroot_dir, args.arm64_out_dir)
args.arm64_out_dir
if os.path.isabs(args.arm64_out_dir) else os.path.join(buildroot_dir, args.arm64_out_dir)
)
generate_gen_snapshot(
os.path.join(arm64_out_dir, args.clang_dir),
os.path.join(dst, 'gen_snapshot_arm64')
os.path.join(arm64_out_dir, args.clang_dir), os.path.join(dst, 'gen_snapshot_arm64')
)
if args.armv7_out_dir:
armv7_out_dir = (
args.armv7_out_dir if os.path.isabs(args.armv7_out_dir) else
os.path.join(buildroot_dir, args.armv7_out_dir)
args.armv7_out_dir
if os.path.isabs(args.armv7_out_dir) else os.path.join(buildroot_dir, args.armv7_out_dir)
)
generate_gen_snapshot(
os.path.join(armv7_out_dir, args.clang_dir),
os.path.join(dst, 'gen_snapshot_armv7')
os.path.join(armv7_out_dir, args.clang_dir), os.path.join(dst, 'gen_snapshot_armv7')
)
if args.zip:
zip_archive(dst)
@ -75,9 +68,7 @@ def embed_codesign_configuration(config_path, contents):
def zip_archive(dst):
snapshot_filepath = ['gen_snapshot_arm64', 'gen_snapshot_x64']
embed_codesign_configuration(
os.path.join(dst, 'entitlements.txt'), snapshot_filepath
)
embed_codesign_configuration(os.path.join(dst, 'entitlements.txt'), snapshot_filepath)
subprocess.check_call([
'zip',
@ -93,9 +84,7 @@ def generate_gen_snapshot(directory, destination):
print('Cannot find gen_snapshot at %s' % gen_snapshot_dir)
sys.exit(1)
subprocess.check_call([
'xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination
])
subprocess.check_call(['xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination])
if __name__ == '__main__':

View File

@ -22,12 +22,8 @@ def main():
help='The framework paths used to create the XCFramework.',
required=True
)
parser.add_argument(
'--name', help='Name of the XCFramework', type=str, required=True
)
parser.add_argument(
'--location', help='Output directory', type=str, required=True
)
parser.add_argument('--name', help='Name of the XCFramework', type=str, required=True)
parser.add_argument('--location', help='Output directory', type=str, required=True)
args = parser.parse_args()

View File

@ -15,12 +15,8 @@ import sys
def main():
parser = argparse.ArgumentParser(description='Copy a Dart package')
parser.add_argument(
'--source', type=str, help='Source directory assembled by dart_pkg.py'
)
parser.add_argument(
'--dest', type=str, help='Destination directory for the package'
)
parser.add_argument('--source', type=str, help='Source directory assembled by dart_pkg.py')
parser.add_argument('--dest', type=str, help='Destination directory for the package')
args = parser.parse_args()
@ -29,9 +25,7 @@ def main():
# dart_pkg.py will create a packages directory within the package.
# Do not copy this into the release output.
shutil.copytree(
args.source, args.dest, ignore=shutil.ignore_patterns('packages')
)
shutil.copytree(args.source, args.dest, ignore=shutil.ignore_patterns('packages'))
if __name__ == '__main__':

View File

@ -18,10 +18,7 @@ def main():
)
parser.add_argument(
'--headers',
nargs='+',
help='The headers to install at the location.',
required=True
'--headers', nargs='+', help='The headers to install at the location.', required=True
)
parser.add_argument('--location', type=str, required=True)
@ -41,9 +38,7 @@ def main():
# Copy all files specified in the args.
for header_file in args.headers:
shutil.copyfile(
header_file, os.path.join(args.location, os.path.basename(header_file))
)
shutil.copyfile(header_file, os.path.join(args.location, os.path.basename(header_file)))
if __name__ == '__main__':

View File

@ -9,9 +9,7 @@ import os
import subprocess
import sys
BUILDROOT_DIR = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..')
)
BUILDROOT_DIR = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..'))
PERFETTO_SESSION_KEY = 'session1'
PERFETTO_TRACE_FILE = '/data/misc/perfetto-traces/trace'
@ -39,17 +37,15 @@ def install_apk(apk_path, package_name, adb_path='adb'):
print('Installing APK')
subprocess.check_output([adb_path, 'shell', 'am', 'force-stop', package_name])
# Allowed to fail if APK was never installed.
subprocess.call([adb_path, 'uninstall', package_name],
stdout=subprocess.DEVNULL)
subprocess.call([adb_path, 'uninstall', package_name], stdout=subprocess.DEVNULL)
subprocess.check_output([adb_path, 'install', apk_path])
def start_perfetto(package_name, adb_path='adb'):
print('Starting trace')
cmd = [
adb_path, 'shell', 'echo', "'" + PERFETTO_CONFIG % package_name + "'",
'|', 'perfetto', '-c', '-', '--txt', '-o', PERFETTO_TRACE_FILE,
'--detach', PERFETTO_SESSION_KEY
adb_path, 'shell', 'echo', "'" + PERFETTO_CONFIG % package_name + "'", '|', 'perfetto', '-c',
'-', '--txt', '-o', PERFETTO_TRACE_FILE, '--detach', PERFETTO_SESSION_KEY
]
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
@ -71,8 +67,7 @@ def launch_package(package_name, activity_name, adb_path='adb'):
stderr=subprocess.STDOUT)
for line in logcat.stdout:
print('>>>>>>>> ' + line.strip())
if ('Observatory listening' in line) or ('Dart VM service is listening'
in line):
if ('Observatory listening' in line) or ('Dart VM service is listening' in line):
logcat.kill()
break
@ -88,12 +83,9 @@ def collect_and_validate_trace(adb_path='adb'):
print('Validating trace')
traceconv = os.path.join(
BUILDROOT_DIR, 'third_party', 'android_tools', 'trace_to_text',
'trace_to_text'
BUILDROOT_DIR, 'third_party', 'android_tools', 'trace_to_text', 'trace_to_text'
)
traceconv_output = subprocess.check_output([
traceconv, 'systrace', 'trace.pb'
],
traceconv_output = subprocess.check_output([traceconv, 'systrace', 'trace.pb'],
stderr=subprocess.STDOUT,
universal_newlines=True)
@ -111,10 +103,7 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--apk-path',
dest='apk_path',
action='store',
help='Provide the path to the APK to install'
'--apk-path', dest='apk_path', action='store', help='Provide the path to the APK to install'
)
parser.add_argument(
'--package-name',
@ -145,10 +134,7 @@ def main():
],
text=True).strip()
if int(android_api_level) < 29:
print(
'Android API %s detected. This script requires API 29 or above.' %
android_api_level
)
print('Android API %s detected. This script requires API 29 or above.' % android_api_level)
return 0
install_apk(args.apk_path, args.package_name, args.adb_path)

View File

@ -60,11 +60,7 @@ class BenchmarkResult: # pylint: disable=too-many-instance-attributes
figures.append(plt.figure(dpi=1200, frameon=False, figsize=(11, 8.5)))
for family in self.series:
plt.plot(
self.series[family]['x'],
self.series[family]['y'],
label=self.series_labels[family]
)
plt.plot(self.series[family]['x'], self.series[family]['y'], label=self.series_labels[family])
plt.xlabel('Benchmark Seed')
plt.ylabel('Time (' + self.time_unit + ')')
@ -92,9 +88,7 @@ class BenchmarkResult: # pylint: disable=too-many-instance-attributes
figures.append(plt.figure(dpi=1200, frameon=False, figsize=(11, 8.5)))
for family in self.series:
plt.plot(
self.series[family]['x'],
self.series[family]['y'],
label=self.series_labels[family]
self.series[family]['x'], self.series[family]['y'], label=self.series_labels[family]
)
plt.xlabel('Benchmark Seed')
@ -140,9 +134,7 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'filename',
action='store',
help='Path to the JSON output from Google Benchmark'
'filename', action='store', help='Path to the JSON output from Google Benchmark'
)
parser.add_argument(
'-o',
@ -232,15 +224,11 @@ def process_benchmark_data(benchmark_json, output_pdf, output_csv):
else:
benchmark_draw_call_count = -1
optional_keys = [
'DrawCallCount_Varies', 'VerbCount', 'PointCount', 'VertexCount',
'GlyphCount'
]
optional_keys = ['DrawCallCount_Varies', 'VerbCount', 'PointCount', 'VertexCount', 'GlyphCount']
if benchmark_name not in benchmark_results_data:
benchmark_results_data[benchmark_name] = BenchmarkResult(
benchmark_name, benchmark_backend, benchmark_unit,
benchmark_draw_call_count
benchmark_name, benchmark_backend, benchmark_unit, benchmark_draw_call_count
)
for key in optional_keys:

View File

@ -17,30 +17,22 @@ SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
BAT = '.bat' if sys.platform.startswith(('cygwin', 'win')) else ''
GRADLE_BIN = os.path.normpath(
os.path.join(
SCRIPT_PATH, '..', '..', '..', 'third_party', 'gradle', 'bin',
'gradle%s' % BAT
)
os.path.join(SCRIPT_PATH, '..', '..', '..', 'third_party', 'gradle', 'bin', 'gradle%s' % BAT)
)
ANDROID_HOME = os.path.normpath(
os.path.join(
SCRIPT_PATH, '..', '..', '..', 'third_party', 'android_tools', 'sdk'
)
os.path.join(SCRIPT_PATH, '..', '..', '..', 'third_party', 'android_tools', 'sdk')
)
if platform.system() == 'Darwin':
JAVA_HOME = os.path.normpath(
os.path.join(
SCRIPT_PATH, '..', '..', '..', 'third_party', 'java', 'openjdk',
'Contents', 'Home'
SCRIPT_PATH, '..', '..', '..', 'third_party', 'java', 'openjdk', 'Contents', 'Home'
)
)
else:
JAVA_HOME = os.path.normpath(
os.path.join(
SCRIPT_PATH, '..', '..', '..', 'third_party', 'java', 'openjdk'
)
os.path.join(SCRIPT_PATH, '..', '..', '..', 'third_party', 'java', 'openjdk')
)

View File

@ -20,8 +20,7 @@ def xvfb_pid_filename(child_build_name):
"""Returns the filename to the Xvfb pid file. This name is unique for each
builder. This is used by the linux builders."""
return os.path.join(
tempfile.gettempdir(),
'xvfb-' + xvfb_display_index(child_build_name) + '.pid'
tempfile.gettempdir(), 'xvfb-' + xvfb_display_index(child_build_name) + '.pid'
)
@ -86,8 +85,8 @@ def start_virtual_x(child_build_name, build_dir):
# Start a virtual X server that we run the tests in. This makes it so we can
# run the tests even if we didn't start the tests from an X session.
proc = subprocess.Popen([
cmd, display, '-screen', '0', '1280x800x24', '-ac', '-dpi', '96',
'-maxclients', '512', '-extension', 'MIT-SHM'
cmd, display, '-screen', '0', '1280x800x24', '-ac', '-dpi', '96', '-maxclients', '512',
'-extension', 'MIT-SHM'
],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,

View File

@ -9,9 +9,7 @@ import os
import subprocess
import sys
EMSDK_ROOT = os.path.abspath(
os.path.join(__file__, '..', '..', '..', 'buildtools', 'emsdk')
)
EMSDK_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..', 'buildtools', 'emsdk'))
EMSDK_PATH = os.path.join(EMSDK_ROOT, 'emsdk.py')
@ -21,17 +19,13 @@ EMSDK_VERSION = '3.1.44'
def main():
try:
subprocess.check_call([
sys.executable, EMSDK_PATH, 'install', EMSDK_VERSION
],
subprocess.check_call([sys.executable, EMSDK_PATH, 'install', EMSDK_VERSION],
stdout=subprocess.DEVNULL)
except subprocess.CalledProcessError:
print('Failed to install emsdk')
return 1
try:
subprocess.check_call([
sys.executable, EMSDK_PATH, 'activate', EMSDK_VERSION
],
subprocess.check_call([sys.executable, EMSDK_PATH, 'activate', EMSDK_VERSION],
stdout=subprocess.DEVNULL)
except subprocess.CalledProcessError:
print('Failed to activate emsdk')

View File

@ -45,8 +45,7 @@ def main():
for file in args.files:
if (file.endswith(os.path.join('io', 'flutter', 'Log.java')) or
file.endswith(os.path.join('io', 'flutter', 'util', 'TraceSection.java')
)):
file.endswith(os.path.join('io', 'flutter', 'util', 'TraceSection.java'))):
continue
with open(file) as f:
contents = f.read()
@ -55,9 +54,7 @@ def main():
if ANDROIDX_TRACE_CLASS in contents or ANDROID_TRACE_CLASS in contents:
bad_trace_files.append(file)
has_bad_files = CheckBadFiles(
bad_log_files, ANDROID_LOG_CLASS, FLUTTER_LOG_CLASS
)
has_bad_files = CheckBadFiles(bad_log_files, ANDROID_LOG_CLASS, FLUTTER_LOG_CLASS)
has_bad_files = has_bad_files or CheckBadFiles(
bad_trace_files, 'android[x].tracing.Trace', FLUTTER_TRACE_CLASS
)

View File

@ -72,26 +72,16 @@ def main():
with open(os.path.join(THIS_DIR, 'files.json')) as f:
dependencies = json.load(f)
parser = argparse.ArgumentParser(
description='Generate the POM file for the engine artifacts'
)
parser = argparse.ArgumentParser(description='Generate the POM file for the engine artifacts')
parser.add_argument(
'--engine-artifact-id',
type=utf8,
required=True,
help='The artifact id. e.g. android_arm_release'
)
parser.add_argument('--engine-version', type=utf8, required=True, help='The engine commit hash')
parser.add_argument(
'--engine-version',
type=utf8,
required=True,
help='The engine commit hash'
)
parser.add_argument(
'--destination',
type=utf8,
required=True,
help='The destination directory absolute path'
'--destination', type=utf8, required=True, help='The destination directory absolute path'
)
parser.add_argument(
'--include-embedding-dependencies',
@ -116,22 +106,12 @@ def main():
# Write the POM file.
with open(os.path.join(args.destination, out_file_name), 'w') as f:
f.write(
POM_FILE_CONTENT.format(
engine_artifact_id, artifact_version, pom_dependencies
)
)
f.write(POM_FILE_CONTENT.format(engine_artifact_id, artifact_version, pom_dependencies))
# Write the Maven metadata file.
with open(os.path.join(args.destination,
'%s.maven-metadata.xml' % engine_artifact_id),
'w') as f:
with open(os.path.join(args.destination, '%s.maven-metadata.xml' % engine_artifact_id), 'w') as f:
timestamp = datetime.datetime.utcnow().strftime("%Y%m%d.%H%M%S")
f.write(
MAVEN_METADATA_CONTENT.format(
engine_artifact_id, artifact_version, timestamp
)
)
f.write(MAVEN_METADATA_CONTENT.format(engine_artifact_id, artifact_version, timestamp))
if __name__ == '__main__':

View File

@ -14,9 +14,7 @@ import shutil
import subprocess
import sys
SRC_ROOT = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
)
SRC_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
FUCHSIA_SDK_DIR = os.path.join(SRC_ROOT, 'fuchsia', 'sdk')
FLUTTER_DIR = os.path.join(SRC_ROOT, 'flutter')
@ -64,12 +62,10 @@ def DownloadFuchsiaSDKFromGCS(sdk_path, verbose):
universal_newlines=True,
)
if curl_result.returncode == 0 and verbose:
print(
'curl output:stdout:\n{}\nstderr:\n{}'.format(
curl_result.stdout,
curl_result.stderr,
)
)
print('curl output:stdout:\n{}\nstderr:\n{}'.format(
curl_result.stdout,
curl_result.stderr,
))
elif curl_result.returncode != 0:
eprint(
'Failed to download: stdout:\n{}\nstderr:\n{}'.format(
@ -138,10 +134,7 @@ def Main():
parser.add_argument('--host-os', help='The host os')
parser.add_argument(
'--fuchsia-sdk-path',
help='The path in gcs to the fuchsia sdk to download'
)
parser.add_argument('--fuchsia-sdk-path', help='The path in gcs to the fuchsia sdk to download')
args = parser.parse_args()
fail_loudly = 1 if args.fail_loudly else 0

View File

@ -28,24 +28,20 @@ PLATFORM_2_PATH = {
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, '..', '..', '..'))
MATERIAL_TTF = os.path.join(SCRIPT_DIR, 'fixtures', 'MaterialIcons-Regular.ttf')
VARIABLE_MATERIAL_TTF = os.path.join(
SCRIPT_DIR, 'fixtures', 'MaterialSymbols-Variable.ttf'
)
VARIABLE_MATERIAL_TTF = os.path.join(SCRIPT_DIR, 'fixtures', 'MaterialSymbols-Variable.ttf')
IS_WINDOWS = sys.platform.startswith(('cygwin', 'win'))
EXE = '.exe' if IS_WINDOWS else ''
BAT = '.bat' if IS_WINDOWS else ''
FONT_SUBSET = os.path.join(SRC_DIR, 'out', 'host_debug', 'font-subset' + EXE)
FONT_SUBSET_ZIP = os.path.join(
SRC_DIR, 'out', 'host_debug', 'zip_archives',
PLATFORM_2_PATH.get(sys.platform, ''), 'font-subset.zip'
SRC_DIR, 'out', 'host_debug', 'zip_archives', PLATFORM_2_PATH.get(sys.platform, ''),
'font-subset.zip'
)
if not os.path.isfile(FONT_SUBSET):
FONT_SUBSET = os.path.join(
SRC_DIR, 'out', 'host_debug_unopt', 'font-subset' + EXE
)
FONT_SUBSET = os.path.join(SRC_DIR, 'out', 'host_debug_unopt', 'font-subset' + EXE)
FONT_SUBSET_ZIP = os.path.join(
SRC_DIR, 'out', 'host_debug_unopt', 'zip_archives',
PLATFORM_2_PATH.get(sys.platform, ''), 'font-subset.zip'
SRC_DIR, 'out', 'host_debug_unopt', 'zip_archives', PLATFORM_2_PATH.get(sys.platform, ''),
'font-subset.zip'
)
if not os.path.isfile(FONT_SUBSET):
raise Exception(
@ -57,11 +53,10 @@ COMPARE_TESTS = (
(True, '1.ttf', MATERIAL_TTF, [r'57347']),
(True, '1.ttf', MATERIAL_TTF, [r'0xE003']),
(True, '1.ttf', MATERIAL_TTF, [r'\uE003']),
(False, '1.ttf', MATERIAL_TTF, [r'57348'
]), # False because different codepoint
(False, '1.ttf', MATERIAL_TTF, [r'57348']), # False because different codepoint
(True, '2.ttf', MATERIAL_TTF, [r'0xE003', r'0xE004']),
(True, '2.ttf', MATERIAL_TTF, [r'0xE003', r'optional:0xE004'
]), # Optional codepoint that is found
(True, '2.ttf', MATERIAL_TTF, [r'0xE003',
r'optional:0xE004']), # Optional codepoint that is found
(True, '2.ttf', MATERIAL_TTF, [
r'0xE003',
r'0xE004',
@ -81,23 +76,19 @@ COMPARE_TESTS = (
(True, '1variable.ttf', VARIABLE_MATERIAL_TTF, [r'57347']),
(True, '1variable.ttf', VARIABLE_MATERIAL_TTF, [r'0xE003']),
(True, '1variable.ttf', VARIABLE_MATERIAL_TTF, [r'\uE003']),
(False, '1variable.ttf', VARIABLE_MATERIAL_TTF,
[r'57348']), # False because different codepoint
(False, '1variable.ttf', VARIABLE_MATERIAL_TTF, [r'57348'
]), # False because different codepoint
(True, '2variable.ttf', VARIABLE_MATERIAL_TTF, [r'0xE003', r'0xE004']),
(
True, '2variable.ttf', VARIABLE_MATERIAL_TTF, [
r'0xE003',
r'0xE004',
r'57347',
]
), # Duplicated codepoint
(
True, '3variable.ttf', VARIABLE_MATERIAL_TTF, [
r'0xE003',
r'0xE004',
r'0xE021',
]
),
(True, '2variable.ttf', VARIABLE_MATERIAL_TTF, [
r'0xE003',
r'0xE004',
r'57347',
]), # Duplicated codepoint
(True, '3variable.ttf', VARIABLE_MATERIAL_TTF, [
r'0xE003',
r'0xE004',
r'0xE021',
]),
)
FAIL_TESTS = [
@ -157,11 +148,7 @@ def RunCmd(cmd, codepoints, fail=False):
print(' %s' % ' '.join(cmd))
print('STDIN: "%s"' % ' '.join(codepoints))
p = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=SRC_DIR
cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, cwd=SRC_DIR
)
stdout_data, stderr_data = p.communicate(input=' '.join(codepoints).encode())
if p.returncode != 0 and fail == False:

View File

@ -59,9 +59,7 @@ def GetHostArchFromPlatform():
def GetPMBinPath():
return os.path.join(
GetFuchsiaSDKPath(), 'tools', GetHostArchFromPlatform(), 'pm'
)
return os.path.join(GetFuchsiaSDKPath(), 'tools', GetHostArchFromPlatform(), 'pm')
def RunExecutable(command):
@ -69,10 +67,7 @@ def RunExecutable(command):
def RunGN(variant_dir, flags):
print(
'Running gn for variant "%s" with flags: %s' %
(variant_dir, ','.join(flags))
)
print('Running gn for variant "%s" with flags: %s' % (variant_dir, ','.join(flags)))
RunExecutable([
os.path.join('flutter', 'tools', 'gn'),
] + flags)
@ -84,8 +79,7 @@ def BuildNinjaTargets(variant_dir, targets):
assert os.path.exists(os.path.join(_out_dir, variant_dir))
print('Running autoninja for targets: %s' % targets)
RunExecutable(['autoninja', '-C',
os.path.join(_out_dir, variant_dir)] + targets)
RunExecutable(['autoninja', '-C', os.path.join(_out_dir, variant_dir)] + targets)
def RemoveDirectoryIfExists(path):
@ -129,16 +123,14 @@ def CopyGenSnapshotIfExists(source, destination):
FindFileAndCopyTo('gen_snapshot', source_root, destination_base)
FindFileAndCopyTo('gen_snapshot_product', source_root, destination_base)
FindFileAndCopyTo(
'kernel_compiler.dart.snapshot', source_root, destination_base,
'kernel_compiler.snapshot'
'kernel_compiler.dart.snapshot', source_root, destination_base, 'kernel_compiler.snapshot'
)
FindFileAndCopyTo(
'frontend_server.dart.snapshot', source_root, destination_base,
'flutter_frontend_server.snapshot'
)
FindFileAndCopyTo(
'list_libraries.dart.snapshot', source_root, destination_base,
'list_libraries.snapshot'
'list_libraries.dart.snapshot', source_root, destination_base, 'list_libraries.snapshot'
)
@ -154,9 +146,7 @@ def CopyZirconFFILibIfExists(source, destination):
FindFileAndCopyTo('libzircon_ffi.so', source_root, destination_base)
def CopyToBucketWithMode(
source, destination, aot, product, runner_type, api_level
):
def CopyToBucketWithMode(source, destination, aot, product, runner_type, api_level):
mode = 'aot' if aot else 'jit'
product_suff = '_product' if product else ''
runner_name = '%s_%s%s_runner' % (runner_type, mode, product_suff)
@ -198,13 +188,9 @@ def CopyVulkanDepsToBucket(src, dst, arch):
sdk_path = GetFuchsiaSDKPath()
deps_bucket_path = os.path.join(_bucket_directory, dst)
if not os.path.exists(deps_bucket_path):
FindFileAndCopyTo('VkLayer_khronos_validation.json', '%s/pkg' % (sdk_path), deps_bucket_path)
FindFileAndCopyTo(
'VkLayer_khronos_validation.json', '%s/pkg' % (sdk_path),
deps_bucket_path
)
FindFileAndCopyTo(
'VkLayer_khronos_validation.so', '%s/arch/%s' % (sdk_path, arch),
deps_bucket_path
'VkLayer_khronos_validation.so', '%s/arch/%s' % (sdk_path, arch), deps_bucket_path
)
@ -234,10 +220,7 @@ def CopyBuildToBucket(runtime_mode, arch, optimized, product):
# are about to package.
bucket_root = os.path.join(_bucket_directory, 'flutter')
licenses_root = os.path.join(_src_root_dir, 'flutter/ci/licenses_golden')
license_files = [
'licenses_flutter', 'licenses_fuchsia', 'licenses_skia',
'licenses_third_party'
]
license_files = ['licenses_flutter', 'licenses_fuchsia', 'licenses_skia', 'licenses_third_party']
for license in license_files:
src_path = os.path.join(licenses_root, license)
dst_path = os.path.join(bucket_root, license)
@ -313,8 +296,8 @@ def ProcessCIPDPackage(upload, engine_version):
def BuildTarget(
runtime_mode, arch, optimized, enable_lto, enable_legacy, asan,
dart_version_git_info, prebuilt_dart_sdk, build_targets
runtime_mode, arch, optimized, enable_lto, enable_legacy, asan, dart_version_git_info,
prebuilt_dart_sdk, build_targets
):
unopt = "_unopt" if not optimized else ""
out_dir = 'fuchsia_%s%s_%s' % (runtime_mode, unopt, arch)
@ -362,11 +345,7 @@ def main():
help='If set, uploads the CIPD package and tags it as the latest.'
)
parser.add_argument(
'--engine-version',
required=False,
help='Specifies the flutter engine SHA.'
)
parser.add_argument('--engine-version', required=False, help='Specifies the flutter engine SHA.')
parser.add_argument(
'--unoptimized',
@ -376,15 +355,10 @@ def main():
)
parser.add_argument(
'--runtime-mode',
type=str,
choices=['debug', 'profile', 'release', 'all'],
default='all'
'--runtime-mode', type=str, choices=['debug', 'profile', 'release', 'all'], default='all'
)
parser.add_argument(
'--archs', type=str, choices=['x64', 'arm64', 'all'], default='all'
)
parser.add_argument('--archs', type=str, choices=['x64', 'arm64', 'all'], default='all')
parser.add_argument(
'--asan',
@ -394,10 +368,7 @@ def main():
)
parser.add_argument(
'--no-lto',
action='store_true',
default=False,
help='If set, disables LTO for the build.'
'--no-lto', action='store_true', default=False, help='If set, disables LTO for the build.'
)
parser.add_argument(
@ -417,10 +388,8 @@ def main():
parser.add_argument(
'--targets',
default='',
help=(
'Comma-separated list; adds additional targets to build for '
'Fuchsia.'
)
help=('Comma-separated list; adds additional targets to build for '
'Fuchsia.')
)
parser.add_argument(
@ -477,9 +446,8 @@ def main():
if build_mode == 'all' or runtime_mode == build_mode:
if not args.skip_build:
BuildTarget(
runtime_mode, arch, optimized, enable_lto, enable_legacy,
args.asan, not args.no_dart_version_git_info,
not args.no_prebuilt_dart_sdk,
runtime_mode, arch, optimized, enable_lto, enable_legacy, args.asan,
not args.no_dart_version_git_info, not args.no_prebuilt_dart_sdk,
args.targets.split(",") if args.targets else ['flutter']
)
CopyBuildToBucket(runtime_mode, arch, optimized, product)

View File

@ -16,13 +16,9 @@ import sys
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--cmc-bin', dest='cmc_bin', action='store', required=True
)
parser.add_argument('--cmc-bin', dest='cmc_bin', action='store', required=True)
parser.add_argument('--output', dest='output', action='store', required=True)
parser.add_argument(
'--manifest-file', dest='manifest_file', action='store', required=True
)
parser.add_argument('--manifest-file', dest='manifest_file', action='store', required=True)
parser.add_argument(
'--includepath',
dest='includepath',

View File

@ -45,17 +45,14 @@ def GetBuildIdParts(exec_path, read_elf):
file_out = subprocess.check_output([read_elf, '-n', exec_path])
build_id_line = file_out.splitlines()[-1].split()
if (build_id_line[0] != b'Build' or build_id_line[1] != b'ID:' or
not sha1_pattern.match(str(build_id_line[-1])) or
not len(build_id_line[-1]) > 2):
not sha1_pattern.match(str(build_id_line[-1])) or not len(build_id_line[-1]) > 2):
raise Exception(
'Expected the last line of llvm-readelf to match "Build ID <Hex String>" Got: %s'
% file_out
'Expected the last line of llvm-readelf to match "Build ID <Hex String>" Got: %s' % file_out
)
build_id = build_id_line[-1]
return {
'build_id': build_id.decode('utf-8'),
'prefix_dir': build_id[:2].decode('utf-8'),
'build_id': build_id.decode('utf-8'), 'prefix_dir': build_id[:2].decode('utf-8'),
'exec_name': build_id[2:].decode('utf-8')
}
@ -106,11 +103,9 @@ def main():
)
args = parser.parse_args()
assert os.path.exists(args.exec_path
), ('exec_path "%s" does not exist' % args.exec_path)
assert os.path.exists(args.exec_path), ('exec_path "%s" does not exist' % args.exec_path)
assert os.path.exists(args.dest), ('dest "%s" does not exist' % args.dest)
assert os.path.exists(args.read_elf
), ('read_elf "%s" does not exist' % args.read_elf)
assert os.path.exists(args.read_elf), ('read_elf "%s" does not exist' % args.read_elf)
parts = GetBuildIdParts(args.exec_path, args.read_elf)
dbg_prefix_base = os.path.join(args.dest, parts['prefix_dir'])
@ -135,8 +130,7 @@ def main():
# If the debug file hasn't changed, don't rewrite the debug and completion
# file, speeding up incremental builds.
if os.path.exists(dbg_file_path) and HashFile(args.exec_path
) == HashFile(dbg_file_path):
if os.path.exists(dbg_file_path) and HashFile(args.exec_path) == HashFile(dbg_file_path):
return 0
shutil.copyfile(args.exec_path, dbg_file_path)

View File

@ -50,9 +50,7 @@ def CopyPath(src, dst):
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--file-list', dest='file_list', action='store', required=True
)
parser.add_argument('--file-list', dest='file_list', action='store', required=True)
args = parser.parse_args()

View File

@ -11,16 +11,10 @@ import sys
def main():
parser = argparse.ArgumentParser(
description='Generate a script that invokes a Dart application'
)
parser.add_argument(
'--out', help='Path to the invocation file to generate', required=True
)
parser = argparse.ArgumentParser(description='Generate a script that invokes a Dart application')
parser.add_argument('--out', help='Path to the invocation file to generate', required=True)
parser.add_argument('--dart', help='Path to the Dart binary', required=True)
parser.add_argument(
'--snapshot', help='Path to the app snapshot', required=True
)
parser.add_argument('--snapshot', help='Path to the app snapshot', required=True)
args = parser.parse_args()
app_file = args.out
@ -28,14 +22,12 @@ def main():
if not os.path.exists(app_path):
os.makedirs(app_path)
script_template = string.Template(
'''#!/bin/sh
script_template = string.Template('''#!/bin/sh
$dart \\
$snapshot \\
"$$@"
'''
)
''')
with open(app_file, 'w') as file:
file.write(script_template.substitute(args.__dict__))
permissions = (

View File

@ -15,16 +15,12 @@ import re
import sys
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path += [
os.path.join(THIS_DIR, '..', '..', '..', 'third_party', 'pyyaml', 'lib3')
]
sys.path += [os.path.join(THIS_DIR, '..', '..', '..', 'third_party', 'pyyaml', 'lib3')]
import yaml
DEFAULT_LANGUAGE_VERSION = '2.8'
Package = collections.namedtuple(
'Package', ['name', 'rootUri', 'languageVersion', 'packageUri']
)
Package = collections.namedtuple('Package', ['name', 'rootUri', 'languageVersion', 'packageUri'])
class PackageConfig:
@ -107,12 +103,8 @@ def collect_packages(items, relative_to):
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--input', help='Path to original package_config', required=True
)
parser.add_argument(
'--output', help='Path to the updated package_config', required=True
)
parser.add_argument('--input', help='Path to original package_config', required=True)
parser.add_argument('--output', help='Path to the updated package_config', required=True)
parser.add_argument('--root', help='Path to fuchsia root', required=True)
parser.add_argument('--depfile', help='Path to the depfile', required=True)
args = parser.parse_args()
@ -131,11 +123,7 @@ def main():
with open(args.output, 'w') as output_file:
package_config = PackageConfig(packages)
json.dump(
package_config.asdict(),
output_file,
indent=2,
sort_keys=True,
separators=(',', ': ')
package_config.asdict(), output_file, indent=2, sort_keys=True, separators=(',', ': ')
)
return 0

View File

@ -31,16 +31,10 @@ def collect(path_prefix, lines):
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--path_prefix',
help='Directory path containing the manifest entry sources',
required=True
)
parser.add_argument(
'--input', help='Path to original manifest', required=True
)
parser.add_argument(
'--output', help='Path to the updated json file', required=True
'--path_prefix', help='Directory path containing the manifest entry sources', required=True
)
parser.add_argument('--input', help='Path to original manifest', required=True)
parser.add_argument('--output', help='Path to the updated json file', required=True)
args = parser.parse_args()
with open(args.input, 'r') as input_file:

View File

@ -14,14 +14,10 @@ import sys
def main():
parser = argparse.ArgumentParser(
'Merges sources of a Dart target and its dependencies',
fromfile_prefix_chars='@'
'Merges sources of a Dart target and its dependencies', fromfile_prefix_chars='@'
)
parser.add_argument(
'--output',
help='Path to output the final list',
type=argparse.FileType('w'),
required=True
'--output', help='Path to output the final list', type=argparse.FileType('w'), required=True
)
parser.add_argument(
'--depfile',
@ -34,16 +30,10 @@ def main():
help='Sources of this target',
nargs='*',
)
parser.add_argument(
'--source_lists',
help='Files containing lists of Dart sources',
nargs='*'
)
parser.add_argument('--source_lists', help='Files containing lists of Dart sources', nargs='*')
args = parser.parse_args()
args.depfile.write(
'{}: {}\n'.format(args.output.name, ' '.join(args.source_lists))
)
args.depfile.write('{}: {}\n'.format(args.output.name, ' '.join(args.source_lists)))
# Merges sources of this target, and all of its dependencies.
all_sources = set(args.sources)

View File

@ -15,15 +15,9 @@ def main():
"Verifies that all .dart files are included in sources, and sources don't include nonexsitent files"
)
parser.add_argument(
"--source_dir",
help="Path to the directory containing the package sources",
required=True
)
parser.add_argument(
"--stamp",
help="File to touch when source checking succeeds",
required=True
"--source_dir", help="Path to the directory containing the package sources", required=True
)
parser.add_argument("--stamp", help="File to touch when source checking succeeds", required=True)
parser.add_argument("sources", help="source files", nargs=argparse.REMAINDER)
args = parser.parse_args()
@ -41,8 +35,7 @@ def main():
expected_sources = set(args.sources)
# It is possible for sources to include dart files outside of source_dir.
actual_sources.update([
s for s in (expected_sources - actual_sources)
if src_dir_path.joinpath(s).resolve().exists()
s for s in (expected_sources - actual_sources) if src_dir_path.joinpath(s).resolve().exists()
],)
if actual_sources == expected_sources:
@ -56,14 +49,16 @@ def main():
missing_sources = actual_sources - expected_sources
if missing_sources:
print(
'\nSource files found that were missing from the "sources" parameter:\n{}\n'
.format("\n".join(sources_to_abs_path(missing_sources))),
'\nSource files found that were missing from the "sources" parameter:\n{}\n'.format(
"\n".join(sources_to_abs_path(missing_sources))
),
)
nonexistent_sources = expected_sources - actual_sources
if nonexistent_sources:
print(
'\nSource files listed in "sources" parameter but not found:\n{}\n'
.format("\n".join(sources_to_abs_path(nonexistent_sources))),
'\nSource files listed in "sources" parameter but not found:\n{}\n'.format(
"\n".join(sources_to_abs_path(nonexistent_sources))
),
)
return 1

View File

@ -13,12 +13,8 @@ def main():
parser = argparse.ArgumentParser(
description='Executes a command, then rewrites the depfile, converts all absolute paths to relative'
)
parser.add_argument(
'--depfile', help='Path to the depfile to rewrite', required=True
)
parser.add_argument(
'command', nargs='+', help='Positional args for the command to run'
)
parser.add_argument('--depfile', help='Path to the depfile to rewrite', required=True)
parser.add_argument('command', nargs='+', help='Positional args for the command to run')
args = parser.parse_args()
retval = subprocess.call(args.command)

View File

@ -24,12 +24,8 @@ def main():
type=Path,
required=True
)
parser.add_argument(
"--sources", help="List of FIDL source files", nargs="+", required=True
)
parser.add_argument(
"--dep-libraries", help="List of dependent libraries", nargs="*"
)
parser.add_argument("--sources", help="List of FIDL source files", nargs="+", required=True)
parser.add_argument("--dep-libraries", help="List of dependent libraries", nargs="*")
args, args_to_forward = parser.parse_known_args()
# Each line contains a library's source files separated by spaces.

View File

@ -72,12 +72,8 @@ def GatherArtifacts(src_root, dst_root, create_meta_package=True):
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--artifacts-root', dest='artifacts_root', action='store', required=True
)
parser.add_argument(
'--dest-dir', dest='dst_dir', action='store', required=True
)
parser.add_argument('--artifacts-root', dest='artifacts_root', action='store', required=True)
parser.add_argument('--dest-dir', dest='dst_dir', action='store', required=True)
args = parser.parse_args()

View File

@ -25,9 +25,7 @@ def GenerateManifest(package_dir):
common_prefix = os.path.commonprefix([root, package_dir])
rel_path = os.path.relpath(os.path.join(root, f), common_prefix)
from_package = os.path.abspath(os.path.join(package_dir, rel_path))
assert from_package, 'Failed to create from_package for %s' % os.path.join(
root, f
)
assert from_package, 'Failed to create from_package for %s' % os.path.join(root, f)
full_paths.append('%s=%s' % (rel_path, from_package))
parent_dir = os.path.abspath(os.path.join(package_dir, os.pardir))
@ -43,8 +41,7 @@ def CreateFarPackage(pm_bin, package_dir, signing_key, dst_dir, api_level):
manifest_path = GenerateManifest(package_dir)
pm_command_base = [
pm_bin, '-m', manifest_path, '-k', signing_key, '-o', dst_dir,
'--api-level', api_level
pm_bin, '-m', manifest_path, '-k', signing_key, '-o', dst_dir, '--api-level', api_level
]
# Build the package
@ -60,24 +57,13 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument('--pm-bin', dest='pm_bin', action='store', required=True)
parser.add_argument('--package-dir', dest='package_dir', action='store', required=True)
parser.add_argument('--manifest-file', dest='manifest_file', action='store', required=False)
parser.add_argument(
'--package-dir', dest='package_dir', action='store', required=True
)
parser.add_argument(
'--manifest-file', dest='manifest_file', action='store', required=False
)
parser.add_argument(
'--manifest-json-file',
dest='manifest_json_file',
action='store',
required=True
)
parser.add_argument(
'--far-name', dest='far_name', action='store', required=False
)
parser.add_argument(
'--api-level', dest='api_level', action='store', required=False
'--manifest-json-file', dest='manifest_json_file', action='store', required=True
)
parser.add_argument('--far-name', dest='far_name', action='store', required=False)
parser.add_argument('--api-level', dest='api_level', action='store', required=False)
args = parser.parse_args()
@ -113,16 +99,13 @@ def main():
# Use check_output so if anything goes wrong we get the output.
try:
build_command = [
'build', '--output-package-manifest', args.manifest_json_file
]
build_command = ['build', '--output-package-manifest', args.manifest_json_file]
if args.api_level is not None:
build_command = ['--api-level', args.api_level] + build_command
archive_command = [
'archive', '--output=' +
os.path.join(os.path.dirname(output_dir), args.far_name + "-0")
'archive', '--output=' + os.path.join(os.path.dirname(output_dir), args.far_name + "-0")
]
pm_commands = [build_command, archive_command]
@ -130,24 +113,16 @@ def main():
for pm_command in pm_commands:
subprocess.check_output(pm_command_base + pm_command)
except subprocess.CalledProcessError as e:
print(
'==================== Manifest contents ========================================='
)
print('==================== Manifest contents =========================================')
with open(manifest_file, 'r') as manifest:
sys.stdout.write(manifest.read())
print(
'==================== End manifest contents ====================================='
)
print('==================== End manifest contents =====================================')
meta_contents_path = os.path.join(output_dir, 'meta', 'contents')
if os.path.exists(meta_contents_path):
print(
'==================== meta/contents ============================================='
)
print('==================== meta/contents =============================================')
with open(meta_contents_path, 'r') as meta_contents:
sys.stdout.write(meta_contents.read())
print(
'==================== End meta/contents ========================================='
)
print('==================== End meta/contents =========================================')
raise
return 0

View File

@ -19,12 +19,8 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument('--pm-bin', dest='pm_bin', action='store', required=True)
parser.add_argument(
'--repo-dir', dest='repo_dir', action='store', required=True
)
parser.add_argument(
'--archive', dest='archives', action='append', required=True
)
parser.add_argument('--repo-dir', dest='repo_dir', action='store', required=True)
parser.add_argument('--archive', dest='archives', action='append', required=True)
args = parser.parse_args()

View File

@ -54,8 +54,7 @@ def _get_stripped_path(bin_path):
returns |bin_path| if no stripped path is found.
"""
stripped_path = bin_path.replace('lib.unstripped/',
'lib/').replace('exe.unstripped/', '')
stripped_path = bin_path.replace('lib.unstripped/', 'lib/').replace('exe.unstripped/', '')
if os.path.exists(stripped_path):
return stripped_path
else:
@ -101,8 +100,7 @@ def _write_build_ids_txt(binary_paths, ids_txt_path):
# Create a set to dedupe stripped binary paths in case both the stripped and
# unstripped versions of a binary are specified.
readelf_stdout = subprocess.check_output(['readelf', '-n'] +
sorted(unprocessed_binary_paths)
).decode('utf8')
sorted(unprocessed_binary_paths)).decode('utf8')
if len(binary_paths) == 1:
# Readelf won't report a binary's path if only one was provided to the
@ -122,8 +120,7 @@ def _write_build_ids_txt(binary_paths, ids_txt_path):
# Paths to the unstripped executables listed in "ids.txt" are specified
# as relative paths to that file.
unstripped_rel_path = os.path.relpath(
os.path.abspath(binary_path),
os.path.dirname(os.path.abspath(ids_txt_path))
os.path.abspath(binary_path), os.path.dirname(os.path.abspath(ids_txt_path))
)
build_id = line[len(READELF_BUILD_ID_PREFIX):]
@ -151,11 +148,7 @@ def _get_component_manifests(component_info):
# until compile time.
def _get_resource_items_from_json_items(component_info):
nested_resources = []
files = [
c.get('source')
for c in component_info
if c.get('type') == 'json_of_resources'
]
files = [c.get('source') for c in component_info if c.get('type') == 'json_of_resources']
for json_file in files:
for resource in _parse_component(json_file):
nested_resources.append(resource)
@ -217,9 +210,7 @@ def _write_meta_package_manifest(
manifest_entries['meta/package'] = package_json_filepath
def _write_component_manifest(
manifest_entries, component_info, archive_manifest_path, out_dir
):
def _write_component_manifest(manifest_entries, component_info, archive_manifest_path, out_dir):
"""Copy component manifest files and add to archive manifest.
Raises an exception if a component uses a unknown manifest version.
@ -241,8 +232,7 @@ def _write_component_manifest(
# os.path.dirname(archive_manifest_path),
# component_manifest.get('output_name') + extension)
manifest_dest_file_path = os.path.join(
os.path.dirname(archive_manifest_path),
component_manifest.get('output_name')
os.path.dirname(archive_manifest_path), component_manifest.get('output_name')
)
# Add the 'meta/' subdir, for example, if `output_name` includes it
os.makedirs(os.path.dirname(manifest_dest_file_path), exist_ok=True)
@ -254,8 +244,7 @@ def _write_component_manifest(
def _write_package_manifest(
manifest_entries, expanded_files, out_dir, exclude_file, root_dir,
component_info
manifest_entries, expanded_files, out_dir, exclude_file, root_dir, component_info
):
"""Writes the package manifest for a Fuchsia package
@ -321,13 +310,12 @@ def _build_manifest(args):
# because of runtime libraries.
manifest_entries = {}
_write_meta_package_manifest(
manifest_entries, args.manifest_path, args.app_name, args.out_dir,
args.package_version
manifest_entries, args.manifest_path, args.app_name, args.out_dir, args.package_version
)
for component_item in component_info:
_write_package_manifest(
manifest_entries, expanded_files, args.out_dir, args.exclude_file,
args.root_dir, component_item
manifest_entries, expanded_files, args.out_dir, args.exclude_file, args.root_dir,
component_item
)
component_manifests.append(
_write_component_manifest(
@ -347,13 +335,11 @@ def _build_manifest(args):
roots = [gen_dir, args.root_dir, args.out_dir]
excluded_files_set = set(args.exclude_file)
expanded_deps_files = [
path for path in expanded_files
if make_package_path(path, roots) not in excluded_files_set
path for path in expanded_files if make_package_path(path, roots) not in excluded_files_set
]
_write_gn_deps_file(
args.depfile_path, args.manifest_path, component_manifests, args.out_dir,
expanded_deps_files
args.depfile_path, args.manifest_path, component_manifests, args.out_dir, expanded_deps_files
)
return 0
@ -364,29 +350,19 @@ def main():
parser.add_argument('--out-dir', required=True, help='Build output directory')
parser.add_argument('--app-name', required=True, help='Package name')
parser.add_argument(
'--runtime-deps-file',
required=True,
help='File with the list of runtime dependencies.'
)
parser.add_argument(
'--depfile-path', required=True, help='Path to write GN deps file.'
'--runtime-deps-file', required=True, help='File with the list of runtime dependencies.'
)
parser.add_argument('--depfile-path', required=True, help='Path to write GN deps file.')
parser.add_argument(
'--exclude-file',
action='append',
default=[],
help='Package-relative file path to exclude from the package.'
)
parser.add_argument(
'--manifest-path', required=True, help='Manifest output path.'
)
parser.add_argument(
'--build-ids-file', required=True, help='Debug symbol index path.'
)
parser.add_argument('--manifest-path', required=True, help='Manifest output path.')
parser.add_argument('--build-ids-file', required=True, help='Debug symbol index path.')
parser.add_argument('--json-file', required=True)
parser.add_argument(
'--package-version', default='0', help='Version of the package'
)
parser.add_argument('--package-version', default='0', help='Version of the package')
args = parser.parse_args()

View File

@ -17,46 +17,33 @@ import json
def GetDartSdkGitRevision(buildroot):
project_root = path.join(buildroot, 'third_party', 'dart')
return subprocess.check_output([
'git', '-C', project_root, 'rev-parse', 'HEAD'
]).strip()
return subprocess.check_output(['git', '-C', project_root, 'rev-parse', 'HEAD']).strip()
def GetDartSdkSemanticVersion(buildroot):
project_root = path.join(buildroot, 'third_party', 'dart')
return subprocess.check_output([
'git', '-C', project_root, 'describe', '--abbrev=0'
]).strip()
return subprocess.check_output(['git', '-C', project_root, 'describe', '--abbrev=0']).strip()
def GetFlutterEngineGitRevision(buildroot):
project_root = path.join(buildroot, 'flutter')
return subprocess.check_output([
'git', '-C', project_root, 'rev-parse', 'HEAD'
]).strip()
return subprocess.check_output(['git', '-C', project_root, 'rev-parse', 'HEAD']).strip()
def GetFuchsiaSdkVersion(buildroot):
with open(path.join(buildroot, 'fuchsia', 'sdk',
'linux' if sys.platform.startswith('linux') else 'mac',
'meta', 'manifest.json'), 'r') as fuchsia_sdk_manifest:
'linux' if sys.platform.startswith('linux') else 'mac', 'meta',
'manifest.json'), 'r') as fuchsia_sdk_manifest:
return json.load(fuchsia_sdk_manifest)['id']
def main():
# Parse arguments.
parser = ArgumentParser()
parser.add_argument('--input', action='store', help='input file path', required=True)
parser.add_argument('--output', action='store', help='output file path', required=True)
parser.add_argument(
'--input', action='store', help='input file path', required=True
)
parser.add_argument(
'--output', action='store', help='output file path', required=True
)
parser.add_argument(
'--buildroot',
action='store',
help='path to the flutter engine buildroot',
required=True
'--buildroot', action='store', help='path to the flutter engine buildroot', required=True
)
args = parser.parse_args()
@ -72,9 +59,7 @@ def main():
).replace(
'{{FLUTTER_ENGINE_GIT_REVISION}}',
GetFlutterEngineGitRevision(args.buildroot).decode('utf-8')
).replace(
'{{FUCHSIA_SDK_VERSION}}', GetFuchsiaSdkVersion(args.buildroot)
)
).replace('{{FUCHSIA_SDK_VERSION}}', GetFuchsiaSdkVersion(args.buildroot))
)

View File

@ -21,9 +21,7 @@ import tempfile
# Path to the engine root checkout. This is used to calculate absolute
# paths if relative ones are passed to the script.
BUILD_ROOT_DIR = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
BUILD_ROOT_DIR = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
def IsLinux():
@ -103,9 +101,7 @@ def ProcessCIPDPackage(upload, cipd_yaml, engine_version, out_dir, target_arch):
else:
command = [
'cipd', 'pkg-build', '-pkg-def', cipd_yaml, '-out',
os.path.join(
_packaging_dir, 'fuchsia-debug-symbols-%s.cipd' % target_arch
)
os.path.join(_packaging_dir, 'fuchsia-debug-symbols-%s.cipd' % target_arch)
]
# Retry up to three times. We've seen CIPD fail on verification in some
@ -181,14 +177,8 @@ def main():
'empty temp directory'
)
)
parser.add_argument(
'--target-arch', type=str, choices=['x64', 'arm64'], required=True
)
parser.add_argument(
'--engine-version',
required=True,
help='Specifies the flutter engine SHA.'
)
parser.add_argument('--target-arch', type=str, choices=['x64', 'arm64'], required=True)
parser.add_argument('--engine-version', required=True, help='Specifies the flutter engine SHA.')
parser.add_argument('--upload', default=False, action='store_true')

View File

@ -17,12 +17,8 @@ import hashlib
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--input', dest='file_path', action='store', required=True
)
parser.add_argument(
'--clang-cpu', dest='clang_cpu', action='store', required=True
)
parser.add_argument('--input', dest='file_path', action='store', required=True)
parser.add_argument('--clang-cpu', dest='clang_cpu', action='store', required=True)
args = parser.parse_args()

View File

@ -19,9 +19,7 @@ def main():
dest = sys.argv[2]
if os.path.isdir(source):
print(
f'{source} is a directory, tool "copy" does not support directory copies'
)
print(f'{source} is a directory, tool "copy" does not support directory copies')
return 1
if os.path.exists(dest):

View File

@ -14,9 +14,7 @@ import tempfile
## Path to the engine root checkout. This is used to calculate absolute
## paths if relative ones are passed to the script.
BUILD_ROOT_DIR = os.path.abspath(
os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')
)
BUILD_ROOT_DIR = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..', '..'))
FUCHSIA_ARTIFACTS_DEBUG_NAMESPACE = 'debug'
FUCHSIA_ARTIFACTS_BUCKET_NAME = 'fuchsia-artifacts-release'
@ -35,9 +33,7 @@ def remote_filename(exec_path):
def exists_remotely(remote_path):
gsutil = os.path.join(os.environ['DEPOT_TOOLS'], 'gsutil.py')
command = ['python3', gsutil, '--', 'stat', remote_path]
process = subprocess.Popen(
command, stderr=subprocess.PIPE, stdout=subprocess.PIPE
)
process = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
stdout, stderr = process.communicate()
return_code = process.wait()
if return_code == 0:
@ -60,8 +56,7 @@ def process_symbols(should_upload, symbol_dir):
for file in files:
remote_path = 'gs://%s/%s/%s' % (
FUCHSIA_ARTIFACTS_BUCKET_NAME, FUCHSIA_ARTIFACTS_DEBUG_NAMESPACE,
remote_filename(file)
FUCHSIA_ARTIFACTS_BUCKET_NAME, FUCHSIA_ARTIFACTS_DEBUG_NAMESPACE, remote_filename(file)
)
if should_upload and not exists_remotely(remote_path):
gsutil = os.path.join(os.environ['DEPOT_TOOLS'], 'gsutil.py')
@ -75,20 +70,11 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--symbol-dir',
required=True,
help='Directory that contain the debug symbols.'
'--symbol-dir', required=True, help='Directory that contain the debug symbols.'
)
parser.add_argument('--engine-version', required=True, help='Specifies the flutter engine SHA.')
parser.add_argument(
'--engine-version',
required=True,
help='Specifies the flutter engine SHA.'
)
parser.add_argument(
'--upload',
default=False,
action='store_true',
help='If set, uploads symbols to the server.'
'--upload', default=False, action='store_true', help='If set, uploads symbols to the server.'
)
args = parser.parse_args()

View File

@ -8,12 +8,7 @@ import platform
import subprocess
import sys
sys.path.insert(
0,
os.path.abspath(
os.path.join(os.path.dirname(__file__), 'test_scripts/test/')
)
)
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'test_scripts/test/')))
from common import catch_sigterm, wait_for_sigterm
@ -27,19 +22,13 @@ def Main():
# Ensures the signals can be correctly forwarded to the subprocesses.
catch_sigterm()
os.environ['SRC_ROOT'] = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../../')
)
os.environ['SRC_ROOT'] = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../'))
# Flutter uses a different repo structure and fuchsia sdk is not in the
# third_party/, so images root and sdk root need to be explicitly set.
os.environ['FUCHSIA_IMAGES_ROOT'] = os.path.join(
os.environ['SRC_ROOT'], 'fuchsia/images/'
)
os.environ['FUCHSIA_IMAGES_ROOT'] = os.path.join(os.environ['SRC_ROOT'], 'fuchsia/images/')
assert platform.system() == 'Linux', 'Unsupported OS ' + platform.system()
os.environ['FUCHSIA_SDK_ROOT'] = os.path.join(
os.environ['SRC_ROOT'], 'fuchsia/sdk/linux/'
)
os.environ['FUCHSIA_SDK_ROOT'] = os.path.join(os.environ['SRC_ROOT'], 'fuchsia/sdk/linux/')
with subprocess.Popen(sys.argv[1:]) as proc:
try:

View File

@ -30,9 +30,7 @@ public final class BuildConfig {{
def main():
parser = argparse.ArgumentParser(
description='Generate BuildConfig.java for Android'
)
parser = argparse.ArgumentParser(description='Generate BuildConfig.java for Android')
parser.add_argument('--runtime-mode', type=str, required=True)
parser.add_argument('--out', type=str, required=True)

View File

@ -32,19 +32,15 @@ SECTIONS = {
]
),
'linux':
Section(
'Linux Embedder', [
'shell/platform/linux',
'shell/platform/common',
]
),
Section('Linux Embedder', [
'shell/platform/linux',
'shell/platform/common',
]),
'windows':
Section(
'Windows Embedder', [
'shell/platform/windows',
'shell/platform/common',
]
),
Section('Windows Embedder', [
'shell/platform/windows',
'shell/platform/common',
]),
'impeller':
Section('Impeller', [
'impeller',
@ -56,12 +52,8 @@ def generate_doxyfile(section, output_dir, log_file, doxy_file):
doxyfile = open('docs/Doxyfile.template', 'r').read()
doxyfile = doxyfile.replace('@@OUTPUT_DIRECTORY@@', output_dir)
doxyfile = doxyfile.replace('@@LOG_FILE@@', log_file)
doxyfile = doxyfile.replace(
'@@INPUT_DIRECTORIES@@', '"{}"'.format('" "'.join(section.inputs))
)
doxyfile = doxyfile.replace(
'@@PROJECT_NAME@@', 'Flutter {}'.format(section.title)
)
doxyfile = doxyfile.replace('@@INPUT_DIRECTORIES@@', '"{}"'.format('" "'.join(section.inputs)))
doxyfile = doxyfile.replace('@@PROJECT_NAME@@', 'Flutter {}'.format(section.title))
doxyfile = doxyfile.replace(
'@@DOCSET_FEEDNAME@@', 'Flutter {} Documentation'.format(section.title)
)

View File

@ -118,8 +118,7 @@ def glyph_program(glyph):
# Round To Grid every on-curve point, but ignore those who are on the ASCENT
# or DESCENT line. This step keeps "p" (ascent flushed) and "É" (descent
# flushed)'s y extents from overlapping each other.
for index, point in enumerate(
[p for contour in glyph.foreground for p in contour]):
for index, point in enumerate([p for contour in glyph.foreground for p in contour]):
if point.y not in [ASCENT, DESCENT]:
instructions += f"""
PUSHB_1
@ -210,9 +209,8 @@ square_codepoints = [
[0x221E, 0x222B, 0x2248, 0x2260],
unicode_range(0x2264, 0x2265),
[
0x22F2, 0x25CA, 0x3007, 0x4E00, 0x4E03, 0x4E09, 0x4E5D, 0x4E8C,
0x4E94, 0x516B, 0x516D, 0x5341, 0x56D7, 0x56DB, 0x571F, 0x6728,
0x6C34, 0x706B, 0x91D1
0x22F2, 0x25CA, 0x3007, 0x4E00, 0x4E03, 0x4E09, 0x4E5D, 0x4E8C, 0x4E94, 0x516B, 0x516D,
0x5341, 0x56D7, 0x56DB, 0x571F, 0x6728, 0x6C34, 0x706B, 0x91D1
],
unicode_range(0xF000, 0xF002),
] for codepoint in l
@ -253,8 +251,7 @@ create_glyph(".notdef", not_def_glyph).unicode = -1
def create_no_path_glyph(codepoint, advance_percentage):
name = "Zero Advance" if advance_percentage == 0 else (
"Full Advance"
if advance_percentage == 1 else f"1/{(int)(1/advance_percentage)} Advance"
"Full Advance" if advance_percentage == 1 else f"1/{(int)(1/advance_percentage)} Advance"
)
no_path_glyph = font.createChar(codepoint, name)
no_path_glyph.width = (int)(EM * advance_percentage)
@ -295,9 +292,7 @@ for glyph in font.glyphs():
else:
glyph_mapping[script] = [codepoint]
codepoints_by_script = [
glyph_mapping.get(script, []) for script in script_list
]
codepoints_by_script = [glyph_mapping.get(script, []) for script in script_list]
def describe_codepoint_range(codepoints):
if not codepoints:
@ -320,8 +315,7 @@ for glyph in font.glyphs():
full_list = " ".join([map_char(c) for c in characters])
return "**codepoint(s):** " + ", ".join([
f"{hex(r[0])}-{hex(r[-1])}" if len(r) > 1 else hex(r[0])
for r in codepoint_ranges
f"{hex(r[0])}-{hex(r[-1])}" if len(r) > 1 else hex(r[0]) for r in codepoint_ranges
]) + "<br />" + "**character(s):** " + full_list
print(

View File

@ -12,9 +12,7 @@ import subprocess
import sys
SRC_ROOT = os.path.dirname(
os.path.dirname(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
)
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
)
FLUTTER_DIR = os.path.join(SRC_ROOT, 'flutter')
@ -34,8 +32,7 @@ def Main(argv):
'config',
'core.hooksPath',
githooks,
],
cwd=FLUTTER_DIR)
], cwd=FLUTTER_DIR)
return result.returncode

View File

@ -32,9 +32,7 @@ class GNTestCase(unittest.TestCase):
def test_to_gn_args(self):
# This would not necessarily be true on a 32-bit machine?
self.assertEquals(
self._gn_args(['--ios', '--simulator'])['target_cpu'], 'x64'
)
self.assertEquals(self._gn_args(['--ios', '--simulator'])['target_cpu'], 'x64')
self.assertEquals(self._gn_args(['--ios'])['target_cpu'], 'arm')
def test_cannot_use_android_and_enable_unittests(self):

View File

@ -17,29 +17,23 @@ SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
def JavadocBin():
if sys.platform == 'darwin':
return os.path.join(
SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk',
'Contents', 'Home', 'bin', 'javadoc'
SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'Contents', 'Home', 'bin',
'javadoc'
)
elif sys.platform.startswith(('cygwin', 'win')):
return os.path.join(
SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin',
'javadoc.exe'
SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin', 'javadoc.exe'
)
else:
return os.path.join(
SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin',
'javadoc'
SCRIPT_DIR, '..', '..', '..', 'third_party', 'java', 'openjdk', 'bin', 'javadoc'
)
def main():
parser = argparse.ArgumentParser(
description='Runs javadoc on Flutter Android libraries'
)
parser = argparse.ArgumentParser(description='Runs javadoc on Flutter Android libraries')
parser.add_argument('--out-dir', type=str, required=True)
parser.add_argument(
'--android-source-root', type=str, default=ANDROID_SRC_ROOT
)
parser.add_argument('--android-source-root', type=str, default=ANDROID_SRC_ROOT)
parser.add_argument('--build-config-path', type=str)
parser.add_argument('--third-party', type=str, default='third_party')
parser.add_argument('--quiet', default=False, action='store_true')
@ -57,12 +51,8 @@ def main():
classpath = [
args.android_source_root,
os.path.join(
args.third_party, 'android_tools/sdk/platforms/android-34/android.jar'
),
os.path.join(
args.third_party, 'android_embedding_dependencies', 'lib', '*'
),
os.path.join(args.third_party, 'android_tools/sdk/platforms/android-34/android.jar'),
os.path.join(args.third_party, 'android_embedding_dependencies', 'lib', '*'),
]
if args.build_config_path:
classpath.append(args.build_config_path)

View File

@ -55,17 +55,11 @@ def Main():
default=False,
help='Force artifact upload, overwriting existing artifacts.'
)
parser.add_argument(
'--all', action='store_true', default=False, help='Re-run all builds.'
)
parser.add_argument('--all', action='store_true', default=False, help='Re-run all builds.')
parser.add_argument('--builder', type=str, help='The builer to rerun.')
parser.add_argument('--commit', type=str, required=True, help='The commit to rerun.')
parser.add_argument(
'--commit', type=str, required=True, help='The commit to rerun.'
)
parser.add_argument(
'--dry-run',
action='store_true',
help='Print what would be done, but do nothing.'
'--dry-run', action='store_true', help='Print what would be done, but do nothing.'
)
args = parser.parse_args()
@ -92,10 +86,7 @@ def Main():
capture_output=True,
)
if auth_result.returncode != 0:
print(
'Auth failed:\nstdout:\n%s\nstderr:\n%s' %
(auth_result.stdout, auth_result.stderr)
)
print('Auth failed:\nstdout:\n%s\nstderr:\n%s' % (auth_result.stdout, auth_result.stderr))
return 1
auth_token = auth_result.stdout.rstrip()
@ -106,9 +97,7 @@ def Main():
% (args.commit, builder)
)
else:
params = '{"Commit": "%s", "Builder": "%s", "Repo": "engine"}' % (
args.commit, builder
)
params = '{"Commit": "%s", "Builder": "%s", "Repo": "engine"}' % (args.commit, builder)
curl_command = [
'curl',
'http://flutter-dashboard.appspot.com/api/reset-prod-task',