diff options
Diffstat (limited to 'deps/v8/build/android/gyp')
102 files changed, 14165 insertions, 0 deletions
diff --git a/deps/v8/build/android/gyp/OWNERS b/deps/v8/build/android/gyp/OWNERS new file mode 100644 index 0000000000..74dca6f718 --- /dev/null +++ b/deps/v8/build/android/gyp/OWNERS @@ -0,0 +1,6 @@ +agrieve@chromium.org +estevenson@chromium.org +digit@chromium.org +wnwen@chromium.org + +# COMPONENT: Build diff --git a/deps/v8/build/android/gyp/aar.py b/deps/v8/build/android/gyp/aar.py new file mode 100755 index 0000000000..d0f357db33 --- /dev/null +++ b/deps/v8/build/android/gyp/aar.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python +# +# Copyright 2016 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Processes an Android AAR file.""" + +import argparse +import os +import posixpath +import re +import shutil +import sys +from xml.etree import ElementTree +import zipfile + +from util import build_utils +from util import md5_check + +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir))) +import gn_helpers + + +def _IsManifestEmpty(manifest_str): + """Returns whether the given manifest has merge-worthy elements. + + E.g.: <activity>, <service>, etc. 
+ """ + doc = ElementTree.fromstring(manifest_str) + for node in doc: + if node.tag == 'application': + if len(node): + return False + elif node.tag != 'uses-sdk': + return False + + return True + + +def _CreateInfo(aar_file): + data = {} + data['aidl'] = [] + data['assets'] = [] + data['resources'] = [] + data['subjars'] = [] + data['subjar_tuples'] = [] + data['has_classes_jar'] = False + data['has_proguard_flags'] = False + data['has_native_libraries'] = False + data['has_r_text_file'] = False + with zipfile.ZipFile(aar_file) as z: + data['is_manifest_empty'] = ( + _IsManifestEmpty(z.read('AndroidManifest.xml'))) + + for name in z.namelist(): + if name.endswith('/'): + continue + if name.startswith('aidl/'): + data['aidl'].append(name) + elif name.startswith('res/'): + data['resources'].append(name) + elif name.startswith('libs/') and name.endswith('.jar'): + label = posixpath.basename(name)[:-4] + label = re.sub(r'[^a-zA-Z0-9._]', '_', label) + data['subjars'].append(name) + data['subjar_tuples'].append([label, name]) + elif name.startswith('assets/'): + data['assets'].append(name) + elif name.startswith('jni/'): + data['has_native_libraries'] = True + if 'native_libraries' in data: + data['native_libraries'].append(name) + else: + data['native_libraries'] = [name] + elif name == 'classes.jar': + data['has_classes_jar'] = True + elif name == 'proguard.txt': + data['has_proguard_flags'] = True + elif name == 'R.txt': + # Some AARs, e.g. gvr_controller_java, have empty R.txt. Such AARs + # have no resources as well. We treat empty R.txt as having no R.txt. + data['has_r_text_file'] = (z.read('R.txt').strip() != '') + + return """\ +# Generated by //build/android/gyp/aar.py +# To regenerate, use "update_android_aar_prebuilts = true" and run "gn gen". 
+ +""" + gn_helpers.ToGNString(data) + + +def _AddCommonArgs(parser): + parser.add_argument('aar_file', + help='Path to the AAR file.', + type=os.path.normpath) + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + command_parsers = parser.add_subparsers(dest='command') + subp = command_parsers.add_parser( + 'list', help='Output a GN scope describing the contents of the .aar.') + _AddCommonArgs(subp) + subp.add_argument('--output', + help='Output file.', + default='-') + + subp = command_parsers.add_parser('extract', help='Extracts the .aar') + _AddCommonArgs(subp) + subp.add_argument('--output-dir', + help='Output directory for the extracted files.', + required=True, + type=os.path.normpath) + subp.add_argument('--assert-info-file', + help='Path to .info file. Asserts that it matches what ' + '"list" would output.', + type=argparse.FileType('r')) + + args = parser.parse_args() + + if args.command == 'extract': + if args.assert_info_file: + expected = _CreateInfo(args.aar_file) + actual = args.assert_info_file.read() + if actual != expected: + raise Exception('android_aar_prebuilt() cached .info file is ' + 'out-of-date. Run gn gen with ' + 'update_android_aar_prebuilts=true to update it.') + + def clobber(): + # Clear previously extracted versions of the AAR if it is obsolete. + shutil.rmtree(args.output_dir, ignore_errors=True) + build_utils.ExtractAll(args.aar_file, path=args.output_dir) + + with zipfile.ZipFile(args.aar_file) as zf: + md5_check.CallAndRecordIfStale( + clobber, input_paths=[args.aar_file], + output_paths=[ + os.path.join(args.output_dir, n) for n in zf.namelist()]) + + elif args.command == 'list': + aar_info = _CreateInfo(args.aar_file) + aar_output_present = args.output != '-' and os.path.isfile(args.output) + if aar_output_present: + # Some .info files are read-only, for examples the cipd-controlled ones + # under third_party/android_deps/repositoty. 
To deal with these, first + # that its content is correct, and if it is, exit without touching + # the file system. + file_info = open(args.output, 'r').read() + if file_info == aar_info: + return + + # Try to write the file. This may fail for read-only ones that were + # not updated. + try: + with open(args.output, 'w') as f: + f.write(aar_info) + except IOError as e: + if not aar_output_present: + raise e + raise Exception('Could not update output file: %s\n%s\n' % + (args.output, e)) + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/aar.pydeps b/deps/v8/build/android/gyp/aar.pydeps new file mode 100644 index 0000000000..e08c5475e3 --- /dev/null +++ b/deps/v8/build/android/gyp/aar.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aar.pydeps build/android/gyp/aar.py +../../gn_helpers.py +aar.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/aidl.py b/deps/v8/build/android/gyp/aidl.py new file mode 100755 index 0000000000..64ad29041a --- /dev/null +++ b/deps/v8/build/android/gyp/aidl.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Invokes Android's aidl +""" + +import optparse +import os +import re +import sys +import zipfile + +from util import build_utils + + +def main(argv): + option_parser = optparse.OptionParser() + option_parser.add_option('--aidl-path', help='Path to the aidl binary.') + option_parser.add_option('--imports', help='Files to import.') + option_parser.add_option('--includes', + help='Directories to add as import search paths.') + option_parser.add_option('--srcjar', help='Path for srcjar output.') + options, args = option_parser.parse_args(argv[1:]) + + with build_utils.TempDir() as temp_dir: + for f in args: + classname = os.path.splitext(os.path.basename(f))[0] + output = os.path.join(temp_dir, classname + '.java') + aidl_cmd = [options.aidl_path] + aidl_cmd += [ + '-p' + s for s in build_utils.ParseGnList(options.imports) + ] + if options.includes is not None: + aidl_cmd += [ + '-I' + s for s in build_utils.ParseGnList(options.includes) + ] + aidl_cmd += [ + f, + output + ] + build_utils.CheckOutput(aidl_cmd) + + with build_utils.AtomicOutput(options.srcjar) as f: + with zipfile.ZipFile(f, 'w') as srcjar: + for path in build_utils.FindInDirectory(temp_dir, '*.java'): + with open(path) as fileobj: + data = fileobj.read() + pkg_name = re.search(r'^\s*package\s+(.*?)\s*;', data, re.M).group(1) + arcname = '%s/%s' % ( + pkg_name.replace('.', '/'), os.path.basename(path)) + build_utils.AddToZipHermetic(srcjar, arcname, data=data) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/deps/v8/build/android/gyp/aidl.pydeps b/deps/v8/build/android/gyp/aidl.pydeps new file mode 100644 index 0000000000..2dbce376f1 --- /dev/null +++ b/deps/v8/build/android/gyp/aidl.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/aidl.pydeps build/android/gyp/aidl.py +../../gn_helpers.py +aidl.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git 
a/deps/v8/build/android/gyp/apkbuilder.py b/deps/v8/build/android/gyp/apkbuilder.py new file mode 100755 index 0000000000..310a192828 --- /dev/null +++ b/deps/v8/build/android/gyp/apkbuilder.py @@ -0,0 +1,377 @@ +#!/usr/bin/env python +# +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Adds the code parts to a resource APK.""" + +import argparse +import itertools +import os +import shutil +import sys +import tempfile +import zipfile + +import finalize_apk + +from util import build_utils + + +# Taken from aapt's Package.cpp: +_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2', + '.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid', + '.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf', + '.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2', + '.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm') + + +def _ParseArgs(args): + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + parser.add_argument( + '--assets', + help='GYP-list of files to add as assets in the form ' + '"srcPath:zipPath", where ":zipPath" is optional.') + parser.add_argument( + '--java-resources', help='GYP-list of java_resources JARs to include.') + parser.add_argument('--write-asset-list', + action='store_true', + help='Whether to create an assets/assets_list file.') + parser.add_argument( + '--uncompressed-assets', + help='Same as --assets, except disables compression.') + parser.add_argument('--resource-apk', + help='An .ap_ file built using aapt', + required=True) + parser.add_argument('--output-apk', + help='Path to the output file', + required=True) + parser.add_argument('--format', choices=['apk', 'bundle-module'], + default='apk', help='Specify output format.') + parser.add_argument('--dex-file', + help='Path to the classes.dex to use') + parser.add_argument('--uncompress-dex', action='store_true', + help='Store .dex files uncompressed 
in the APK') + parser.add_argument('--native-libs', + action='append', + help='GYP-list of native libraries to include. ' + 'Can be specified multiple times.', + default=[]) + parser.add_argument('--secondary-native-libs', + action='append', + help='GYP-list of native libraries for secondary ' + 'android-abi. Can be specified multiple times.', + default=[]) + parser.add_argument('--android-abi', + help='Android architecture to use for native libraries') + parser.add_argument('--secondary-android-abi', + help='The secondary Android architecture to use for' + 'secondary native libraries') + parser.add_argument( + '--native-lib-placeholders', + help='GYP-list of native library placeholders to add.') + parser.add_argument( + '--secondary-native-lib-placeholders', + help='GYP-list of native library placeholders to add ' + 'for the secondary ABI') + parser.add_argument('--uncompress-shared-libraries', default='False', + choices=['true', 'True', 'false', 'False'], + help='Whether to uncompress native shared libraries. 
Argument must be ' + 'a boolean value.') + parser.add_argument('--apksigner-path', + help='Path to the apksigner executable.') + parser.add_argument('--zipalign-path', + help='Path to the zipalign executable.') + parser.add_argument('--key-path', + help='Path to keystore for signing.') + parser.add_argument('--key-passwd', + help='Keystore password') + parser.add_argument('--key-name', + help='Keystore name') + options = parser.parse_args(args) + options.assets = build_utils.ParseGnList(options.assets) + options.uncompressed_assets = build_utils.ParseGnList( + options.uncompressed_assets) + options.native_lib_placeholders = build_utils.ParseGnList( + options.native_lib_placeholders) + options.secondary_native_lib_placeholders = build_utils.ParseGnList( + options.secondary_native_lib_placeholders) + options.java_resources = build_utils.ParseGnList(options.java_resources) + all_libs = [] + for gyp_list in options.native_libs: + all_libs.extend(build_utils.ParseGnList(gyp_list)) + options.native_libs = all_libs + secondary_libs = [] + for gyp_list in options.secondary_native_libs: + secondary_libs.extend(build_utils.ParseGnList(gyp_list)) + options.secondary_native_libs = secondary_libs + + # --apksigner-path, --zipalign-path, --key-xxx arguments are + # required when building an APK, but not a bundle module. + if options.format == 'apk': + required_args = ['apksigner_path', 'zipalign_path', 'key_path', + 'key_passwd', 'key_name'] + for required in required_args: + if not vars(options)[required]: + raise Exception('Argument --%s is required for APKs.' 
% ( + required.replace('_', '-'))) + + options.uncompress_shared_libraries = \ + options.uncompress_shared_libraries in [ 'true', 'True' ] + + if not options.android_abi and (options.native_libs or + options.native_lib_placeholders): + raise Exception('Must specify --android-abi with --native-libs') + if not options.secondary_android_abi and (options.secondary_native_libs or + options.secondary_native_lib_placeholders): + raise Exception('Must specify --secondary-android-abi with' + ' --secondary-native-libs') + return options + + +def _SplitAssetPath(path): + """Returns (src, dest) given an asset path in the form src[:dest].""" + path_parts = path.split(':') + src_path = path_parts[0] + if len(path_parts) > 1: + dest_path = path_parts[1] + else: + dest_path = os.path.basename(src_path) + return src_path, dest_path + + +def _ExpandPaths(paths): + """Converts src:dst into tuples and enumerates files within directories. + + Args: + paths: Paths in the form "src_path:dest_path" + + Returns: + A list of (src_path, dest_path) tuples sorted by dest_path (for stable + ordering within output .apk). + """ + ret = [] + for path in paths: + src_path, dest_path = _SplitAssetPath(path) + if os.path.isdir(src_path): + for f in build_utils.FindInDirectory(src_path, '*'): + ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:]))) + else: + ret.append((src_path, dest_path)) + ret.sort(key=lambda t:t[1]) + return ret + + +def _AddAssets(apk, path_tuples, disable_compression=False): + """Adds the given paths to the apk. + + Args: + apk: ZipFile to write to. + paths: List of paths (with optional :zipPath suffix) to add. + disable_compression: Whether to disable compression. + """ + # Group all uncompressed assets together in the hope that it will increase + # locality of mmap'ed files. 
+ for target_compress in (False, True): + for src_path, dest_path in path_tuples: + + compress = not disable_compression and ( + os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS) + if target_compress == compress: + apk_path = 'assets/' + dest_path + try: + apk.getinfo(apk_path) + # Should never happen since write_build_config.py handles merging. + raise Exception('Multiple targets specified the asset path: %s' % + apk_path) + except KeyError: + build_utils.AddToZipHermetic(apk, apk_path, src_path=src_path, + compress=compress) + + +def _CreateAssetsList(path_tuples): + """Returns a newline-separated list of asset paths for the given paths.""" + dests = sorted(t[1] for t in path_tuples) + return '\n'.join(dests) + '\n' + + +def _AddNativeLibraries(out_apk, native_libs, android_abi, uncompress): + """Add native libraries to APK.""" + has_crazy_linker = any('android_linker' in os.path.basename(p) + for p in native_libs) + for path in native_libs: + basename = os.path.basename(path) + + compress = None + if (uncompress and os.path.splitext(basename)[1] == '.so' + and 'android_linker' not in basename + and (not has_crazy_linker or 'clang_rt' not in basename) + and (not has_crazy_linker or 'crashpad_handler' not in basename)): + compress = False + # Add prefix to prevent android install from extracting upon install. + if has_crazy_linker: + basename = 'crazy.' + basename + + apk_path = 'lib/%s/%s' % (android_abi, basename) + build_utils.AddToZipHermetic(out_apk, + apk_path, + src_path=path, + compress=compress) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + native_libs = sorted(options.native_libs) + + # Include native libs in the depfile_deps since GN doesn't know about the + # dependencies when is_component_build=true. 
+ depfile_deps = list(native_libs) + + secondary_native_libs = [] + if options.secondary_native_libs: + secondary_native_libs = sorted(options.secondary_native_libs) + depfile_deps += secondary_native_libs + + if options.java_resources: + # Included via .build_config, so need to write it to depfile. + depfile_deps.extend(options.java_resources) + + assets = _ExpandPaths(options.assets) + uncompressed_assets = _ExpandPaths(options.uncompressed_assets) + + # Included via .build_config, so need to write it to depfile. + depfile_deps.extend(x[0] for x in assets) + depfile_deps.extend(x[0] for x in uncompressed_assets) + + # Bundle modules have a structure similar to APKs, except that resources + # are compiled in protobuf format (instead of binary xml), and that some + # files are located into different top-level directories, e.g.: + # AndroidManifest.xml -> manifest/AndroidManifest.xml + # classes.dex -> dex/classes.dex + # res/ -> res/ (unchanged) + # assets/ -> assets/ (unchanged) + # <other-file> -> root/<other-file> + # + # Hence, the following variables are used to control the location of files in + # the final archive. + if options.format == 'bundle-module': + apk_manifest_dir = 'manifest/' + apk_root_dir = 'root/' + apk_dex_dir = 'dex/' + else: + apk_manifest_dir = '' + apk_root_dir = '' + apk_dex_dir = '' + + # Targets generally do not depend on apks, so no need for only_if_changed. + with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f: + with zipfile.ZipFile(options.resource_apk) as resource_apk, \ + zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED) as out_apk: + + def copy_resource(zipinfo, out_dir=''): + compress = zipinfo.compress_type != zipfile.ZIP_STORED + build_utils.AddToZipHermetic( + out_apk, + out_dir + zipinfo.filename, + data=resource_apk.read(zipinfo.filename), + compress=compress) + + # Make assets come before resources in order to maintain the same file + # ordering as GYP / aapt. 
http://crbug.com/561862 + resource_infos = resource_apk.infolist() + + # 1. AndroidManifest.xml + copy_resource( + resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir) + + # 2. Assets + if options.write_asset_list: + data = _CreateAssetsList(itertools.chain(assets, uncompressed_assets)) + build_utils.AddToZipHermetic(out_apk, 'assets/assets_list', data=data) + + _AddAssets(out_apk, assets, disable_compression=False) + _AddAssets(out_apk, uncompressed_assets, disable_compression=True) + + # 3. Dex files + if options.dex_file and options.dex_file.endswith('.zip'): + with zipfile.ZipFile(options.dex_file, 'r') as dex_zip: + for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')): + build_utils.AddToZipHermetic( + out_apk, + apk_dex_dir + dex, + data=dex_zip.read(dex), + compress=not options.uncompress_dex) + elif options.dex_file: + build_utils.AddToZipHermetic( + out_apk, + apk_dex_dir + 'classes.dex', + src_path=options.dex_file, + compress=not options.uncompress_dex) + + # 4. Native libraries. + _AddNativeLibraries(out_apk, native_libs, options.android_abi, + options.uncompress_shared_libraries) + + if options.secondary_android_abi: + _AddNativeLibraries(out_apk, secondary_native_libs, + options.secondary_android_abi, + options.uncompress_shared_libraries) + + for name in sorted(options.native_lib_placeholders): + # Note: Empty libs files are ignored by md5check (can cause issues + # with stale builds when the only change is adding/removing + # placeholders). + apk_path = 'lib/%s/%s' % (options.android_abi, name) + build_utils.AddToZipHermetic(out_apk, apk_path, data='') + + for name in sorted(options.secondary_native_lib_placeholders): + # Note: Empty libs files are ignored by md5check (can cause issues + # with stale builds when the only change is adding/removing + # placeholders). + apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name) + build_utils.AddToZipHermetic(out_apk, apk_path, data='') + + # 5. 
Resources + for info in resource_infos: + if info.filename != 'AndroidManifest.xml': + copy_resource(info) + + # 6. Java resources that should be accessible via + # Class.getResourceAsStream(), in particular parts of Emma jar. + # Prebuilt jars may contain class files which we shouldn't include. + for java_resource in options.java_resources: + with zipfile.ZipFile(java_resource, 'r') as java_resource_jar: + for apk_path in java_resource_jar.namelist(): + apk_path_lower = apk_path.lower() + + if apk_path_lower.startswith('meta-inf/'): + continue + if apk_path_lower.endswith('/'): + continue + if apk_path_lower.endswith('.class'): + continue + + build_utils.AddToZipHermetic( + out_apk, + apk_root_dir + apk_path, + data=java_resource_jar.read(apk_path)) + + if options.format == 'apk': + finalize_apk.FinalizeApk(options.apksigner_path, options.zipalign_path, + f.name, f.name, options.key_path, + options.key_passwd, options.key_name) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, + options.output_apk, + inputs=depfile_deps, + add_pydeps=False) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/apkbuilder.pydeps b/deps/v8/build/android/gyp/apkbuilder.pydeps new file mode 100644 index 0000000000..3ae03319c9 --- /dev/null +++ b/deps/v8/build/android/gyp/apkbuilder.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/apkbuilder.pydeps build/android/gyp/apkbuilder.py +../../gn_helpers.py +apkbuilder.py +finalize_apk.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/assert_static_initializers.py b/deps/v8/build/android/gyp/assert_static_initializers.py new file mode 100755 index 0000000000..019baface1 --- /dev/null +++ b/deps/v8/build/android/gyp/assert_static_initializers.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python +# Copyright 2017 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Checks the number of static initializers in an APK's library.""" + +import argparse +import os +import re +import subprocess +import sys +import tempfile +import zipfile + +from util import build_utils + +_DUMP_STATIC_INITIALIZERS_PATH = os.path.join(build_utils.DIR_SOURCE_ROOT, + 'tools', 'linux', + 'dump-static-initializers.py') + + +def _RunReadelf(so_path, options, tool_prefix=''): + return subprocess.check_output([tool_prefix + 'readelf'] + options + + [so_path]) + + +def _ParseLibBuildId(so_path, tool_prefix): + """Returns the Build ID of the given native library.""" + stdout = _RunReadelf(so_path, ['-n'], tool_prefix) + match = re.search(r'Build ID: (\w+)', stdout) + return match.group(1) if match else None + + +def _VerifyLibBuildIdsMatch(tool_prefix, *so_files): + if len(set(_ParseLibBuildId(f, tool_prefix) for f in so_files)) > 1: + raise Exception('Found differing build ids in output directory and apk. ' + 'Your output directory is likely stale.') + + +def _GetStaticInitializers(so_path, tool_prefix): + output = subprocess.check_output( + [_DUMP_STATIC_INITIALIZERS_PATH, '-d', so_path, '-t', tool_prefix]) + summary = re.search(r'Found \d+ static initializers in (\d+) files.', output) + return output.splitlines()[:-1], int(summary.group(1)) + + +def _PrintDumpSIsCount(apk_so_name, unzipped_so, out_dir, tool_prefix): + lib_name = os.path.basename(apk_so_name).replace('crazy.', '') + so_with_symbols_path = os.path.join(out_dir, 'lib.unstripped', lib_name) + if not os.path.exists(so_with_symbols_path): + raise Exception('Unstripped .so not found. Looked here: %s', + so_with_symbols_path) + _VerifyLibBuildIdsMatch(tool_prefix, unzipped_so, so_with_symbols_path) + sis, _ = _GetStaticInitializers(so_with_symbols_path, tool_prefix) + for si in sis: + print si + + +# Mostly copied from //infra/scripts/legacy/scripts/slave/chromium/sizes.py. 
+def _ReadInitArray(so_path, tool_prefix): + stdout = _RunReadelf(so_path, ['-SW'], tool_prefix) + # Matches: .ctors PROGBITS 000000000516add0 5169dd0 000010 00 WA 0 0 8 + match = re.search(r'\.init_array.*$', stdout, re.MULTILINE) + if not match: + raise Exception('Did not find section: .init_array in:\n' + stdout) + size_str = re.split(r'\W+', match.group(0))[5] + return int(size_str, 16) + + +def _CountStaticInitializers(so_path, tool_prefix): + # Find the number of files with at least one static initializer. + # First determine if we're 32 or 64 bit + stdout = _RunReadelf(so_path, ['-h'], tool_prefix) + elf_class_line = re.search('Class:.*$', stdout, re.MULTILINE).group(0) + elf_class = re.split(r'\W+', elf_class_line)[1] + if elf_class == 'ELF32': + word_size = 4 + else: + word_size = 8 + + # Then find the number of files with global static initializers. + # NOTE: this is very implementation-specific and makes assumptions + # about how compiler and linker implement global static initializers. + init_array_size = _ReadInitArray(so_path, tool_prefix) + return init_array_size / word_size + + +def _AnalyzeStaticInitializers(apk_filename, tool_prefix, dump_sis, out_dir, + ignored_libs): + # Static initializer counting mostly copies logic in + # infra/scripts/legacy/scripts/slave/chromium/sizes.py. + with zipfile.ZipFile(apk_filename) as z: + so_files = [ + f for f in z.infolist() if f.filename.endswith('.so') + and f.file_size > 0 and os.path.basename(f.filename) not in ignored_libs + ] + # Skip checking static initializers for secondary abi libs. They will be + # checked by 32-bit bots. This avoids the complexity of finding 32 bit .so + # files in the output directory in 64 bit builds. 
+ has_64 = any('64' in f.filename for f in so_files) + files_to_check = [f for f in so_files if not has_64 or '64' in f.filename] + + si_count = 0 + for f in files_to_check: + with tempfile.NamedTemporaryFile() as temp: + temp.write(z.read(f)) + temp.flush() + si_count += _CountStaticInitializers(temp.name, tool_prefix) + if dump_sis: + # Print count and list of SIs reported by dump-static-initializers.py. + # Doesn't work well on all archs (particularly arm), which is why + # the readelf method is used for tracking SI counts. + _PrintDumpSIsCount(f.filename, temp.name, out_dir, tool_prefix) + return si_count + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--touch', help='File to touch upon success') + parser.add_argument('--tool-prefix', required=True, + help='Prefix for nm and friends') + parser.add_argument('--expected-count', required=True, type=int, + help='Fail if number of static initializers is not ' + 'equal to this value.') + parser.add_argument('apk', help='APK file path.') + args = parser.parse_args() + + #TODO(crbug.com/838414): add support for files included via loadable_modules. + ignored_libs = ['libarcore_sdk_c.so'] + + si_count = _AnalyzeStaticInitializers(args.apk, args.tool_prefix, False, '.', + ignored_libs) + if si_count != args.expected_count: + print 'Expected {} static initializers, but found {}.'.format( + args.expected_count, si_count) + if args.expected_count > si_count: + print 'You have removed one or more static initializers. Thanks!' 
+ print 'To fix the build, update the expectation in:' + print ' //chrome/android/static_initializers.gni' + else: + print 'Dumping static initializers via dump-static-initializers.py:' + sys.stdout.flush() + _AnalyzeStaticInitializers(args.apk, args.tool_prefix, True, '.', + ignored_libs) + print + print 'If the above list is not useful, consider listing them with:' + print ' //tools/binary_size/diagnose_bloat.py' + print + print 'For more information:' + print (' https://chromium.googlesource.com/chromium/src/+/master/docs/' + 'static_initializers.md') + sys.exit(1) + + if args.touch: + open(args.touch, 'w') + + +if __name__ == '__main__': + main() diff --git a/deps/v8/build/android/gyp/assert_static_initializers.pydeps b/deps/v8/build/android/gyp/assert_static_initializers.pydeps new file mode 100644 index 0000000000..e031668f46 --- /dev/null +++ b/deps/v8/build/android/gyp/assert_static_initializers.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/assert_static_initializers.pydeps build/android/gyp/assert_static_initializers.py +../../gn_helpers.py +assert_static_initializers.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/bundletool.py b/deps/v8/build/android/gyp/bundletool.py new file mode 100755 index 0000000000..ac9561e768 --- /dev/null +++ b/deps/v8/build/android/gyp/bundletool.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Simple wrapper around the bundletool tool. + +Bundletool is distributed as a versioned jar file. 
This script abstracts the +location and version of this jar file, as well as the JVM invokation.""" + +import logging +import os +import subprocess +import sys + +# Assume this is stored under build/android/gyp/ +BUNDLETOOL_DIR = os.path.abspath(os.path.join( + __file__, '..', '..', '..', '..', 'third_party', 'android_build_tools', + 'bundletool')) + +BUNDLETOOL_VERSION = '0.9.0' + +BUNDLETOOL_JAR_PATH = os.path.join( + BUNDLETOOL_DIR, 'bundletool-all-%s.jar' % BUNDLETOOL_VERSION) + +def RunBundleTool(args): + args = ['java', '-jar', BUNDLETOOL_JAR_PATH] + args + logging.debug(' '.join(args)) + subprocess.check_call(args) + +if __name__ == '__main__': + RunBundleTool(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/bytecode_processor.py b/deps/v8/build/android/gyp/bytecode_processor.py new file mode 100755 index 0000000000..020b52f5f0 --- /dev/null +++ b/deps/v8/build/android/gyp/bytecode_processor.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# Copyright 2017 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
def _AddSwitch(parser, val):
  # Registers a tri-state flag: when present on the command line the parsed
  # value is the flag string itself (e.g. '--enable-assert'); when absent it
  # is the literal '--disabled'. Either value is forwarded verbatim to the
  # java wrapper script, which interprets it.
  parser.add_argument(
      val, action='store_const', default='--disabled', const=val)


def main(argv):
  """Parses arguments and invokes the java bytecode-rewriter wrapper script.

  Expands any @FileArgs, parses the GN lists for the classpath arguments,
  then builds the positional argument list expected by the wrapper script:
  input/output jars, flags, then each jar list preceded by its length so the
  consumer can partition the flat argv.
  """
  argv = build_utils.ExpandFileArgs(argv[1:])
  parser = argparse.ArgumentParser()
  parser.add_argument('--script', required=True,
                      help='Path to the java binary wrapper script.')
  parser.add_argument('--input-jar', required=True)
  parser.add_argument('--output-jar', required=True)
  parser.add_argument('--direct-classpath-jars', required=True)
  parser.add_argument('--sdk-classpath-jars', required=True)
  parser.add_argument('--extra-classpath-jars', dest='extra_jars',
                      action='append', default=[],
                      help='Extra inputs, passed last to the binary script.')
  parser.add_argument('-v', '--verbose', action='store_true')
  _AddSwitch(parser, '--is-prebuilt')
  _AddSwitch(parser, '--enable-custom-resources')
  _AddSwitch(parser, '--enable-assert')
  _AddSwitch(parser, '--enable-thread-annotations')
  _AddSwitch(parser, '--enable-check-class-path')
  parser.add_argument(
      '--split-compat-class-names',
      action='append',
      default=[],
      help='Names of classes that need to be made SplitCompat-enabled.')
  args = parser.parse_args(argv)

  # The classpath arguments are GN lists serialized as strings; both are
  # required to be non-empty for the rewriter to make sense.
  sdk_jars = build_utils.ParseGnList(args.sdk_classpath_jars)
  assert len(sdk_jars) > 0

  direct_jars = build_utils.ParseGnList(args.direct_classpath_jars)
  assert len(direct_jars) > 0

  extra_classpath_jars = []
  for a in args.extra_jars:
    extra_classpath_jars.extend(build_utils.ParseGnList(a))

  split_compat_class_names = build_utils.ParseGnList(
      args.split_compat_class_names)

  if args.verbose:
    verbose = '--verbose'
  else:
    verbose = '--not-verbose'

  # Jar lists are length-prefixed so the wrapper can split the flat argv
  # back into the three separate lists.
  cmd = ([
      args.script, args.input_jar, args.output_jar, verbose, args.is_prebuilt,
      args.enable_assert, args.enable_custom_resources,
      args.enable_thread_annotations, args.enable_check_class_path,
      str(len(sdk_jars))
  ] + sdk_jars + [str(len(direct_jars))] + direct_jars + [
      str(len(split_compat_class_names))
  ] + split_compat_class_names + extra_classpath_jars)
  subprocess.check_call(cmd)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
+""" + +import argparse +import collections +import contextlib +import multiprocessing.pool +import os +import re +import shutil +import subprocess +import sys +import tempfile +import zipfile +from xml.etree import ElementTree + +from util import build_utils +from util import resource_utils + +# Name of environment variable that can be used to force this script to +# put temporary resource files into specific sub-directories, instead of +# temporary ones. +_ENV_DEBUG_VARIABLE = 'ANDROID_DEBUG_TEMP_RESOURCES_DIR' + +# Import jinja2 from third_party/jinja2 +sys.path.insert(1, os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party')) +from jinja2 import Template # pylint: disable=F0401 + +# Pngs that we shouldn't convert to webp. Please add rationale when updating. +_PNG_WEBP_BLACKLIST_PATTERN = re.compile('|'.join([ + # Crashes on Galaxy S5 running L (https://crbug.com/807059). + r'.*star_gray\.png', + # Android requires pngs for 9-patch images. + r'.*\.9\.png', + # Daydream requires pngs for icon files. + r'.*daydream_icon_.*\.png'])) + + +def _ListToDictionary(lst, separator): + """Splits each element of the passed-in |lst| using |separator| and creates + dictionary treating first element of the split as the key and second as the + value.""" + return dict(item.split(separator, 1) for item in lst) + + +def _ParseArgs(args): + """Parses command line options. 
def _ParseArgs(args):
  """Parses command line options.

  Args:
    args: Command-line argument list (without the program name).
  Returns:
    An options object as from argparse.ArgumentParser.parse_args()
  """
  # The shared resource-script parser provides the common input/output
  # argument groups; only compile_resources-specific flags are added here.
  parser, input_opts, output_opts = resource_utils.ResourceArgsParser()

  input_opts.add_argument(
      '--aapt2-path', required=True, help='Path to the Android aapt2 tool.')
  input_opts.add_argument('--android-manifest', required=True,
                          help='AndroidManifest.xml path')
  input_opts.add_argument(
      '--shared-resources',
      action='store_true',
      help='Make all resources in R.java non-final and allow the resource IDs '
      'to be reset to a different package index when the apk is loaded by '
      'another application at runtime.')

  input_opts.add_argument(
      '--app-as-shared-lib',
      action='store_true',
      help='Same as --shared-resources, but also ensures all resource IDs are '
      'directly usable from the APK loaded as an application.')

  input_opts.add_argument(
      '--package-id',
      help='Custom package ID for resources (instead of 0x7f). Cannot be used '
      'with --shared-resources.')

  input_opts.add_argument(
      '--package-name-to-id-mapping',
      help='List containing mapping from package name to package IDs that will '
      'be assigned.')

  input_opts.add_argument(
      '--package-name',
      help='Package name that will be used to determine package ID.')

  input_opts.add_argument(
      '--arsc-package-name', help='Package name to use for resources.arsc file')

  input_opts.add_argument(
      '--shared-resources-whitelist',
      help='An R.txt file acting as a whitelist for resources that should be '
      'non-final and have their package ID changed at runtime in R.java. '
      'Implies and overrides --shared-resources.')

  input_opts.add_argument(
      '--shared-resources-whitelist-locales',
      default='[]',
      help='Optional GN-list of locales. If provided, all strings corresponding'
      ' to this locale list will be kept in the final output for the '
      'resources identified through --shared-resources-whitelist, even '
      'if --locale-whitelist is being used.')

  input_opts.add_argument(
      '--use-resource-ids-path',
      help='Use resource IDs generated by aapt --emit-ids')

  input_opts.add_argument('--proto-format', action='store_true',
                          help='Compile resources to protocol buffer format.')

  input_opts.add_argument('--support-zh-hk', action='store_true',
                          help='Use zh-rTW resources for zh-rHK.')

  input_opts.add_argument('--debuggable',
                          action='store_true',
                          help='Whether to add android:debuggable="true"')

  input_opts.add_argument('--version-code', help='Version code for apk.')
  input_opts.add_argument('--version-name', help='Version name for apk.')

  input_opts.add_argument(
      '--no-compress',
      help='disables compression for the given comma-separated list of '
      'extensions')

  input_opts.add_argument(
      '--locale-whitelist',
      default='[]',
      help='GN list of languages to include. All other language configs will '
      'be stripped out. List may include a combination of Android locales '
      'or Chrome locales.')

  input_opts.add_argument('--resource-blacklist-regex', default='',
                          help='Do not include matching drawables.')

  input_opts.add_argument(
      '--resource-blacklist-exceptions',
      default='[]',
      help='GN list of globs that say which blacklisted images to include even '
      'when --resource-blacklist-regex is set.')

  input_opts.add_argument('--png-to-webp', action='store_true',
                          help='Convert png files to webp format.')

  input_opts.add_argument('--webp-binary', default='',
                          help='Path to the cwebp binary.')

  input_opts.add_argument('--no-xml-namespaces',
                          action='store_true',
                          help='Whether to strip xml namespaces from processed '
                          'xml resources')
  input_opts.add_argument(
      '--resources-config-path', help='Path to aapt2 resources config file.')
  input_opts.add_argument(
      '--optimized-resources-path',
      help='Output for `aapt2 optimize` (also enables the step).')

  output_opts.add_argument('--apk-path', required=True,
                           help='Path to output (partial) apk.')

  output_opts.add_argument('--apk-info-path', required=True,
                           help='Path to output info file for the partial apk.')

  output_opts.add_argument('--srcjar-out',
                           help='Path to srcjar to contain generated R.java.')

  output_opts.add_argument('--r-text-out',
                           help='Path to store the generated R.txt file.')

  output_opts.add_argument('--proguard-file',
                           help='Path to proguard.txt generated file')

  output_opts.add_argument(
      '--proguard-file-main-dex',
      help='Path to proguard.txt generated file for main dex')

  output_opts.add_argument(
      '--emit-ids-out',
      help=
      'Path to file produced by aapt2 --emit-ids (for use with --stable-ids)')

  options = parser.parse_args(args)

  resource_utils.HandleCommonOptions(options)

  # GN lists arrive serialized as strings; parse them into real Python lists
  # before any other code reads them.
  options.locale_whitelist = build_utils.ParseGnList(options.locale_whitelist)
  options.shared_resources_whitelist_locales = build_utils.ParseGnList(
      options.shared_resources_whitelist_locales)
  options.resource_blacklist_exceptions = build_utils.ParseGnList(
      options.resource_blacklist_exceptions)

  # These two flags select mutually exclusive aapt2 link modes
  # (--shared-lib vs --app-as-shared-lib).
  if options.shared_resources and options.app_as_shared_lib:
    raise Exception('Only one of --app-as-shared-lib or --shared-resources '
                    'can be used.')

  # Replace the raw 'name=id' list with a real dict for later lookups.
  if options.package_name_to_id_mapping:
    package_names_list = build_utils.ParseGnList(
        options.package_name_to_id_mapping)
    options.package_name_to_id_mapping = _ListToDictionary(
        package_names_list, '=')

  return options
options.shared_resources_whitelist_locales) + options.resource_blacklist_exceptions = build_utils.ParseGnList( + options.resource_blacklist_exceptions) + + if options.shared_resources and options.app_as_shared_lib: + raise Exception('Only one of --app-as-shared-lib or --shared-resources ' + 'can be used.') + + if options.package_name_to_id_mapping: + package_names_list = build_utils.ParseGnList( + options.package_name_to_id_mapping) + options.package_name_to_id_mapping = _ListToDictionary( + package_names_list, '=') + + return options + + +def _SortZip(original_path, sorted_path): + """Generate new zip archive by sorting all files in the original by name.""" + with zipfile.ZipFile(sorted_path, 'w') as sorted_zip, \ + zipfile.ZipFile(original_path, 'r') as original_zip: + for info in sorted(original_zip.infolist(), key=lambda i: i.filename): + sorted_zip.writestr(info, original_zip.read(info)) + + +def _IterFiles(root_dir): + for root, _, files in os.walk(root_dir): + for f in files: + yield os.path.join(root, f) + + +def _DuplicateZhResources(resource_dirs): + """Duplicate Taiwanese resources into Hong-Kong specific directory.""" + renamed_paths = dict() + for resource_dir in resource_dirs: + # We use zh-TW resources for zh-HK (if we have zh-TW resources). + for path in _IterFiles(resource_dir): + if 'zh-rTW' in path: + hk_path = path.replace('zh-rTW', 'zh-rHK') + build_utils.MakeDirectory(os.path.dirname(hk_path)) + shutil.copyfile(path, hk_path) + renamed_paths[os.path.relpath(hk_path, resource_dir)] = os.path.relpath( + path, resource_dir) + return renamed_paths + + +def _RenameLocaleResourceDirs(resource_dirs): + """Rename locale resource directories into standard names when necessary. + + This is necessary to deal with the fact that older Android releases only + support ISO 639-1 two-letter codes, and sometimes even obsolete versions + of them. + + In practice it means: + * 3-letter ISO 639-2 qualifiers are renamed under a corresponding + 2-letter one. E.g. 
def _RenameLocaleResourceDirs(resource_dirs):
  """Rename locale resource directories into standard names when necessary.

  This is necessary to deal with the fact that older Android releases only
  support ISO 639-1 two-letter codes, and sometimes even obsolete versions
  of them.

  In practice it means:
    * 3-letter ISO 639-2 qualifiers are renamed under a corresponding
      2-letter one. E.g. for Filipino, strings under values-fil/ will be moved
      to a new corresponding values-tl/ sub-directory.

    * Modern ISO 639-1 codes will be renamed to their obsolete variant
      for Indonesian, Hebrew and Yiddish (e.g. 'values-in/ -> values-id/').

    * Norwegian macrolanguage strings will be renamed to Bokmal (main
      Norway language). See http://crbug.com/920960. In practice this
      means that 'values-no/ -> values-nb/' unless 'values-nb/' already
      exists.

    * BCP 47 language tags will be renamed to an equivalent ISO 639-1
      locale qualifier if possible (e.g. 'values-b+en+US/ -> values-en-rUS').
      Though this is not necessary at the moment, because no third-party
      package that Chromium links against uses these for the current list of
      supported locales, this may change when the list is extended in the
      future).

  Args:
    resource_dirs: list of top-level resource directories.
  Returns:
    A dictionary mapping renamed paths to their original location
    (e.g. '.../values-tl/strings.xml' -> ' .../values-fil/strings.xml').
  """
  renamed_paths = dict()
  for resource_dir in resource_dirs:
    for path in _IterFiles(resource_dir):
      locale = resource_utils.FindLocaleInStringResourceFilePath(path)
      if not locale:
        continue
      # Round-trip the qualifier through the Chromium locale name; if the
      # Android name that comes back differs, the directory needs renaming.
      cr_locale = resource_utils.ToChromiumLocaleName(locale)
      if not cr_locale:
        continue  # Unsupported Android locale qualifier!?
      locale2 = resource_utils.ToAndroidLocaleName(cr_locale)
      if locale != locale2:
        path2 = path.replace('/values-%s/' % locale, '/values-%s/' % locale2)
        if path == path2:
          raise Exception('Could not substitute locale %s for %s in %s' %
                          (locale, locale2, path))
        if os.path.exists(path2):
          # This happens sometimes, e.g. some libraries provide both
          # values-nb/ and values-no/ with the same content.
          continue
        build_utils.MakeDirectory(os.path.dirname(path2))
        shutil.move(path, path2)
        renamed_paths[os.path.relpath(path2, resource_dir)] = os.path.relpath(
            path, resource_dir)
  return renamed_paths
def _ToAndroidLocales(locale_whitelist, support_zh_hk):
  """Converts the list of Chrome locales to Android config locale qualifiers.

  Args:
    locale_whitelist: A list of Chromium locale names.
    support_zh_hk: True if we need to support zh-HK by duplicating
      the zh-TW strings.
  Returns:
    A set of matching Android config locale qualifier names.
  Raises:
    Exception: If a Chromium locale cannot be mapped to an Android qualifier.
  """
  ret = set()
  for locale in locale_whitelist:
    locale = resource_utils.ToAndroidLocaleName(locale)
    # Regional qualifiers must use the '-r' form (e.g. 'en-rUS'); a plain
    # dash indicates an unconvertible name.
    if locale is None or ('-' in locale and '-r' not in locale):
      raise Exception('Unsupported Chromium locale name: %s' % locale)
    ret.add(locale)
    # Always keep non-regional fall-backs.
    language = locale.split('-')[0]
    ret.add(language)

  # We don't actually support zh-HK in Chrome on Android, but we mimic the
  # native side behavior where we use zh-TW resources when the locale is set
  # to zh-HK. See https://crbug.com/780847.
  if support_zh_hk:
    assert not any('HK' in l for l in locale_whitelist), (
        'Remove special logic if zh-HK is now supported (crbug.com/780847).')
    ret.add('zh-rHK')
  # |ret| is already a set; the previous `return set(ret)` made a redundant
  # copy.
  return ret
http://crbug.com/289843 + """ + renamed_paths = dict() + for src_dir_name in os.listdir(res_root): + src_components = src_dir_name.split('-') + if src_components[0] != 'drawable' or 'mdpi' not in src_components: + continue + src_dir = os.path.join(res_root, src_dir_name) + if not os.path.isdir(src_dir): + continue + dst_components = [c for c in src_components if c != 'mdpi'] + assert dst_components != src_components + dst_dir_name = '-'.join(dst_components) + dst_dir = os.path.join(res_root, dst_dir_name) + build_utils.MakeDirectory(dst_dir) + for src_file_name in os.listdir(src_dir): + if not os.path.splitext(src_file_name)[1] in ('.png', '.webp'): + continue + src_file = os.path.join(src_dir, src_file_name) + dst_file = os.path.join(dst_dir, src_file_name) + assert not os.path.lexists(dst_file) + shutil.move(src_file, dst_file) + renamed_paths[os.path.relpath(dst_file, res_root)] = os.path.relpath( + src_file, res_root) + return renamed_paths + + +def _PackageIdFromOptions(options): + package_id = None + if options.package_id: + package_id = options.package_id + if options.package_name: + package_id = options.package_name_to_id_mapping.get(options.package_name) + if package_id is None: + raise Exception( + 'Package name %s is not present in package_name_to_id_mapping.' % + options.package_name) + return package_id + + +def _CreateLinkApkArgs(options): + """Create command-line arguments list to invoke 'aapt2 link'. + + Args: + options: The command-line options tuple. + Returns: + A list of strings corresponding to the command-line invokation for + the command, matching the arguments from |options|. 
def _CreateLinkApkArgs(options):
  """Create command-line arguments list to invoke 'aapt2 link'.

  Args:
    options: The command-line options tuple.
  Returns:
    A list of strings corresponding to the command-line invocation for
    the command, matching the arguments from |options|.
  """
  cmd = [
      options.aapt2_path,
      'link',
      '--version-code', options.version_code,
      '--version-name', options.version_name,
      '--auto-add-overlay',
      '--no-version-vectors',
  ]

  for include_path in options.include_resources:
    cmd.extend(['-I', include_path])
  if options.proguard_file:
    cmd.extend(['--proguard', options.proguard_file])
  if options.proguard_file_main_dex:
    cmd.extend(['--proguard-main-dex', options.proguard_file_main_dex])
  if options.emit_ids_out:
    cmd.extend(['--emit-ids', options.emit_ids_out])

  if options.no_compress:
    for ext in options.no_compress.split(','):
      cmd.extend(['-0', ext])

  # Note: only one of --proto-format, --shared-lib or --app-as-shared-lib
  # can be used with recent versions of aapt2.
  if options.proto_format:
    cmd.append('--proto-format')
  elif options.shared_resources:
    cmd.append('--shared-lib')

  if options.no_xml_namespaces:
    cmd.append('--no-xml-namespaces')

  package_id = _PackageIdFromOptions(options)
  if package_id is not None:
    cmd.extend(['--package-id', package_id, '--allow-reserved-package-id'])

  return cmd
def _FixManifest(options, temp_dir):
  """Fix the APK's AndroidManifest.xml.

  This adds any missing namespaces for 'android' and 'tools', and
  sets certains elements like 'platformBuildVersionCode' or
  'android:debuggable' depending on the content of |options|.

  Args:
    options: The command-line arguments tuple.
    temp_dir: A temporary directory where the fixed manifest will be written
      to.
  Returns:
    Tuple of:
      * Manifest path within |temp_dir|.
      * Original package_name (if different from arsc_package_name).
  """
  def maybe_extract_version(j):
    # Returns None when |j| has no binary manifest (i.e. it is not an SDK
    # jar), so it can be filtered out below.
    try:
      return resource_utils.ExtractBinaryManifestValues(options.aapt2_path, j)
    except build_utils.CalledProcessError:
      return None

  # Exactly one of the -I jars must be the Android SDK jar; its version
  # values are copied into the fixed manifest.
  android_sdk_jars = [j for j in options.include_resources
                      if os.path.basename(j) in ('android.jar',
                                                 'android_system.jar')]
  extract_all = [maybe_extract_version(j) for j in android_sdk_jars]
  successful_extractions = [x for x in extract_all if x]
  if len(successful_extractions) == 0:
    raise Exception(
        'Unable to find android SDK jar among candidates: %s'
        % ', '.join(android_sdk_jars))
  elif len(successful_extractions) > 1:
    raise Exception(
        'Found multiple android SDK jars among candidates: %s'
        % ', '.join(android_sdk_jars))
  version_code, version_name = successful_extractions.pop()[:2]

  debug_manifest_path = os.path.join(temp_dir, 'AndroidManifest.xml')
  doc, manifest_node, app_node = resource_utils.ParseAndroidManifest(
      options.android_manifest)

  manifest_node.set('platformBuildVersionCode', version_code)
  manifest_node.set('platformBuildVersionName', version_name)

  # Remember the original package so the caller can pass it to
  # --rename-manifest-package after the arsc package name is substituted.
  orig_package = manifest_node.get('package')
  if options.arsc_package_name:
    manifest_node.set('package', options.arsc_package_name)

  if options.debuggable:
    app_node.set('{%s}%s' % (resource_utils.ANDROID_NAMESPACE, 'debuggable'),
                 'true')

  with open(debug_manifest_path, 'w') as debug_manifest:
    debug_manifest.write(ElementTree.tostring(doc.getroot(), encoding='UTF-8'))

  return debug_manifest_path, orig_package
+ resource_blacklist_regex: A regular expression describing all resources + to exclude, except if they are mip-maps, or if they are listed + in |resource_blacklist_exceptions|. + resource_blacklist_exceptions: A list of glob patterns corresponding + to exceptions to the |resource_blacklist_regex|. + Returns: + A lambda that takes a path, and returns true if the corresponding file + must be kept. + """ + naive_predicate = lambda path: os.path.basename(path)[0] != '.' + if resource_blacklist_regex == '': + # Do not extract dotfiles (e.g. ".gitkeep"). aapt ignores them anyways. + return naive_predicate + + if resource_blacklist_regex != '': + # A simple predicate that only removes (returns False for) paths covered by + # the blacklist regex, except if they are mipmaps, or listed as exceptions. + naive_predicate = lambda path: ( + not re.search(resource_blacklist_regex, path) or + re.search(r'[/-]mipmap[/-]', path) or + build_utils.MatchesGlob(path, resource_blacklist_exceptions)) + + # Build a set of all names from drawables kept by naive_predicate(). + # Used later to ensure that we never exclude drawables from densities + # that are filtered-out by naive_predicate(). + non_filtered_drawables = set() + for resource_dir in resource_dirs: + for path in _IterFiles(resource_dir): + if re.search(r'[/-]drawable[/-]', path) and naive_predicate(path): + non_filtered_drawables.add(_ResourceNameFromPath(path)) + + # NOTE: Defined as a function, instead of a lambda to avoid the + # auto-formatter to put this on a very long line that overflows. 
def _CompileDeps(aapt2_path, dep_subdirs, temp_dir):
  """Runs 'aapt2 compile' on each dependency resource directory.

  Args:
    aapt2_path: Path to the aapt2 binary.
    dep_subdirs: Resource directories, one per dependency.
    temp_dir: Scratch directory; partial zips are written under
      |temp_dir|/partials.
  Returns:
    A list of paths to name-sorted partial .zip files, one per entry of
    |dep_subdirs|, suitable for passing to 'aapt2 link -R'.
  """
  partials_dir = os.path.join(temp_dir, 'partials')
  build_utils.MakeDirectory(partials_dir)
  partial_compile_command = [
      aapt2_path,
      'compile',
      # TODO(wnwen): Turn this on once aapt2 forces 9-patch to be crunched.
      # '--no-crunch',
  ]
  # Each aapt2 invocation is independent, so compile them concurrently.
  pool = multiprocessing.pool.ThreadPool(10)
  def compile_partial(directory):
    dirname = os.path.basename(directory)
    partial_path = os.path.join(partials_dir, dirname + '.zip')
    compile_command = (partial_compile_command +
                       ['--dir', directory, '-o', partial_path])
    build_utils.CheckOutput(
        compile_command,
        stderr_filter=lambda output:
            build_utils.FilterLines(
                output, r'ignoring configuration .* for styleable'))

    # Sorting the files in the partial ensures deterministic output from the
    # aapt2 link step which uses order of files in the partial.
    sorted_partial_path = os.path.join(partials_dir, dirname + '.sorted.zip')
    _SortZip(partial_path, sorted_partial_path)

    return sorted_partial_path

  partials = pool.map(compile_partial, dep_subdirs)
  pool.close()
  pool.join()
  return partials
def _CreateResourceInfoFile(
    renamed_paths, apk_info_path, dependencies_res_zips):
  """Merges dependency .info files and rename records into one .info file.

  Args:
    renamed_paths: Dict mapping each destination (new) relative path to the
      source (original) relative path for files renamed by this script.
    apk_info_path: Path of the .info file to write (atomically).
    dependencies_res_zips: Dependency resource zips; for each, a sibling
      '<zip>.info' file is merged in when it exists.
  """
  lines = set()
  for zip_file in dependencies_res_zips:
    zip_info_file_path = zip_file + '.info'
    if os.path.exists(zip_info_file_path):
      with open(zip_info_file_path, 'r') as zip_info_file:
        lines.update(zip_info_file.readlines())
  # .items() (instead of the Python-2-only .iteritems()) behaves identically
  # here and keeps the script Python 3 compatible.
  for dest, source in renamed_paths.items():
    lines.add('Rename:{},{}\n'.format(dest, source))
  with build_utils.AtomicOutput(apk_info_path) as info_file:
    info_file.writelines(sorted(lines))
def _PackageApk(options, dep_subdirs, temp_dir, gen_dir, r_txt_path):
  """Compile resources with aapt2 and generate intermediate .ap_ file.

  Args:
    options: The command-line options tuple. E.g. the generated apk
      will be written to |options.apk_path|.
    dep_subdirs: The list of directories where dependency resource zips
      were extracted (its content will be altered by this function).
    temp_dir: A temporary directory.
    gen_dir: Another temp directory where some intermediate files are
      generated.
    r_txt_path: The path where the R.txt file will written to.
  """
  # Track every on-disk rename so it can be recorded in the .info file at
  # the end (dest -> source relative paths).
  renamed_paths = dict()
  renamed_paths.update(_DuplicateZhResources(dep_subdirs))
  renamed_paths.update(_RenameLocaleResourceDirs(dep_subdirs))

  _RemoveUnwantedLocalizedStrings(dep_subdirs, options)

  # Create a function that selects which resource files should be packaged
  # into the final output. Any file that does not pass the predicate will
  # be removed below.
  keep_predicate = _CreateKeepPredicate(dep_subdirs,
                                        options.resource_blacklist_regex,
                                        options.resource_blacklist_exceptions)
  png_paths = []
  for directory in dep_subdirs:
    for f in _IterFiles(directory):
      if not keep_predicate(f):
        os.remove(f)
      elif f.endswith('.png'):
        png_paths.append((f, directory))
  if png_paths and options.png_to_webp:
    renamed_paths.update(_ConvertToWebP(options.webp_binary, png_paths))
  for directory in dep_subdirs:
    renamed_paths.update(_MoveImagesToNonMdpiFolders(directory))

  link_command = _CreateLinkApkArgs(options)
  # TODO(digit): Is this below actually required for R.txt generation?
  link_command += ['--java', gen_dir]

  fixed_manifest, orig_package = _FixManifest(options, temp_dir)
  link_command += [
      '--manifest', fixed_manifest, '--rename-manifest-package', orig_package
  ]

  partials = _CompileDeps(options.aapt2_path, dep_subdirs, temp_dir)
  for partial in partials:
    link_command += ['-R', partial]

  # Creates a .zip with AndroidManifest.xml, resources.arsc, res/*
  # Also creates R.txt
  with build_utils.AtomicOutput(options.apk_path) as unoptimized, \
      build_utils.AtomicOutput(r_txt_path) as r_txt, \
      _MaybeCreateStableIdsFile(options) as stable_ids:
    if stable_ids:
      link_command += ['--stable-ids', stable_ids.name]
    link_command += ['-o', unoptimized.name]
    link_command += ['--output-text-symbols', r_txt.name]
    build_utils.CheckOutput(
        link_command, print_stdout=False, print_stderr=False)

    # Optimization must run while the unoptimized temp files still exist,
    # i.e. inside the AtomicOutput contexts.
    if options.optimized_resources_path:
      with build_utils.AtomicOutput(options.optimized_resources_path) as opt:
        _OptimizeApk(opt.name, options, temp_dir, unoptimized.name, r_txt.name)

  _CreateResourceInfoFile(
      renamed_paths, options.apk_info_path, options.dependencies_res_zips)
def _OptimizeApk(output, options, temp_dir, unoptimized_apk_path, r_txt_path):
  """Optimize intermediate .ap_ file with aapt2.

  Args:
    output: Path to write to.
    options: The command-line options.
    temp_dir: A temporary directory.
    unoptimized_apk_path: path of the apk to optimize.
    r_txt_path: path to the R.txt file of the unoptimized apk.
  """
  # Resources of type ID are references to UI elements/views. They are used
  # by UI automation testing frameworks. They are kept in so that they dont
  # break tests, even though they may not actually be used during runtime.
  # See https://crbug.com/900993
  id_resources = _ExtractIdResources(r_txt_path)
  gen_config_path = os.path.join(temp_dir, 'aapt2.config')
  # Start from the user-provided config (if any), then append a
  # '#no_obfuscate' line per ID resource.
  if options.resources_config_path:
    shutil.copyfile(options.resources_config_path, gen_config_path)
  with open(gen_config_path, 'a+') as config:
    for resource in id_resources:
      config.write('{}#no_obfuscate\n'.format(resource))

  # Optimize the resources.arsc file by obfuscating resource names and only
  # allow usage via R.java constant.
  optimize_command = [
      options.aapt2_path,
      'optimize',
      '--enable-resource-obfuscation',
      '-o',
      output,
      '--resources-config-path',
      gen_config_path,
      unoptimized_apk_path,
  ]
  build_utils.CheckOutput(
      optimize_command, print_stdout=False, print_stderr=False)
See + # https://crbug.com/900993 + id_resources = _ExtractIdResources(r_txt_path) + gen_config_path = os.path.join(temp_dir, 'aapt2.config') + if options.resources_config_path: + shutil.copyfile(options.resources_config_path, gen_config_path) + with open(gen_config_path, 'a+') as config: + for resource in id_resources: + config.write('{}#no_obfuscate\n'.format(resource)) + + # Optimize the resources.arsc file by obfuscating resource names and only + # allow usage via R.java constant. + optimize_command = [ + options.aapt2_path, + 'optimize', + '--enable-resource-obfuscation', + '-o', + output, + '--resources-config-path', + gen_config_path, + unoptimized_apk_path, + ] + build_utils.CheckOutput( + optimize_command, print_stdout=False, print_stderr=False) + + +def _ExtractIdResources(rtxt_path): + """Extract resources of type ID from the R.txt file + + Args: + rtxt_path: Path to R.txt file with all the resources + Returns: + List of id resources in the form of id/<resource_name> + """ + id_resources = [] + with open(rtxt_path) as rtxt: + for line in rtxt: + if ' id ' in line: + resource_name = line.split()[2] + id_resources.append('id/{}'.format(resource_name)) + return id_resources + + +@contextlib.contextmanager +def _MaybeCreateStableIdsFile(options): + """Transforms a file generated by --emit-ids from another package. + + --stable-ids is generally meant to be used by different versions of the same + package. To make it work for other packages, we need to transform the package + name references to match the package that resources are being generated for. + + Note: This will fail if the package ID of the resources in + |options.use_resource_ids_path| does not match the package ID of the + resources being linked. 
@contextlib.contextmanager
def _MaybeCreateStableIdsFile(options):
  """Transforms a file generated by --emit-ids from another package.

  --stable-ids is generally meant to be used by different versions of the same
  package. To make it work for other packages, we need to transform the package
  name references to match the package that resources are being generated for.

  Note: This will fail if the package ID of the resources in
  |options.use_resource_ids_path| does not match the package ID of the
  resources being linked.

  Args:
    options: The command-line options
  Yields:
    A NamedTemporaryFile holding the transformed resource-IDs content (lines
    formatted like package:type/name = 0xPPTTEEEE) — callers read its .name
    attribute — or None when --use-resource-ids-path was not given.
  """
  if options.use_resource_ids_path:
    package_name = options.package_name
    if not package_name:
      package_name = resource_utils.ExtractPackageFromManifest(
          options.android_manifest)
    with open(options.use_resource_ids_path) as stable_ids_file:
      with tempfile.NamedTemporaryFile() as output_ids_file:
        # Rewrite the 'package:' prefix on every line (MULTILINE anchors ^
        # at each line start) so the IDs apply to |package_name|.
        output_stable_ids = re.sub(
            r'^.*?:',
            package_name + ':',
            stable_ids_file.read(),
            flags=re.MULTILINE)
        output_ids_file.write(output_stable_ids)
        # Flush so aapt2 (which opens the file by name) sees the content.
        output_ids_file.flush()
        yield output_ids_file
  else:
    yield None
def main(args):
  """Entry point: compiles resources and writes all requested outputs."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  # When the debug env-var is set, intermediate files go into a stable,
  # inspectable directory instead of an auto-deleted temp dir.
  debug_temp_resources_dir = os.environ.get(_ENV_DEBUG_VARIABLE)
  if debug_temp_resources_dir:
    debug_temp_resources_dir = os.path.join(debug_temp_resources_dir,
                                            os.path.basename(options.apk_path))
    build_utils.DeleteDirectory(debug_temp_resources_dir)
    build_utils.MakeDirectory(debug_temp_resources_dir)

  with resource_utils.BuildContext(debug_temp_resources_dir) as build:
    dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips,
                                             build.deps_dir)

    _PackageApk(options, dep_subdirs, build.temp_dir, build.gen_dir,
                build.r_txt_path)

    r_txt_path = _WriteFinalRTxtFile(options, build.r_txt_path)

    # If --shared-resources-whitelist is used, the all resources listed in
    # the corresponding R.txt file will be non-final, and an
    # onResourcesLoaded() will be generated to adjust them at runtime.
    #
    # Otherwise, if --shared-resources is used, the all resources will be
    # non-final, and an onResourcesLoaded() method will be generated too.
    #
    # Otherwise, all resources will be final, and no method will be generated.
    #
    rjava_build_options = resource_utils.RJavaBuildOptions()
    if options.shared_resources_whitelist:
      rjava_build_options.ExportSomeResources(
          options.shared_resources_whitelist)
      rjava_build_options.GenerateOnResourcesLoaded()
    elif options.shared_resources or options.app_as_shared_lib:
      rjava_build_options.ExportAllResources()
      rjava_build_options.GenerateOnResourcesLoaded()

    resource_utils.CreateRJavaFiles(
        build.srcjar_dir, None, r_txt_path, options.extra_res_packages,
        options.extra_r_text_files, rjava_build_options)

    if options.srcjar_out:
      build_utils.ZipDir(options.srcjar_out, build.srcjar_dir)

    # Sanity check that the created resources have the expected package ID.
    expected_id = _PackageIdFromOptions(options)
    if expected_id is None:
      expected_id = '0x00' if options.shared_resources else '0x7f'
    expected_id = int(expected_id, 16)
    _, package_id = resource_utils.ExtractArscPackage(options.aapt2_path,
                                                      options.apk_path)
    if package_id != expected_id:
      raise Exception(
          'Invalid package ID 0x%x (expected 0x%x)' % (package_id, expected_id))

    if options.depfile:
      build_utils.WriteDepfile(
          options.depfile,
          options.apk_path,
          inputs=options.dependencies_res_zips + options.extra_r_text_files,
          add_pydeps=False)


if __name__ == '__main__':
  main(sys.argv[1:])
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Copies files to a directory."""

import filecmp
import itertools
import optparse
import os
import shutil
import sys

from util import build_utils


def _get_all_files(base):
  """Returns a list of all the files in |base|. Each entry is relative to the
  last path entry of |base|."""
  result = []
  dirname = os.path.dirname(base)
  for root, _, files in os.walk(base):
    # NOTE(review): root[len(dirname):] keeps the leading path separator, so
    # entries look like '/base/sub/file'. Depfile consumers appear to expect
    # this exact form -- confirm before changing.
    result.extend([os.path.join(root[len(dirname):], f) for f in files])
  return result


def CopyFile(f, dest, deps):
  """Copy file or directory and update deps.

  Args:
    f: Source file or directory path.
    dest: Destination directory (or full destination path when renaming).
    deps: List of dependency paths, extended in place.
  """
  if os.path.isdir(f):
    shutil.copytree(f, os.path.join(dest, os.path.basename(f)))
    deps.extend(_get_all_files(f))
  else:
    # If a file of the same name already exists in the destination directory,
    # target it directly so the up-to-date check below can compare contents.
    if os.path.isfile(os.path.join(dest, os.path.basename(f))):
      dest = os.path.join(dest, os.path.basename(f))

    deps.append(f)

    if os.path.isfile(dest):
      if filecmp.cmp(dest, f, shallow=False):
        return
      # The shutil.copy() below would fail if the file does not have write
      # permissions. Deleting the file has similar costs to modifying the
      # permissions.
      os.unlink(dest)

    shutil.copy(f, dest)


def DoCopy(options, deps):
  """Copy files or directories given in options.files and update deps."""
  files = list(itertools.chain.from_iterable(build_utils.ParseGnList(f)
                                             for f in options.files))

  for f in files:
    if os.path.isdir(f) and not options.clear:
      print ('To avoid stale files you must use --clear when copying '
             'directories')
      sys.exit(-1)
    CopyFile(f, options.dest, deps)


def DoRenaming(options, deps):
  """Copy and rename files given in options.renaming_sources and update deps."""
  src_files = list(itertools.chain.from_iterable(
      build_utils.ParseGnList(f)
      for f in options.renaming_sources))

  dest_files = list(itertools.chain.from_iterable(
      build_utils.ParseGnList(f)
      for f in options.renaming_destinations))

  if (len(src_files) != len(dest_files)):
    print('Renaming source and destination files not match.')
    sys.exit(-1)

  # zip() instead of itertools.izip(): izip does not exist under Python 3,
  # and for these small lists the eager Python 2 zip() behaves identically.
  for src, dest in zip(src_files, dest_files):
    if os.path.isdir(src):
      print ('renaming diretory is not supported.')
      sys.exit(-1)
    else:
      CopyFile(src, os.path.join(options.dest, dest), deps)


def main(args):
  """Command-line entry point; returns 0/None on success via sys.exit."""
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  parser.add_option('--dest', help='Directory to copy files to.')
  parser.add_option('--files', action='append',
                    help='List of files to copy.')
  parser.add_option('--clear', action='store_true',
                    help='If set, the destination directory will be deleted '
                    'before copying files to it. This is highly recommended to '
                    'ensure that no stale files are left in the directory.')
  parser.add_option('--stamp', help='Path to touch on success.')
  parser.add_option('--renaming-sources',
                    action='append',
                    help='List of files need to be renamed while being '
                    'copied to dest directory')
  parser.add_option('--renaming-destinations',
                    action='append',
                    help='List of destination file name without path, the '
                    'number of elements must match rename-sources.')

  options, _ = parser.parse_args(args)

  if options.clear:
    build_utils.DeleteDirectory(options.dest)
    build_utils.MakeDirectory(options.dest)

  deps = []

  if options.files:
    DoCopy(options, deps)

  if options.renaming_sources:
    DoRenaming(options, deps)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile, options.stamp, deps, add_pydeps=False)

  if options.stamp:
    build_utils.Touch(options.stamp)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Writes a wrapper script that forwards to build/android/apk_operations.py."""

import argparse
import os
import string
import sys


SCRIPT_TEMPLATE = string.Template("""\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_apk_operations_script.py

import os
import sys

def main():
  script_directory = os.path.dirname(__file__)
  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
      script_directory, p))
  sys.path.append(resolve(${APK_OPERATIONS_DIR}))
  import apk_operations
  output_dir = resolve(${OUTPUT_DIR})
  try:
    apk_operations.Run(
        output_dir,
        resolve(${APK_PATH}),
        resolve(${INC_JSON_PATH}),
        ${FLAGS_FILE},
        ${TARGET_CPU},
        resolve(${MAPPING_PATH}))
  except TypeError:
    rel_output_dir = os.path.relpath(output_dir)
    rel_script_path = os.path.relpath(sys.argv[0], output_dir)
    sys.stderr.write('Script out-of-date. Rebuild via:\\n')
    sys.stderr.write('  ninja -C %s %s\\n' % (rel_output_dir, rel_script_path))
    return 1


if __name__ == '__main__':
  sys.exit(main())
""")


def main(args):
  """Writes the wrapper script described by |args| and returns 0 on success."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--script-output-path',
                      help='Output path for executable script.')
  parser.add_argument('--apk-path')
  parser.add_argument('--incremental-install-json-path')
  parser.add_argument('--command-line-flags-file')
  parser.add_argument('--target-cpu')
  parser.add_argument('--proguard-mapping-path')
  args = parser.parse_args(args)

  def relativize(path):
    """Returns the path relative to the output script directory."""
    if path is None:
      return path
    return os.path.relpath(path, os.path.dirname(args.script_output_path))
  apk_operations_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
  apk_operations_dir = relativize(apk_operations_dir)

  with open(args.script_output_path, 'w') as script:
    script_dict = {
        'APK_OPERATIONS_DIR': repr(apk_operations_dir),
        'OUTPUT_DIR': repr(relativize('.')),
        'APK_PATH': repr(relativize(args.apk_path)),
        'INC_JSON_PATH': repr(relativize(args.incremental_install_json_path)),
        'MAPPING_PATH': repr(relativize(args.proguard_mapping_path)),
        'FLAGS_FILE': repr(args.command_line_flags_file),
        'TARGET_CPU': repr(args.target_cpu),
    }
    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
  # 0o750 (owner rwx, group rx): the old literal 0750 is Python 2-only
  # syntax and a SyntaxError under Python 3.
  os.chmod(args.script_output_path, 0o750)
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
#!/usr/bin/env python
#
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Create an Android application bundle from one or more bundle modules."""

import argparse
import itertools
import json
import os
import shutil
import sys
import tempfile
import zipfile

# NOTE: Keep this consistent with the _create_app_bundle_py_imports definition
# in build/config/android/rules.py
from util import build_utils
from util import resource_utils

import bundletool

# Location of language-based assets in bundle modules.
_LOCALES_SUBDIR = 'assets/locales/'

# The fallback locale should always have its .pak file included in
# the base apk, i.e. not use language-based asset targetting. This ensures
# that Chrome won't crash on startup if its bundle is installed on a device
# with an unsupported system locale (e.g. fur-rIT).
_FALLBACK_LOCALE = 'en-US'

# List of split dimensions recognized by this tool.
_ALL_SPLIT_DIMENSIONS = [ 'ABI', 'SCREEN_DENSITY', 'LANGUAGE' ]

# Due to historical reasons, certain languages identified by Chromium with a
# 3-letters ISO 639-2 code, are mapped to a nearly equivalent 2-letters
# ISO 639-1 code instead (due to the fact that older Android releases only
# supported the latter when matching resources).
#
# This matches the same conversion as for Java resources.
_SHORTEN_LANGUAGE_CODE_MAP = {
  'fil': 'tl',  # Filipino to Tagalog.
}

# A list of extensions corresponding to files that should never be compressed
# in the bundle. This used to be handled by bundletool automatically until
# release 0.8.0, which required that this be passed to the BundleConfig
# file instead.
#
# This is the original list, which was taken from aapt2, with 'webp' added to
# it (which curiously was missing from the list). Note: aapt2's list contains
# 'gif' (not 'git', which is not a media extension).
_UNCOMPRESSED_FILE_EXTS = [
    '3g2', '3gp', '3gpp', '3gpp2', 'aac', 'amr', 'awb', 'gif', 'imy', 'jet',
    'jpeg', 'jpg', 'm4a', 'm4v', 'mid', 'midi', 'mkv', 'mp2', 'mp3', 'mp4',
    'mpeg', 'mpg', 'ogg', 'png', 'rtttl', 'smf', 'wav', 'webm', 'webp', 'wmv',
    'xmf'
]


def _ParseArgs(args):
  """Parses command-line arguments and returns the validated options."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--out-bundle', required=True,
                      help='Output bundle zip archive.')
  parser.add_argument('--module-zips', required=True,
                      help='GN-list of module zip archives.')
  parser.add_argument(
      '--rtxt-in-paths', action='append', help='GN-list of module R.txt files.')
  parser.add_argument(
      '--rtxt-out-path', help='Path to combined R.txt file for bundle.')
  parser.add_argument('--uncompressed-assets', action='append',
                      help='GN-list of uncompressed assets.')
  parser.add_argument(
      '--compress-shared-libraries',
      action='store_true',
      help='Whether to store native libraries compressed.')
  parser.add_argument('--split-dimensions',
                      help="GN-list of split dimensions to support.")
  parser.add_argument(
      '--base-module-rtxt-path',
      help='Optional path to the base module\'s R.txt file, only used with '
      'language split dimension.')
  parser.add_argument(
      '--base-whitelist-rtxt-path',
      help='Optional path to an R.txt file, string resources '
      'listed there _and_ in --base-module-rtxt-path will '
      'be kept in the base bundle module, even if language'
      ' splitting is enabled.')

  parser.add_argument('--keystore-path', help='Keystore path')
  parser.add_argument('--keystore-password', help='Keystore password')
  parser.add_argument('--key-name', help='Keystore key name')

  options = parser.parse_args(args)
  options.module_zips = build_utils.ParseGnList(options.module_zips)
  options.rtxt_in_paths = build_utils.ExpandFileArgs(options.rtxt_in_paths)

  if len(options.module_zips) == 0:
    raise Exception('The module zip list cannot be empty.')

  # Signing is optional, but all --keyXX parameters should be set.
  if options.keystore_path or options.keystore_password or options.key_name:
    if not options.keystore_path or not options.keystore_password or \
        not options.key_name:
      raise Exception('When signing the bundle, use --keystore-path, '
                      '--keystore-password and --key-name.')

  # Merge all uncompressed assets into a set.
  uncompressed_list = []
  if options.uncompressed_assets:
    for l in options.uncompressed_assets:
      for entry in build_utils.ParseGnList(l):
        # Each entry has the following format: 'zipPath' or 'srcPath:zipPath'
        pos = entry.find(':')
        if pos >= 0:
          uncompressed_list.append(entry[pos + 1:])
        else:
          uncompressed_list.append(entry)

  options.uncompressed_assets = set(uncompressed_list)

  # Check that all split dimensions are valid
  if options.split_dimensions:
    options.split_dimensions = build_utils.ParseGnList(options.split_dimensions)
    for dim in options.split_dimensions:
      if dim.upper() not in _ALL_SPLIT_DIMENSIONS:
        parser.error('Invalid split dimension "%s" (expected one of: %s)' % (
            dim, ', '.join(x.lower() for x in _ALL_SPLIT_DIMENSIONS)))

  # As a special case, --base-whitelist-rtxt-path can be empty to indicate
  # that the module doesn't need such a whitelist. That's because it is easier
  # to check this condition here than through GN rules :-(
  if options.base_whitelist_rtxt_path == '':
    options.base_module_rtxt_path = None

  # Check --base-module-rtxt-path and --base-whitelist-rtxt-path usage.
  if options.base_module_rtxt_path:
    if not options.base_whitelist_rtxt_path:
      parser.error(
          '--base-module-rtxt-path requires --base-whitelist-rtxt-path')
    # Guard against None: without --split-dimensions, the 'in' test below
    # would raise TypeError instead of reporting a proper usage error.
    if 'language' not in (options.split_dimensions or []):
      parser.error('--base-module-rtxt-path is only valid with '
                   'language-based splits.')

  return options


def _MakeSplitDimension(value, enabled):
  """Return dict modelling a BundleConfig splitDimension entry."""
  return {'value': value, 'negate': not enabled}


def _GenerateBundleConfigJson(uncompressed_assets, compress_shared_libraries,
                              split_dimensions, base_master_resource_ids):
  """Generate a dictionary that can be written to a JSON BundleConfig.

  Args:
    uncompressed_assets: A list or set of file paths under assets/ that always
      be stored uncompressed.
    compress_shared_libraries: Boolean, whether to compress native libs.
    split_dimensions: list of split dimensions.
    base_master_resource_ids: Optional list of 32-bit resource IDs to keep
      inside the base module, even when split dimensions are enabled.
  Returns:
    A dictionary that can be written as a json file.
  """
  # Compute splitsConfig list. Each item is a dictionary that can have
  # the following keys:
  #    'value': One of ['LANGUAGE', 'DENSITY', 'ABI']
  #    'negate': Boolean, True to indicate that the bundle should *not* be
  #              split (unused at the moment by this script).

  split_dimensions = [ _MakeSplitDimension(dim, dim in split_dimensions)
                       for dim in _ALL_SPLIT_DIMENSIONS ]

  # Native libraries loaded by the crazy linker.
  # Whether other .so files are compressed is controlled by
  # "uncompressNativeLibraries".
  uncompressed_globs = ['lib/*/crazy.*']
  # Locale-specific pak files stored in bundle splits need not be compressed.
  uncompressed_globs.extend(
      ['assets/locales#lang_*/*.pak', 'assets/fallback-locales/*.pak'])
  uncompressed_globs.extend('assets/' + x for x in uncompressed_assets)
  # NOTE: Use '**' instead of '*' to work through directories!
  uncompressed_globs.extend('**.' + ext for ext in _UNCOMPRESSED_FILE_EXTS)

  data = {
      'optimizations': {
          'splitsConfig': {
              'splitDimension': split_dimensions,
          },
          'uncompressNativeLibraries': {
              'enabled': not compress_shared_libraries,
          },
      },
      'compression': {
          'uncompressedGlob': sorted(uncompressed_globs),
      },
  }

  if base_master_resource_ids:
    data['master_resources'] = {
        'resource_ids': list(base_master_resource_ids),
    }

  return json.dumps(data, indent=2)


def _RewriteLanguageAssetPath(src_path):
  """Rewrite the destination path of a locale asset for language-based splits.

  Should only be used when generating bundles with language-based splits.
  This will rewrite paths that look like locales/<locale>.pak into
  locales#<language>/<locale>.pak, where <language> is the language code
  from the locale.

  Returns new path.
  """
  if not src_path.startswith(_LOCALES_SUBDIR) or not src_path.endswith('.pak'):
    # Return the path unchanged (as a string, like the branches below;
    # the previous code returned a one-element list here, which the caller
    # would then use as a zip entry path).
    return src_path

  locale = src_path[len(_LOCALES_SUBDIR):-4]
  android_locale = resource_utils.ToAndroidLocaleName(locale)

  # The locale format is <lang>-<region> or <lang>. Extract the language.
  pos = android_locale.find('-')
  if pos >= 0:
    android_language = android_locale[:pos]
  else:
    android_language = android_locale

  if locale == _FALLBACK_LOCALE:
    # Fallback locale .pak files must be placed in a different directory
    # to ensure they are always stored in the base module.
    result_path = 'assets/fallback-locales/%s.pak' % locale
  else:
    # Other language .pak files go into a language-specific asset directory
    # that bundletool will store in separate split APKs.
    result_path = 'assets/locales#lang_%s/%s.pak' % (android_language, locale)

  return result_path


def _SplitModuleForAssetTargeting(src_module_zip, tmp_dir, split_dimensions):
  """Splits assets in a module if needed.

  Args:
    src_module_zip: input zip module path.
    tmp_dir: Path to temporary directory, where the new output module might
      be written to.
    split_dimensions: list of split dimensions.

  Returns:
    If the module doesn't need asset targeting, doesn't do anything and
    returns src_module_zip. Otherwise, create a new module zip archive under
    tmp_dir with the same file name, but which contains assets paths targeting
    the proper dimensions.
  """
  split_language = 'LANGUAGE' in split_dimensions
  if not split_language:
    # Nothing to target, so return original module path.
    return src_module_zip

  with zipfile.ZipFile(src_module_zip, 'r') as src_zip:
    language_files = [
        f for f in src_zip.namelist() if f.startswith(_LOCALES_SUBDIR)]

    if not language_files:
      # Not language-based assets to split in this module.
      return src_module_zip

    tmp_zip = os.path.join(tmp_dir, os.path.basename(src_module_zip))
    with zipfile.ZipFile(tmp_zip, 'w') as dst_zip:
      for info in src_zip.infolist():
        src_path = info.filename
        is_compressed = info.compress_type != zipfile.ZIP_STORED

        dst_path = src_path
        if src_path in language_files:
          dst_path = _RewriteLanguageAssetPath(src_path)

        build_utils.AddToZipHermetic(
            dst_zip,
            dst_path,
            data=src_zip.read(src_path),
            compress=is_compressed)

    return tmp_zip


def _GenerateBaseResourcesWhitelist(base_module_rtxt_path,
                                    base_whitelist_rtxt_path):
  """Generate a whitelist of base master resource ids.

  Args:
    base_module_rtxt_path: Path to base module R.txt file.
    base_whitelist_rtxt_path: Path to base whitelist R.txt file.
  Returns:
    list of resource ids.
  """
  ids_map = resource_utils.GenerateStringResourcesWhitelist(
      base_module_rtxt_path, base_whitelist_rtxt_path)
  return ids_map.keys()


def main(args):
  """Entry point: builds (and optionally signs) the output bundle."""
  args = build_utils.ExpandFileArgs(args)
  options = _ParseArgs(args)

  split_dimensions = []
  if options.split_dimensions:
    split_dimensions = [x.upper() for x in options.split_dimensions]


  with build_utils.TempDir() as tmp_dir:
    module_zips = [
        _SplitModuleForAssetTargeting(module, tmp_dir, split_dimensions) \
        for module in options.module_zips]

    base_master_resource_ids = None
    if options.base_module_rtxt_path:
      base_master_resource_ids = _GenerateBaseResourcesWhitelist(
          options.base_module_rtxt_path, options.base_whitelist_rtxt_path)

    bundle_config = _GenerateBundleConfigJson(
        options.uncompressed_assets, options.compress_shared_libraries,
        split_dimensions, base_master_resource_ids)

    tmp_bundle = os.path.join(tmp_dir, 'tmp_bundle')

    tmp_unsigned_bundle = tmp_bundle
    if options.keystore_path:
      tmp_unsigned_bundle = tmp_bundle + '.unsigned'

    # Important: bundletool requires that the bundle config file is
    # named with a .pb.json extension.
    tmp_bundle_config = tmp_bundle + '.BundleConfig.pb.json'

    with open(tmp_bundle_config, 'w') as f:
      f.write(bundle_config)

    cmd_args = ['java', '-jar', bundletool.BUNDLETOOL_JAR_PATH, 'build-bundle']
    cmd_args += ['--modules=%s' % ','.join(module_zips)]
    cmd_args += ['--output=%s' % tmp_unsigned_bundle]
    cmd_args += ['--config=%s' % tmp_bundle_config]

    build_utils.CheckOutput(cmd_args, print_stdout=True, print_stderr=True)

    if options.keystore_path:
      # NOTE: As stated by the public documentation, apksigner cannot be used
      # to sign the bundle (because it rejects anything that isn't an APK).
      # The signature and digest algorithm selection come from the internal
      # App Bundle documentation. There is no corresponding public doc :-(
      signing_cmd_args = [
          'jarsigner', '-sigalg', 'SHA256withRSA', '-digestalg', 'SHA-256',
          '-keystore', 'file:' + options.keystore_path,
          '-storepass' , options.keystore_password,
          '-signedjar', tmp_bundle,
          tmp_unsigned_bundle,
          options.key_name,
      ]
      build_utils.CheckOutput(signing_cmd_args, print_stderr=True)

    shutil.move(tmp_bundle, options.out_bundle)

  if options.rtxt_out_path:
    with open(options.rtxt_out_path, 'w') as rtxt_out:
      for rtxt_in_path in options.rtxt_in_paths:
        with open(rtxt_in_path, 'r') as rtxt_in:
          rtxt_out.write('-- Contents of {}\n'.format(
              os.path.basename(rtxt_in_path)))
          rtxt_out.write(rtxt_in.read())


if __name__ == '__main__':
  main(sys.argv[1:])
#!/usr/bin/env python
#
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Creates an .apks from an .aab with only English strings."""

import argparse
import os
import sys

sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)))
from pylib.utils import app_bundle_utils


def main():
  """Parses arguments and delegates to app_bundle_utils.GenerateBundleApks."""
  parser = argparse.ArgumentParser(description=__doc__)
  # All flags are mandatory; declare them in one place.
  for flag, description in (
      ('--bundle', 'Path to input .aab file.'),
      ('--output', 'Path to output .apks file.'),
      ('--aapt2-path', 'Path to aapt2.'),
      ('--keystore-path', 'Path to keystore.'),
      ('--keystore-password', 'Keystore password.'),
      ('--keystore-name', 'Key name within keystore')):
    parser.add_argument(flag, required=True, help=description)

  options = parser.parse_args()

  app_bundle_utils.GenerateBundleApks(
      options.bundle,
      options.output,
      options.aapt2_path,
      options.keystore_path,
      options.keystore_password,
      options.keystore_name,
      minimal=True,
      check_for_noop=False)


if __name__ == '__main__':
  main()
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Create a wrapper script to manage an Android App Bundle."""

import argparse
import os
import string
import sys

# Import apk_operations even though this script doesn't use it so that
# targets that depend on the wrapper scripts will rebuild when apk_operations
# or its deps change.
sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.pardir))
import apk_operations  # pylint: disable=unused-import
from util import build_utils

SCRIPT_TEMPLATE = string.Template("""\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_bundle_wrapper_script.py

import os
import sys

def main():
  script_directory = os.path.dirname(__file__)
  resolve = lambda p: p if p is None else os.path.abspath(os.path.join(
      script_directory, p))
  sys.path.append(resolve(${WRAPPED_SCRIPT_DIR}))
  import apk_operations

  apk_operations.RunForBundle(output_directory=resolve(${OUTPUT_DIR}),
                              bundle_path=resolve(${BUNDLE_PATH}),
                              bundle_apks_path=resolve(${BUNDLE_APKS_PATH}),
                              aapt2_path=resolve(${AAPT2_PATH}),
                              keystore_path=resolve(${KEYSTORE_PATH}),
                              keystore_password=${KEYSTORE_PASSWORD},
                              keystore_alias=${KEY_NAME},
                              package_name=${PACKAGE_NAME},
                              command_line_flags_file=${FLAGS_FILE},
                              proguard_mapping_path=resolve(${MAPPING_PATH}),
                              target_cpu=${TARGET_CPU},
                              system_image_locales=${SYSTEM_IMAGE_LOCALES})

if __name__ == '__main__':
  sys.exit(main())
""")


def main(args):
  """Writes the bundle wrapper script described by |args|; returns 0."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser()
  parser.add_argument('--script-output-path', required=True,
                      help='Output path for executable script.')
  parser.add_argument('--bundle-path', required=True)
  parser.add_argument('--bundle-apks-path', required=True)
  parser.add_argument('--package-name', required=True)
  parser.add_argument('--aapt2-path', required=True)
  parser.add_argument('--keystore-path', required=True)
  parser.add_argument('--keystore-password', required=True)
  parser.add_argument('--key-name', required=True)
  parser.add_argument('--command-line-flags-file')
  parser.add_argument('--proguard-mapping-path')
  parser.add_argument('--target-cpu')
  parser.add_argument('--system-image-locales')
  args = parser.parse_args(args)

  def relativize(path):
    """Returns the path relative to the output script directory."""
    if path is None:
      return path
    return os.path.relpath(path, os.path.dirname(args.script_output_path))

  wrapped_script_dir = os.path.join(os.path.dirname(__file__), os.path.pardir)
  wrapped_script_dir = relativize(wrapped_script_dir)

  with open(args.script_output_path, 'w') as script:
    script_dict = {
        'WRAPPED_SCRIPT_DIR':
            repr(wrapped_script_dir),
        'OUTPUT_DIR':
            repr(relativize('.')),
        'BUNDLE_PATH':
            repr(relativize(args.bundle_path)),
        'BUNDLE_APKS_PATH':
            repr(relativize(args.bundle_apks_path)),
        'PACKAGE_NAME':
            repr(args.package_name),
        'AAPT2_PATH':
            repr(relativize(args.aapt2_path)),
        'KEYSTORE_PATH':
            repr(relativize(args.keystore_path)),
        'KEYSTORE_PASSWORD':
            repr(args.keystore_password),
        'KEY_NAME':
            repr(args.key_name),
        'MAPPING_PATH':
            repr(relativize(args.proguard_mapping_path)),
        'FLAGS_FILE':
            repr(args.command_line_flags_file),
        'TARGET_CPU':
            repr(args.target_cpu),
        'SYSTEM_IMAGE_LOCALES':
            repr(build_utils.ParseGnList(args.system_image_locales)),
    }
    script.write(SCRIPT_TEMPLATE.substitute(script_dict))
  # 0o750 (owner rwx, group rx): the old literal 0750 is Python 2-only
  # syntax and a SyntaxError under Python 3.
  os.chmod(args.script_output_path, 0o750)
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
build/android/gyp/create_bundle_wrapper_script.py +../../../third_party/catapult/common/py_utils/py_utils/__init__.py +../../../third_party/catapult/common/py_utils/py_utils/cloud_storage.py +../../../third_party/catapult/common/py_utils/py_utils/cloud_storage_global_lock.py +../../../third_party/catapult/common/py_utils/py_utils/lock.py +../../../third_party/catapult/common/py_utils/py_utils/tempfile_ext.py +../../../third_party/catapult/dependency_manager/dependency_manager/__init__.py +../../../third_party/catapult/dependency_manager/dependency_manager/archive_info.py +../../../third_party/catapult/dependency_manager/dependency_manager/base_config.py +../../../third_party/catapult/dependency_manager/dependency_manager/cloud_storage_info.py +../../../third_party/catapult/dependency_manager/dependency_manager/dependency_info.py +../../../third_party/catapult/dependency_manager/dependency_manager/dependency_manager_util.py +../../../third_party/catapult/dependency_manager/dependency_manager/exceptions.py +../../../third_party/catapult/dependency_manager/dependency_manager/local_path_info.py +../../../third_party/catapult/dependency_manager/dependency_manager/manager.py +../../../third_party/catapult/dependency_manager/dependency_manager/uploader.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/apk_helper.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/constants/file_system.py +../../../third_party/catapult/devil/devil/android/decorators.py +../../../third_party/catapult/devil/devil/android/device_errors.py +../../../third_party/catapult/devil/devil/android/device_signal.py +../../../third_party/catapult/devil/devil/android/device_temp_file.py 
+../../../third_party/catapult/devil/devil/android/device_utils.py +../../../third_party/catapult/devil/devil/android/flag_changer.py +../../../third_party/catapult/devil/devil/android/install_commands.py +../../../third_party/catapult/devil/devil/android/logcat_monitor.py +../../../third_party/catapult/devil/devil/android/md5sum.py +../../../third_party/catapult/devil/devil/android/ndk/__init__.py +../../../third_party/catapult/devil/devil/android/ndk/abis.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/aapt.py +../../../third_party/catapult/devil/devil/android/sdk/adb_wrapper.py +../../../third_party/catapult/devil/devil/android/sdk/build_tools.py +../../../third_party/catapult/devil/devil/android/sdk/intent.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/split_select.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/base_error.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/catapult/devil/devil/devil_env.py +../../../third_party/catapult/devil/devil/utils/__init__.py +../../../third_party/catapult/devil/devil/utils/cmd_helper.py +../../../third_party/catapult/devil/devil/utils/host_utils.py +../../../third_party/catapult/devil/devil/utils/lazy/__init__.py +../../../third_party/catapult/devil/devil/utils/lazy/weak_constant.py +../../../third_party/catapult/devil/devil/utils/logging_common.py +../../../third_party/catapult/devil/devil/utils/lsusb.py +../../../third_party/catapult/devil/devil/utils/parallelizer.py +../../../third_party/catapult/devil/devil/utils/reraiser_thread.py +../../../third_party/catapult/devil/devil/utils/reset_usb.py +../../../third_party/catapult/devil/devil/utils/run_tests_helper.py 
+../../../third_party/catapult/devil/devil/utils/timeout_retry.py +../../../third_party/catapult/devil/devil/utils/watchdog_timer.py +../../../third_party/catapult/devil/devil/utils/zip_utils.py +../../../third_party/catapult/third_party/zipfile/zipfile_2_7_13.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_compat.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../gn_helpers.py +../apk_operations.py +../devil_chromium.py +../incremental_install/__init__.py +../incremental_install/installer.py +../pylib/__init__.py +../pylib/constants/__init__.py +../pylib/constants/host_paths.py +../pylib/symbols/__init__.py +../pylib/symbols/deobfuscator.py +../pylib/utils/__init__.py +../pylib/utils/app_bundle_utils.py +../pylib/utils/simpleperf.py +../pylib/utils/time_profile.py +bundletool.py +create_bundle_wrapper_script.py +util/__init__.py +util/build_utils.py +util/md5_check.py +util/resource_utils.py diff --git a/deps/v8/build/android/gyp/create_java_binary_script.py b/deps/v8/build/android/gyp/create_java_binary_script.py new file mode 100755 index 0000000000..4469381c7c --- /dev/null +++ b/deps/v8/build/android/gyp/create_java_binary_script.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium 
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Creates a simple script to run a java "binary".

This creates a script that sets up the java command line for running a java
jar. This includes correctly setting the classpath and the main class.
"""

import optparse
import os
import sys

from util import build_utils

# The java command must be executed in the current directory because there may
# be user-supplied paths in the args. The script receives the classpath relative
# to the directory that the script is written in and then, when run, must
# recalculate the paths relative to the current directory.
script_template = """\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_java_binary_script.py

import argparse
import os
import sys

self_dir = os.path.dirname(__file__)
classpath = [{classpath}]
bootclasspath = [{bootclasspath}]
extra_program_args = {extra_program_args}
if os.getcwd() != self_dir:
  offset = os.path.relpath(self_dir, os.getcwd())
  classpath = [os.path.join(offset, p) for p in classpath]
  bootclasspath = [os.path.join(offset, p) for p in bootclasspath]
java_cmd = ["java"]
# This is a simple argparser for jvm and jar arguments.
parser = argparse.ArgumentParser()
parser.add_argument('--jar-args')
parser.add_argument('--jvm-args')

known_args, unknown_args = parser.parse_known_args(sys.argv[1:])
if known_args.jvm_args:
  jvm_arguments = known_args.jvm_args.strip('"').split()
  java_cmd.extend(jvm_arguments)
if known_args.jar_args:
  jar_arguments = known_args.jar_args.strip('"').split()
  if unknown_args:
    raise Exception('There are unknown arguments')
else:
  jar_arguments = unknown_args

{noverify_flag}
if bootclasspath:
  java_cmd.append("-Xbootclasspath/p:" + ":".join(bootclasspath))
java_cmd.extend(
    ["-classpath", ":".join(classpath), "-enableassertions", \"{main_class}\"])
java_cmd.extend(extra_program_args)
java_cmd.extend(jar_arguments)
os.execvp("java", java_cmd)
"""

def main(argv):
  """Writes the java wrapper script and marks it executable.

  Args:
    argv: Command-line arguments (argv[1:]), possibly containing @file args.
  """
  argv = build_utils.ExpandFileArgs(argv)
  parser = optparse.OptionParser()
  parser.add_option('--output', help='Output path for executable script.')
  parser.add_option('--main-class',
                    help='Name of the java class with the "main" entry point.')
  parser.add_option('--classpath', action='append', default=[],
                    help='Classpath for running the jar.')
  parser.add_option('--bootclasspath', action='append', default=[],
                    help='zip/jar files to add to bootclasspath for java cmd.')
  parser.add_option('--noverify', action='store_true',
                    help='JVM flag: noverify.')

  options, extra_program_args = parser.parse_args(argv)

  # PEP 8 style: no parentheses around the condition (was
  # "if (options.noverify):").
  if options.noverify:
    noverify_flag = 'java_cmd.append("-noverify")'
  else:
    noverify_flag = ''

  classpath = []
  for cp_arg in options.classpath:
    classpath += build_utils.ParseGnList(cp_arg)

  bootclasspath = []
  for bootcp_arg in options.bootclasspath:
    bootclasspath += build_utils.ParseGnList(bootcp_arg)

  # Paths are baked into the script relative to the script's own directory.
  run_dir = os.path.dirname(options.output)
  bootclasspath = [os.path.relpath(p, run_dir) for p in bootclasspath]
  classpath = [os.path.relpath(p, run_dir) for p in classpath]

  with build_utils.AtomicOutput(options.output) as script:
    script.write(script_template.format(
        classpath=('"%s"' % '", "'.join(classpath)),
        bootclasspath=('"%s"' % '", "'.join(bootclasspath)
                       if bootclasspath else ''),
        main_class=options.main_class,
        extra_program_args=repr(extra_program_args),
        noverify_flag=noverify_flag))

  # 0o750 (rwxr-x---): was the Python 2-only octal literal 0750, which is a
  # SyntaxError under Python 3.
  os.chmod(options.output, 0o750)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
# ---------------------------------------------------------------------------
# (Start of create_size_info_files.py in the original concatenated source.)
#!/usr/bin/env python

# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Creates size-info/*.info files used by SuperSize."""

import argparse
import zipfile

from util import jar_info_utils
from util import md5_check


def _MergeResInfoFiles(res_info_path, info_paths):
  """Concatenates all .res.info inputs into |res_info_path|."""
  # only_if_changed=False since no build rules depend on this as an input.
  with build_utils.AtomicOutput(res_info_path, only_if_changed=False) as dst:
    for p in info_paths:
      with open(p) as src:
        dst.write(src.read())
def _PakInfoPathsForAssets(assets):
  """Returns the .info paths for the .pak entries of a GN asset list.

  Entries may be "srcPath" or "srcPath:zipPath"; only srcPath is used.
  """
  pak_entries = (entry for entry in assets if entry.endswith('.pak'))
  return [entry.split(':')[0] + '.info' for entry in pak_entries]


def _MergePakInfoFiles(merged_path, pak_infos):
  """Writes the sorted, de-duplicated union of all input .pak.info lines."""
  unique_lines = set()
  for info_path in pak_infos:
    with open(info_path, 'r') as info_file:
      unique_lines.update(info_file.readlines())
  # only_if_changed=False since no build rules depend on this as an input.
  with build_utils.AtomicOutput(merged_path, only_if_changed=False) as dest:
    dest.writelines(sorted(unique_lines))


def _FullJavaNameFromClassFilePath(path):
  """Converts a .class file path into a dotted fully-qualified name.

  Input:  base/android/java/src/org/chromium/Foo.class
  Output: base.android.java.src.org.chromium.Foo
  Returns '' for anything that is not a .class file.
  """
  if not path.endswith('.class'):
    return ''
  remainder = os.path.splitext(path)[0]
  segments = []
  while remainder:
    # Use os.path.split to be platform independent.
    remainder, segment = os.path.split(remainder)
    segments.insert(0, segment)  # Package comes first.
  return '.'.join(segments)
def _MergeJarInfoFiles(output, inputs):
  """Merge several .jar.info files to generate an .apk.jar.info.

  Args:
    output: output file path.
    inputs: List of .info.jar or .jar files.
  """
  merged = dict()
  for src_path in inputs:
    # android_java_prebuilt adds jar files in the src directory (relative to
    # the output directory, usually ../../third_party/example.jar).
    # android_aar_prebuilt collects jar files in the aar file and uses the
    # java_prebuilt rule to generate gen/example/classes.jar files.
    # We scan these prebuilt jars to parse each class path for the FQN. This
    # allows us to later map these classes back to their respective src
    # directories.
    # TODO(agrieve): This should probably also check that the mtime of the
    # .info is newer than that of the .jar, or change prebuilts to always
    # output .info files so that they always exist (and change the depfile to
    # depend directly on them).
    if src_path.endswith('.info'):
      merged.update(jar_info_utils.ParseJarInfoFile(src_path))
      continue
    with zipfile.ZipFile(src_path) as zip_info:
      for entry_name in zip_info.namelist():
        fqn = _FullJavaNameFromClassFilePath(entry_name)
        if fqn:
          merged[fqn] = '{}/{}'.format(src_path, entry_name)

  # only_if_changed=False since no build rules depend on this as an input.
  with build_utils.AtomicOutput(output, only_if_changed=False) as out_file:
    jar_info_utils.WriteJarInfoFile(out_file, merged)


def _FindJarInputs(jar_paths):
  """For each jar, prefers its .jar.info sibling when one exists on disk."""
  resolved = []
  for jar_path in jar_paths:
    info_candidate = jar_path + '.info'
    resolved.append(
        info_candidate if os.path.exists(info_candidate) else jar_path)
  return resolved


def main(args):
  """Merges jar/pak/res info inputs into the three SuperSize .info outputs."""
  args = build_utils.ExpandFileArgs(args)
  parser = argparse.ArgumentParser(description=__doc__)
  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--jar-info-path', required=True, help='Output .jar.info file')
  parser.add_argument(
      '--pak-info-path', required=True, help='Output .pak.info file')
  parser.add_argument(
      '--res-info-path', required=True, help='Output .res.info file')
  parser.add_argument(
      '--jar-files',
      required=True,
      action='append',
      help='GN-list of .jar file paths')
  parser.add_argument(
      '--assets',
      required=True,
      action='append',
      help='GN-list of files to add as assets in the form '
      '"srcPath:zipPath", where ":zipPath" is optional.')
  parser.add_argument(
      '--uncompressed-assets',
      required=True,
      action='append',
      help='Same as --assets, except disables compression.')
  parser.add_argument(
      '--resource-apk',
      dest='resource_apks',
      required=True,
      action='append',
      help='An .ap_ file built using aapt')

  opts = parser.parse_args(args)

  opts.jar_files = build_utils.ParseGnList(opts.jar_files)
  opts.assets = build_utils.ParseGnList(opts.assets)
  opts.uncompressed_assets = build_utils.ParseGnList(opts.uncompressed_assets)

  jar_inputs = _FindJarInputs(set(opts.jar_files))
  pak_inputs = _PakInfoPathsForAssets(opts.assets + opts.uncompressed_assets)
  res_inputs = [p + '.info' for p in opts.resource_apks]

  # Don't bother re-running if no .info files have changed (saves ~250ms).
  md5_check.CallAndRecordIfStale(
      lambda: _MergeJarInfoFiles(opts.jar_info_path, jar_inputs),
      input_paths=jar_inputs,
      output_paths=[opts.jar_info_path])

  # Always recreate these (just as fast as md5 checking them).
  _MergePakInfoFiles(opts.pak_info_path, pak_inputs)
  _MergeResInfoFiles(opts.res_info_path, res_inputs)

  all_inputs = jar_inputs + pak_inputs + res_inputs
  build_utils.WriteDepfile(
      opts.depfile,
      opts.jar_info_path,
      inputs=all_inputs,
      add_pydeps=False)


if __name__ == '__main__':
  main(sys.argv[1:])
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


import argparse
import os
import sys
import textwrap

from util import build_utils

# Template for the generated wrapper script. {script_path} and
# {script_path_args} are resolved relative to the wrapper's own directory at
# run time. print(...) is used (instead of Python 2's "print x") so that the
# generated script also runs under Python 3; with a single argument the two
# forms print identically under Python 2.
SCRIPT_TEMPLATE = textwrap.dedent(
    """\
    #!/usr/bin/env python
    #
    # This file was generated by build/android/gyp/create_stack_script.py

    import os
    import sys

    def main(argv):
      script_directory = os.path.dirname(__file__)
      resolve = lambda p: os.path.abspath(os.path.join(script_directory, p))
      script_path = resolve('{script_path}')
      script_args = {script_args}
      script_path_args = {script_path_args}
      for arg, path in script_path_args:
        script_args.extend([arg, resolve(path)])
      script_cmd = [script_path] + script_args + argv
      print(' '.join(script_cmd))
      os.execv(script_path, script_cmd)

    if __name__ == '__main__':
      sys.exit(main(sys.argv[1:]))
    """)


def main(args):
  """Writes a wrapper that invokes the stack tool with baked-in path args.

  Args:
    args: Command-line arguments (argv[1:]); unknown arguments are passed
      through to the wrapped script verbatim.

  Returns:
    0 on success.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--script-path',
      help='Path to the wrapped script.')
  parser.add_argument(
      '--script-output-path',
      help='Path to the output script.')
  group = parser.add_argument_group('Path arguments')
  group.add_argument('--output-directory')
  group.add_argument('--packed-libs')

  args, script_args = parser.parse_known_args(build_utils.ExpandFileArgs(args))

  def relativize(p):
    # Paths are baked in relative to the generated script's own directory.
    return os.path.relpath(p, os.path.dirname(args.script_output_path))

  script_path = relativize(args.script_path)

  script_path_args = []
  if args.output_directory:
    script_path_args.append(
        ('--output-directory', relativize(args.output_directory)))
  if args.packed_libs:
    for p in build_utils.ParseGnList(args.packed_libs):
      script_path_args.append(('--packed-lib', relativize(p)))

  with build_utils.AtomicOutput(args.script_output_path) as script:
    script.write(SCRIPT_TEMPLATE.format(
        script_path=script_path,
        script_args=script_args,
        script_path_args=script_path_args))

  # 0o750 (rwxr-x---): was the Python 2-only octal literal 0750, which is a
  # SyntaxError under Python 3.
  os.chmod(args.script_output_path, 0o750)

  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
"""Creates a simple wrapper script that passes the correct --output-directory.
"""

import argparse
import os

# Template for the generated wrapper. {cmd} and {output_directory} are
# absolute paths baked in at generation time; extra argv is forwarded.
# The provenance comment now names this file (create_tool_wrapper.py) —
# the original template said "create_tool_script.py", which does not exist.
_TEMPLATE = """\
#!/usr/bin/env python
#
# This file was generated by //build/android/gyp/create_tool_wrapper.py

import os
import sys

cmd = '{cmd}'
args = [os.path.basename(cmd), '{flag_name}={output_directory}'] + sys.argv[1:]
os.execv(cmd, args)
"""

def main():
  """Parses arguments, writes the wrapper script, and marks it executable."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--output', help='Output path for executable script.')
  parser.add_argument('--target', help='Path to script being wrapped.')
  parser.add_argument('--output-directory', help='Value for --output-directory')
  parser.add_argument('--flag-name',
                      help='Flag name to use instead of --output-directory',
                      default='--output-directory')
  args = parser.parse_args()

  with open(args.output, 'w') as script:
    script.write(_TEMPLATE.format(
        cmd=os.path.abspath(args.target),
        flag_name=args.flag_name,
        output_directory=os.path.abspath(args.output_directory)))

  # 0o750 (rwxr-x---): was the Python 2-only octal literal 0750, which is a
  # SyntaxError under Python 3.
  os.chmod(args.output, 0o750)


if __name__ == '__main__':
  main()
"""Generate a zip archive containing localized locale name Android resource
strings!

This script takes a list of input Chrome-specific locale names, as well as an
output zip file path.

Each output file will contain the definition of a single string resource,
named 'current_locale', whose value will be the matching Chromium locale name.
E.g. values-en-rUS/strings.xml will define 'current_locale' as 'en-US'.
"""

import argparse
import os
import sys
import zipfile

# Make the //build/android/gyp helpers importable regardless of the directory
# this script is invoked from.
sys.path.insert(
    0,
    os.path.join(
        os.path.dirname(__file__), '..', '..', '..', 'build', 'android', 'gyp'))

from util import build_utils
from util import resource_utils

# A small string template for the content of each strings.xml file.
# NOTE: The name is chosen to avoid any conflicts with other string defined
# by other resource archives.
_TEMPLATE = """\
<?xml version="1.0" encoding="utf-8"?>
<resources>
  <string name="current_detected_ui_locale_name">{resource_text}</string>
</resources>
"""

# The default Chrome locale value.
_DEFAULT_CHROME_LOCALE = 'en-US'


def _GenerateLocaleStringsXml(locale):
  """Returns strings.xml content declaring |locale| as the current locale."""
  return _TEMPLATE.format(resource_text=locale)


def _AddLocaleResourceFileToZip(out_zip, android_locale, locale):
  """Adds one values[-<qualifier>]/strings.xml entry to |out_zip|."""
  if android_locale:
    zip_path = 'values-%s/strings.xml' % android_locale
  else:
    zip_path = 'values/strings.xml'
  build_utils.AddToZipHermetic(
      out_zip, zip_path, data=_GenerateLocaleStringsXml(locale),
      compress=False)


def main():
  """Builds the locale-name resource zip described by the command line."""
  parser = argparse.ArgumentParser(
      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

  build_utils.AddDepfileOption(parser)
  parser.add_argument(
      '--locale-list',
      required=True,
      help='GN-list of Chrome-specific locale names.')
  parser.add_argument(
      '--output-zip', required=True, help='Output zip archive path.')

  args = parser.parse_args()

  wanted_locales = build_utils.ParseGnList(args.locale_list)
  if not wanted_locales:
    raise Exception('Locale list cannot be empty!')

  with build_utils.AtomicOutput(args.output_zip) as staging_file:
    with zipfile.ZipFile(staging_file, 'w') as out_zip:
      # First, write the default value, since aapt requires one.
      _AddLocaleResourceFileToZip(out_zip, '', _DEFAULT_CHROME_LOCALE)

      for chrome_locale in wanted_locales:
        _AddLocaleResourceFileToZip(
            out_zip, resource_utils.ToAndroidLocaleName(chrome_locale),
            chrome_locale)

  if args.depfile:
    build_utils.WriteDepfile(args.depfile, args.output_zip)


if __name__ == '__main__':
  main()
import argparse
import os
import sys

from util import build_utils


def main():
  """Runs the Desugar jar over --input-jar and writes --output-jar."""
  raw_args = build_utils.ExpandFileArgs(sys.argv[1:])
  parser = argparse.ArgumentParser()
  build_utils.AddDepfileOption(parser)
  parser.add_argument('--desugar-jar', required=True,
                      help='Path to Desugar.jar.')
  parser.add_argument('--input-jar', required=True,
                      help='Jar input path to include .class files from.')
  parser.add_argument('--output-jar', required=True,
                      help='Jar output path.')
  parser.add_argument('--classpath', required=True,
                      help='Classpath.')
  parser.add_argument('--bootclasspath', required=True,
                      help='Path to javac bootclasspath interface jar.')
  options = parser.parse_args(raw_args)

  options.bootclasspath = build_utils.ParseGnList(options.bootclasspath)
  options.classpath = build_utils.ParseGnList(options.classpath)

  desugar_cmd = [
      'java',
      '-jar',
      options.desugar_jar,
      '--input',
      options.input_jar,
      '--output',
      options.output_jar,
      # Don't include try-with-resources files in every .jar. Instead, they
      # are included via //third_party/bazel/desugar:desugar_runtime_java.
      '--desugar_try_with_resources_omit_runtime_classes',
  ]
  for entry in options.bootclasspath:
    desugar_cmd.extend(['--bootclasspath_entry', entry])
  for entry in options.classpath:
    desugar_cmd.extend(['--classpath_entry', entry])
  build_utils.CheckOutput(desugar_cmd, print_stdout=False)

  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        options.output_jar,
        inputs=options.bootclasspath + options.classpath,
        add_pydeps=False)


if __name__ == '__main__':
  sys.exit(main())
import json
import logging
import optparse
import os
import re
import shutil
import sys
import tempfile
import zipfile

from util import build_utils

sys.path.insert(1, os.path.join(os.path.dirname(__file__), os.path.pardir))

import convert_dex_profile


def _CheckFilePathEndsWithJar(parser, file_path):
  """Aborts via |parser| if |file_path| does not name a .jar file."""
  if not file_path.endswith(".jar"):
    parser.error("%s does not end in .jar" % file_path)


def _CheckFilePathsEndWithJar(parser, file_paths):
  """Aborts via |parser| if any of |file_paths| does not name a .jar file."""
  for path in file_paths:
    _CheckFilePathEndsWithJar(parser, path)


def _ParseArgs(args):
  """Parses command-line options.

  Returns:
    (options, paths) where |paths| are the positional jar arguments.
  """
  args = build_utils.ExpandFileArgs(args)

  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)

  add_option = parser.add_option
  add_option('--output-directory',
             default=os.getcwd(),
             help='Path to the output build directory.')
  add_option('--dex-path', help='Dex output path.')
  add_option('--configuration-name',
             help='The build CONFIGURATION_NAME.')
  add_option('--proguard-enabled',
             help='"true" if proguard is enabled.')
  add_option('--debug-build-proguard-enabled',
             help='"true" if proguard is enabled for debug build.')
  add_option('--proguard-enabled-input-path',
             help=('Path to dex in Release mode when proguard '
                   'is enabled.'))
  add_option('--inputs', help='A list of additional input paths.')
  add_option('--excluded-paths',
             help='A list of paths to exclude from the dex file.')
  add_option('--main-dex-list-path',
             help='A file containing a list of the classes to '
             'include in the main dex.')
  add_option('--multidex-configuration-path',
             help='A JSON file containing multidex build configuration.')
  add_option('--multi-dex', default=False, action='store_true',
             help='Generate multiple dex files.')
  add_option('--d8-jar-path', help='Path to D8 jar.')
  add_option('--release', action='store_true', default=False,
             help='Run D8 in release mode. Release mode maximises main '
             'dex and deletes non-essential line number information '
             '(vs debug which minimizes main dex and keeps all line '
             'number information, and then some.')
  add_option('--min-api',
             help='Minimum Android API level compatibility.')

  add_option('--dexlayout-profile',
             help=('Text profile for dexlayout. If present, a dexlayout '
                   'pass will happen'))
  add_option('--profman-path',
             help=('Path to ART profman binary. There should be a '
                   'lib/ directory at the same path containing shared '
                   'libraries (shared with dexlayout).'))
  add_option('--dexlayout-path',
             help=('Path to ART dexlayout binary. There should be a '
                   'lib/ directory at the same path containing shared '
                   'libraries (shared with dexlayout).'))
  add_option('--dexdump-path', help='Path to dexdump binary.')
  add_option(
      '--proguard-mapping-path',
      help=('Path to proguard map from obfuscated symbols in the jar to '
            'unobfuscated symbols present in the code. If not '
            'present, the jar is assumed not to be obfuscated.'))

  options, paths = parser.parse_args(args)

  build_utils.CheckOptions(options, parser, required=('d8_jar_path',))

  # A dexlayout pass needs all three ART tool binaries; conversely a proguard
  # map is only meaningful when dexlayout runs.
  if options.dexlayout_profile:
    build_utils.CheckOptions(
        options,
        parser,
        required=('profman_path', 'dexlayout_path', 'dexdump_path'))
  elif options.proguard_mapping_path is not None:
    raise Exception('Unexpected proguard mapping without dexlayout')

  if options.multidex_configuration_path:
    with open(options.multidex_configuration_path) as config_file:
      multidex_config = json.load(config_file)
    options.multi_dex = multidex_config.get('enabled', False)

  if options.multi_dex and not options.main_dex_list_path:
    logging.warning('multidex cannot be enabled without --main-dex-list-path')
    options.multi_dex = False
  elif options.main_dex_list_path and not options.multi_dex:
    logging.warning('--main-dex-list-path is unused if multidex is not enabled')

  if options.inputs:
    options.inputs = build_utils.ParseGnList(options.inputs)
    _CheckFilePathsEndWithJar(parser, options.inputs)
  if options.excluded_paths:
    options.excluded_paths = build_utils.ParseGnList(options.excluded_paths)

  if options.proguard_enabled_input_path:
    _CheckFilePathEndsWithJar(parser, options.proguard_enabled_input_path)
  _CheckFilePathsEndWithJar(parser, paths)

  return options, paths


def _MoveTempDexFile(tmp_dex_dir, dex_path):
  """Move the temp dex file out of |tmp_dex_dir|.

  Args:
    tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp().
      The directory should have just a single file.
    dex_path: Target path to move dex file to.

  Raises:
    Exception if there are multiple files in |tmp_dex_dir|.
  """
  entries = os.listdir(tmp_dex_dir)
  if len(entries) > 1:
    raise Exception('%d files created, expected 1' % len(entries))

  shutil.move(os.path.join(tmp_dex_dir, entries[0]), dex_path)
+ """ + tempfiles = os.listdir(tmp_dex_dir) + if len(tempfiles) > 1: + raise Exception('%d files created, expected 1' % len(tempfiles)) + + tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0]) + shutil.move(tmp_dex_path, dex_path) + + +def _NoClassFiles(jar_paths): + """Returns True if there are no .class files in the given JARs. + + Args: + jar_paths: list of strings representing JAR file paths. + + Returns: + (bool) True if no .class files are found. + """ + for jar_path in jar_paths: + with zipfile.ZipFile(jar_path) as jar: + if any(name.endswith('.class') for name in jar.namelist()): + return False + return True + + +def _RunD8(dex_cmd, input_paths, output_path): + dex_cmd += ['--output', output_path] + dex_cmd += input_paths + build_utils.CheckOutput(dex_cmd, print_stderr=False) + + +def _EnvWithArtLibPath(binary_path): + """Return an environment dictionary for ART host shared libraries. + + Args: + binary_path: the path to an ART host binary. + + Returns: + An environment dictionary where LD_LIBRARY_PATH has been augmented with the + shared library path for the binary. This assumes that there is a lib/ + directory in the same location as the binary. + """ + lib_path = os.path.join(os.path.dirname(binary_path), 'lib') + env = os.environ.copy() + libraries = [l for l in env.get('LD_LIBRARY_PATH', '').split(':') if l] + libraries.append(lib_path) + env['LD_LIBRARY_PATH'] = ':'.join(libraries) + return env + + +def _CreateBinaryProfile(text_profile, input_dex, profman_path, temp_dir): + """Create a binary profile for dexlayout. + + Args: + text_profile: The ART text profile that will be converted to a binary + profile. + input_dex: The input dex file to layout. + profman_path: Path to the profman binary. + temp_dir: Directory to work in. + + Returns: + The name of the binary profile, which will live in temp_dir. 
+ """ + binary_profile = os.path.join( + temp_dir, 'binary_profile-for-' + os.path.basename(text_profile)) + open(binary_profile, 'w').close() # Touch binary_profile. + profman_cmd = [profman_path, + '--apk=' + input_dex, + '--dex-location=' + input_dex, + '--create-profile-from=' + text_profile, + '--reference-profile-file=' + binary_profile] + build_utils.CheckOutput( + profman_cmd, + env=_EnvWithArtLibPath(profman_path), + stderr_filter=lambda output: + build_utils.FilterLines(output, '|'.join( + [r'Could not find (method_id|proto_id|name):', + r'Could not create type list']))) + return binary_profile + + +def _LayoutDex(binary_profile, input_dex, dexlayout_path, temp_dir): + """Layout a dexfile using a profile. + + Args: + binary_profile: An ART binary profile, eg output from _CreateBinaryProfile. + input_dex: The dex file used to create the binary profile. + dexlayout_path: Path to the dexlayout binary. + temp_dir: Directory to work in. + + Returns: + List of output files produced by dexlayout. This will be one if the input + was a single dexfile, or multiple files if the input was a multidex + zip. These output files are located in temp_dir. + """ + dexlayout_output_dir = os.path.join(temp_dir, 'dexlayout_output') + os.mkdir(dexlayout_output_dir) + dexlayout_cmd = [ dexlayout_path, + '-u', # Update checksum + '-p', binary_profile, + '-w', dexlayout_output_dir, + input_dex ] + build_utils.CheckOutput( + dexlayout_cmd, + env=_EnvWithArtLibPath(dexlayout_path), + stderr_filter=lambda output: + build_utils.FilterLines(output, + r'Can.t mmap dex file.*please zipalign')) + output_files = os.listdir(dexlayout_output_dir) + if not output_files: + raise Exception('dexlayout unexpectedly produced no output') + return [os.path.join(dexlayout_output_dir, f) for f in output_files] + + +def _ZipMultidex(file_dir, dex_files): + """Zip dex files into a multidex. + + Args: + file_dir: The directory into which to write the output. 
+ dex_files: The dexfiles forming the multizip. Their names must end with + classes.dex, classes2.dex, ... + + Returns: + The name of the multidex file, which will live in file_dir. + """ + ordered_files = [] # List of (archive name, file name) + for f in dex_files: + if f.endswith('classes.dex.zip'): + ordered_files.append(('classes.dex', f)) + break + if not ordered_files: + raise Exception('Could not find classes.dex multidex file in %s', + dex_files) + for dex_idx in xrange(2, len(dex_files) + 1): + archive_name = 'classes%d.dex' % dex_idx + for f in dex_files: + if f.endswith(archive_name): + ordered_files.append((archive_name, f)) + break + else: + raise Exception('Could not find classes%d.dex multidex file in %s', + dex_files) + if len(set(f[1] for f in ordered_files)) != len(ordered_files): + raise Exception('Unexpected clashing filenames for multidex in %s', + dex_files) + + zip_name = os.path.join(file_dir, 'multidex_classes.zip') + build_utils.DoZip(((archive_name, os.path.join(file_dir, file_name)) + for archive_name, file_name in ordered_files), + zip_name) + return zip_name + + +def _ZipSingleDex(dex_file, zip_name): + """Zip up a single dex file. + + Args: + dex_file: A dexfile whose name is ignored. + zip_name: The output file in which to write the zip. + """ + build_utils.DoZip([('classes.dex', dex_file)], zip_name) + + +def main(args): + options, paths = _ParseArgs(args) + if ((options.proguard_enabled == 'true' + and options.configuration_name == 'Release') + or (options.debug_build_proguard_enabled == 'true' + and options.configuration_name == 'Debug')): + paths = [options.proguard_enabled_input_path] + + if options.inputs: + paths += options.inputs + + if options.excluded_paths: + # Excluded paths are relative to the output directory. 
+ exclude_paths = options.excluded_paths + paths = [p for p in paths if not + os.path.relpath(p, options.output_directory) in exclude_paths] + + input_paths = list(paths) + if options.multi_dex: + input_paths.append(options.main_dex_list_path) + + dex_cmd = ['java', '-jar', options.d8_jar_path, '--no-desugaring'] + if options.multi_dex: + dex_cmd += ['--main-dex-list', options.main_dex_list_path] + if options.release: + dex_cmd += ['--release'] + if options.min_api: + dex_cmd += ['--min-api', options.min_api] + + is_dex = options.dex_path.endswith('.dex') + is_jar = options.dex_path.endswith('.jar') + + with build_utils.TempDir() as tmp_dir: + tmp_dex_dir = os.path.join(tmp_dir, 'tmp_dex_dir') + os.mkdir(tmp_dex_dir) + if is_jar and _NoClassFiles(paths): + # Handle case where no classfiles are specified in inputs + # by creating an empty JAR + with zipfile.ZipFile(options.dex_path, 'w') as outfile: + outfile.comment = 'empty' + else: + # .dex files can't specify a name for D8. Instead, we output them to a + # temp directory then move them after the command has finished running + # (see _MoveTempDexFile). For other files, tmp_dex_dir is None. 
+ _RunD8(dex_cmd, paths, tmp_dex_dir) + + tmp_dex_output = os.path.join(tmp_dir, 'tmp_dex_output') + if is_dex: + _MoveTempDexFile(tmp_dex_dir, tmp_dex_output) + else: + # d8 supports outputting to a .zip, but does not have deterministic file + # ordering: https://issuetracker.google.com/issues/119945929 + build_utils.ZipDir(tmp_dex_output, tmp_dex_dir) + + if options.dexlayout_profile: + if options.proguard_mapping_path is not None: + matching_profile = os.path.join(tmp_dir, 'obfuscated_profile') + convert_dex_profile.ObfuscateProfile( + options.dexlayout_profile, tmp_dex_output, + options.proguard_mapping_path, options.dexdump_path, + matching_profile) + else: + logging.warning('No obfuscation for %s', options.dexlayout_profile) + matching_profile = options.dexlayout_profile + binary_profile = _CreateBinaryProfile(matching_profile, tmp_dex_output, + options.profman_path, tmp_dir) + output_files = _LayoutDex(binary_profile, tmp_dex_output, + options.dexlayout_path, tmp_dir) + target = None + if len(output_files) > 1: + target = _ZipMultidex(tmp_dir, output_files) + else: + output = output_files[0] + if not zipfile.is_zipfile(output): + target = os.path.join(tmp_dir, 'dex_classes.zip') + _ZipSingleDex(output, target) + else: + target = output + shutil.move(os.path.join(tmp_dir, target), tmp_dex_output) + + # The dex file is complete and can be moved out of tmp_dir. 
+ shutil.move(tmp_dex_output, options.dex_path) + + build_utils.WriteDepfile( + options.depfile, options.dex_path, input_paths, add_pydeps=False) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/dex.pydeps b/deps/v8/build/android/gyp/dex.pydeps new file mode 100644 index 0000000000..e5ecbd2335 --- /dev/null +++ b/deps/v8/build/android/gyp/dex.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dex.pydeps build/android/gyp/dex.py +../../gn_helpers.py +../convert_dex_profile.py +dex.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/dexsplitter.py b/deps/v8/build/android/gyp/dexsplitter.py new file mode 100755 index 0000000000..a0761581bd --- /dev/null +++ b/deps/v8/build/android/gyp/dexsplitter.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +# +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
import argparse
import os
import shutil
import sys
import zipfile

from util import build_utils


def _ParseOptions(args):
  """Parses and validates dexsplitter command-line options.

  Returns:
    argparse options, augmented with |features|: a dict mapping feature name
    to the GN list of jars comprising that feature.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--depfile', help='Path to the depfile to write to.')
  parser.add_argument('--stamp', help='Path to stamp to mark when finished.')
  parser.add_argument('--r8-path', help='Path to the r8.jar to use.')
  parser.add_argument(
      '--input-dex-zip', help='Path to dex files in zip being split.')
  parser.add_argument(
      '--proguard-mapping-file', help='Path to proguard mapping file.')
  parser.add_argument(
      '--feature-name',
      action='append',
      dest='feature_names',
      help='The name of the feature module.')
  parser.add_argument(
      '--feature-jars',
      action='append',
      help='GN list of path to jars which comprise the corresponding feature.')
  parser.add_argument(
      '--dex-dest',
      action='append',
      dest='dex_dests',
      help='Destination for dex file of the corresponding feature.')
  options = parser.parse_args(args)

  # The three repeated flags must line up index-by-index.
  assert len(options.feature_names) == len(options.feature_jars) and len(
      options.feature_names) == len(options.dex_dests)
  options.features = {}
  for i, name in enumerate(options.feature_names):
    options.features[name] = build_utils.ParseGnList(options.feature_jars[i])

  return options


def _RunDexsplitter(options, output_dir):
  """Invokes r8's dexsplitter tool, writing per-feature dex into output_dir."""
  cmd = [
      'java',
      '-jar',
      options.r8_path,
      'dexsplitter',
      '--output',
      output_dir,
      '--proguard-map',
      options.proguard_mapping_file,
  ]

  for base_jar in options.features['base']:
    cmd += ['--base-jar', base_jar]

  # Jars shared with the base module must only be passed as --base-jar.
  base_jars_lookup = set(options.features['base'])
  for feature in options.features:
    if feature == 'base':
      continue
    for feature_jar in options.features[feature]:
      if feature_jar not in base_jars_lookup:
        cmd += ['--feature-jar', feature_jar + ':' + feature]

  with build_utils.TempDir() as temp_dir:
    unzipped_files = build_utils.ExtractAll(options.input_dex_zip, temp_dir)
    for file_name in unzipped_files:
      cmd += ['--input', file_name]
    build_utils.CheckOutput(cmd)


def main(args):
  args = build_utils.ExpandFileArgs(args)
  options = _ParseOptions(args)

  input_paths = []
  # .values() (not .itervalues()) behaves identically on Python 2 and also
  # works on Python 3.
  for feature_jars in options.features.values():
    for feature_jar in feature_jars:
      input_paths.append(feature_jar)

  with build_utils.TempDir() as dexsplitter_output_dir:
    curr_location_to_dest = []
    if len(options.features) == 1:
      # Don't run dexsplitter since it needs at least 1 feature module.
      curr_location_to_dest.append((options.input_dex_zip,
                                    options.dex_dests[0]))
    else:
      _RunDexsplitter(options, dexsplitter_output_dir)

      for i, dest in enumerate(options.dex_dests):
        module_dex_file = os.path.join(dexsplitter_output_dir,
                                       options.feature_names[i], 'classes.dex')
        if os.path.exists(module_dex_file):
          curr_location_to_dest.append((module_dex_file, dest))
        else:
          module_dex_file += '.zip'
          assert os.path.exists(
              module_dex_file), 'Dexsplitter tool output not found.'
          # BUG FIX: previously a second '.zip' was appended here, producing a
          # 'classes.dex.zip.zip' path that can never exist.
          curr_location_to_dest.append((module_dex_file, dest))

    for curr_location, dest in curr_location_to_dest:
      with build_utils.AtomicOutput(dest) as f:
        if curr_location.endswith('.zip'):
          if dest.endswith('.zip'):
            shutil.copy(curr_location, f.name)
          else:
            with zipfile.ZipFile(curr_location, 'r') as z:
              namelist = z.namelist()
              assert len(namelist) == 1, (
                  'Unzipping to single dex file, but not single dex file in ' +
                  options.input_dex_zip)
              z.extract(namelist[0], f.name)
        else:
          if dest.endswith('.zip'):
            build_utils.ZipDir(
                f.name, os.path.abspath(os.path.join(curr_location, os.pardir)))
          else:
            shutil.move(curr_location, f.name)

  build_utils.Touch(options.stamp)
  build_utils.WriteDepfile(options.depfile, options.stamp, inputs=input_paths)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
+ +"""Creates an Android .aar file.""" + +import argparse +import os +import posixpath +import shutil +import sys +import tempfile +import zipfile + +from util import build_utils + + +_ANDROID_BUILD_DIR = os.path.dirname(os.path.dirname(__file__)) + + +def _MergeRTxt(r_paths): + """Merging the given R.txt files and returns them as a string.""" + all_lines = set() + for r_path in r_paths: + with open(r_path) as f: + all_lines.update(f.readlines()) + return ''.join(sorted(all_lines)) + + +def _MergeProguardConfigs(proguard_configs): + """Merging the given proguard config files and returns them as a string.""" + ret = [] + for config in proguard_configs: + ret.append('# FROM: {}'.format(config)) + with open(config) as f: + ret.append(f.read()) + return '\n'.join(ret) + + +def _AddResources(aar_zip, resource_zips): + """Adds all resource zips to the given aar_zip. + + Ensures all res/values/* files have unique names by prefixing them. + """ + for i, path in enumerate(resource_zips): + with zipfile.ZipFile(path) as res_zip: + for info in res_zip.infolist(): + data = res_zip.read(info) + dirname, basename = posixpath.split(info.filename) + if 'values' in dirname: + basename = '{}_{}'.format(basename, i) + info.filename = posixpath.join(dirname, basename) + info.filename = posixpath.join('res', info.filename) + aar_zip.writestr(info, data) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + parser.add_argument('--output', required=True, help='Path to output aar.') + parser.add_argument('--jars', required=True, help='GN list of jar inputs.') + parser.add_argument('--dependencies-res-zips', required=True, + help='GN list of resource zips') + parser.add_argument('--r-text-files', required=True, + help='GN list of R.txt files to merge') + parser.add_argument('--proguard-configs', required=True, + help='GN list of ProGuard flag files to merge.') + parser.add_argument( + 
'--android-manifest', + help='Path to AndroidManifest.xml to include.', + default=os.path.join(_ANDROID_BUILD_DIR, 'AndroidManifest.xml')) + parser.add_argument('--native-libraries', default='', + help='GN list of native libraries. If non-empty then ' + 'ABI must be specified.') + parser.add_argument('--abi', + help='ABI (e.g. armeabi-v7a) for native libraries.') + + options = parser.parse_args(args) + + if options.native_libraries and not options.abi: + parser.error('You must provide --abi if you have native libs') + + options.jars = build_utils.ParseGnList(options.jars) + options.dependencies_res_zips = build_utils.ParseGnList( + options.dependencies_res_zips) + options.r_text_files = build_utils.ParseGnList(options.r_text_files) + options.proguard_configs = build_utils.ParseGnList(options.proguard_configs) + options.native_libraries = build_utils.ParseGnList(options.native_libraries) + + with tempfile.NamedTemporaryFile(delete=False) as staging_file: + try: + with zipfile.ZipFile(staging_file.name, 'w') as z: + build_utils.AddToZipHermetic( + z, 'AndroidManifest.xml', src_path=options.android_manifest) + + with tempfile.NamedTemporaryFile() as jar_file: + build_utils.MergeZips(jar_file.name, options.jars) + build_utils.AddToZipHermetic(z, 'classes.jar', src_path=jar_file.name) + + build_utils.AddToZipHermetic( + z, 'R.txt', data=_MergeRTxt(options.r_text_files)) + build_utils.AddToZipHermetic(z, 'public.txt', data='') + + if options.proguard_configs: + build_utils.AddToZipHermetic( + z, 'proguard.txt', + data=_MergeProguardConfigs(options.proguard_configs)) + + _AddResources(z, options.dependencies_res_zips) + + for native_library in options.native_libraries: + libname = os.path.basename(native_library) + build_utils.AddToZipHermetic( + z, os.path.join('jni', options.abi, libname), + src_path=native_library) + except: + os.unlink(staging_file.name) + raise + shutil.move(staging_file.name, options.output) + + if options.depfile: + all_inputs = (options.jars + 
options.dependencies_res_zips + + options.r_text_files + options.proguard_configs) + build_utils.WriteDepfile(options.depfile, options.output, all_inputs, + add_pydeps=False) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/dist_aar.pydeps b/deps/v8/build/android/gyp/dist_aar.pydeps new file mode 100644 index 0000000000..da5ea8da23 --- /dev/null +++ b/deps/v8/build/android/gyp/dist_aar.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/dist_aar.pydeps build/android/gyp/dist_aar.py +../../gn_helpers.py +dist_aar.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/emma_instr.py b/deps/v8/build/android/gyp/emma_instr.py new file mode 100755 index 0000000000..cbe913eb26 --- /dev/null +++ b/deps/v8/build/android/gyp/emma_instr.py @@ -0,0 +1,271 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Instruments classes and jar files. + +This script corresponds to the 'emma_instr' action in the java build process. +Depending on whether emma_instrument is set, the 'emma_instr' action will either +call the instrument command or the copy command. + +Possible commands are: +- instrument_jar: Accepts a jar and instruments it using emma.jar. +- copy: Called when EMMA coverage is not enabled. This allows us to make + this a required step without necessarily instrumenting on every build. + Also removes any stale coverage files. +""" + +import collections +import json +import optparse +import os +import shutil +import sys +import tempfile + +from util import build_utils + + +def _AddCommonOptions(option_parser): + """Adds common options to |option_parser|.""" + option_parser.add_option('--input-path', + help=('Path to input file(s). 
Either the classes ' + 'directory, or the path to a jar.')) + option_parser.add_option('--output-path', + help=('Path to output final file(s) to. Either the ' + 'final classes directory, or the directory in ' + 'which to place the instrumented/copied jar.')) + option_parser.add_option('--coverage-file', + help='File to create with coverage metadata.') + option_parser.add_option('--sources-list-file', + help='File to create with the list of sources.') + + +def _AddInstrumentOptions(option_parser): + """Adds options related to instrumentation to |option_parser|.""" + _AddCommonOptions(option_parser) + option_parser.add_option('--source-dirs', + help='Space separated list of source directories. ' + 'source-files should not be specified if ' + 'source-dirs is specified') + option_parser.add_option('--source-files', + help='Space separated list of source files. ' + 'source-dirs should not be specified if ' + 'source-files is specified') + option_parser.add_option('--java-sources-file', + help='File containing newline-separated .java paths') + option_parser.add_option('--src-root', + help='Root of the src repository.') + option_parser.add_option('--emma-jar', + help='Path to emma.jar.') + option_parser.add_option( + '--filter-string', default='', + help=('Filter string consisting of a list of inclusion/exclusion ' + 'patterns separated with whitespace and/or comma.')) + + +def _RunCopyCommand(_command, options, _, option_parser): + """Copies the jar from input to output locations. + + Also removes any old coverage/sources file. + + Args: + command: String indicating the command that was received to trigger + this function. + options: optparse options dictionary. + args: List of extra args from optparse. + option_parser: optparse.OptionParser object. + + Returns: + An exit code. 
+ """ + if not (options.input_path and options.output_path and + options.coverage_file and options.sources_list_file): + option_parser.error('All arguments are required.') + + if os.path.exists(options.coverage_file): + os.remove(options.coverage_file) + if os.path.exists(options.sources_list_file): + os.remove(options.sources_list_file) + + shutil.copy(options.input_path, options.output_path) + + +def _GetSourceDirsFromSourceFiles(source_files): + """Returns list of directories for the files in |source_files|. + + Args: + source_files: List of source files. + + Returns: + List of source directories. + """ + return list(set(os.path.dirname(source_file) for source_file in source_files)) + + +def _CreateSourcesListFile(source_dirs, sources_list_file, src_root): + """Adds all normalized source directories to |sources_list_file|. + + Args: + source_dirs: List of source directories. + sources_list_file: File into which to write the JSON list of sources. + src_root: Root which sources added to the file should be relative to. + + Returns: + An exit code. + """ + src_root = os.path.abspath(src_root) + relative_sources = [] + for s in source_dirs: + abs_source = os.path.abspath(s) + if abs_source[:len(src_root)] != src_root: + print ('Error: found source directory not under repository root: %s %s' + % (abs_source, src_root)) + return 1 + rel_source = os.path.relpath(abs_source, src_root) + + relative_sources.append(rel_source) + + with open(sources_list_file, 'w') as f: + json.dump(relative_sources, f) + + +def _RunInstrumentCommand(_command, options, _, option_parser): + """Instruments jar files using EMMA. + + Args: + command: String indicating the command that was received to trigger + this function. + options: optparse options dictionary. + args: List of extra args from optparse. + option_parser: optparse.OptionParser object. + + Returns: + An exit code. 
+ """ + if not (options.input_path and options.output_path and + options.coverage_file and options.sources_list_file and + (options.source_files or options.source_dirs or + options.java_sources_file) and + options.src_root and options.emma_jar): + option_parser.error('All arguments are required.') + + if os.path.exists(options.coverage_file): + os.remove(options.coverage_file) + temp_dir = tempfile.mkdtemp() + try: + cmd = ['java', '-cp', options.emma_jar, + 'emma', 'instr', + '-ip', options.input_path, + '-ix', options.filter_string, + '-d', temp_dir, + '-out', options.coverage_file, + '-m', 'fullcopy'] + build_utils.CheckOutput(cmd) + + # File is not generated when filter_string doesn't match any files. + if not os.path.exists(options.coverage_file): + build_utils.Touch(options.coverage_file) + + temp_jar_dir = os.path.join(temp_dir, 'lib') + jars = os.listdir(temp_jar_dir) + if len(jars) != 1: + print('Error: multiple output files in: %s' % (temp_jar_dir)) + return 1 + + # Delete output_path first to avoid modifying input_path in the case where + # input_path is a hardlink to output_path. http://crbug.com/571642 + if os.path.exists(options.output_path): + os.unlink(options.output_path) + shutil.move(os.path.join(temp_jar_dir, jars[0]), options.output_path) + finally: + shutil.rmtree(temp_dir) + + if options.source_dirs: + source_dirs = build_utils.ParseGnList(options.source_dirs) + else: + source_files = [] + if options.source_files: + source_files += build_utils.ParseGnList(options.source_files) + if options.java_sources_file: + source_files.extend( + build_utils.ReadSourcesList(options.java_sources_file)) + source_dirs = _GetSourceDirsFromSourceFiles(source_files) + + # TODO(GYP): In GN, we are passed the list of sources, detecting source + # directories, then walking them to re-establish the list of sources. + # This can obviously be simplified! 
+ _CreateSourcesListFile(source_dirs, options.sources_list_file, + options.src_root) + + return 0 + + +CommandFunctionTuple = collections.namedtuple( + 'CommandFunctionTuple', ['add_options_func', 'run_command_func']) +VALID_COMMANDS = { + 'copy': CommandFunctionTuple(_AddCommonOptions, + _RunCopyCommand), + 'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions, + _RunInstrumentCommand), +} + + +class CommandOptionParser(optparse.OptionParser): + """Wrapper class for OptionParser to help with listing commands.""" + + def __init__(self, *args, **kwargs): + """Creates a CommandOptionParser. + + Args: + commands_dict: A dictionary mapping command strings to an object defining + - add_options_func: Adds options to the option parser + - run_command_func: Runs the command itself. + example: An example command. + everything else: Passed to optparse.OptionParser contructor. + """ + self.commands_dict = kwargs.pop('commands_dict', {}) + self.example = kwargs.pop('example', '') + if not 'usage' in kwargs: + kwargs['usage'] = 'Usage: %prog <command> [options]' + optparse.OptionParser.__init__(self, *args, **kwargs) + + #override + def get_usage(self): + normal_usage = optparse.OptionParser.get_usage(self) + command_list = self.get_command_list() + example = self.get_example() + return self.expand_prog_name(normal_usage + example + command_list) + + #override + def get_command_list(self): + if self.commands_dict.keys(): + return '\nCommands:\n %s\n' % '\n '.join( + sorted(self.commands_dict.keys())) + return '' + + def get_example(self): + if self.example: + return '\nExample:\n %s\n' % self.example + return '' + + +def main(): + option_parser = CommandOptionParser(commands_dict=VALID_COMMANDS) + argv = sys.argv + + if len(argv) < 2 or argv[1] not in option_parser.commands_dict: + # Parse args first, if this is '--help', optparse will print help and exit + option_parser.parse_args(argv) + option_parser.error('Invalid command.') + + cmd = 
option_parser.commands_dict[argv[1]] + cmd.add_options_func(option_parser) + options, args = option_parser.parse_args(argv) + return cmd.run_command_func(argv[1], options, args, option_parser) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/emma_instr.pydeps b/deps/v8/build/android/gyp/emma_instr.pydeps new file mode 100644 index 0000000000..88f752a0f9 --- /dev/null +++ b/deps/v8/build/android/gyp/emma_instr.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/emma_instr.pydeps build/android/gyp/emma_instr.py +../../gn_helpers.py +emma_instr.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/extract_unwind_tables.py b/deps/v8/build/android/gyp/extract_unwind_tables.py new file mode 100755 index 0000000000..37a8421449 --- /dev/null +++ b/deps/v8/build/android/gyp/extract_unwind_tables.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Extracts the unwind tables in from breakpad symbol files + +Runs dump_syms on the given binary file and extracts the CFI data into the +given output file. +The output file is a binary file containing CFI rows ordered based on function +address. The output file only contains rows that match the most popular rule +type in CFI table, to reduce the output size and specify data in compact format. +See doc https://github.com/google/breakpad/blob/master/docs/symbol_files.md. +1. The CFA rules should be of postfix form "SP <val> +". +2. The RA rules should be of postfix form "CFA <val> + ^". +Note: breakpad represents dereferencing address with '^' operator. + +The output file has 2 tables UNW_INDEX and UNW_DATA, inspired from ARM EHABI +format. 
The first table contains function addresses and an index into the +UNW_DATA table. The second table contains one or more rows for the function +unwind information. + +The output file starts with 4 bytes counting the size of UNW_INDEX in bytes. +Then UNW_INDEX table and UNW_DATA table. + +UNW_INDEX contains two columns of N rows each, where N is the number of +functions. + 1. First column 4 byte rows of all the function start address as offset from + start of the binary, in sorted order. + 2. For each function addr, the second column contains 2 byte indices in order. + The indices are offsets (in count of 2 bytes) of the CFI data from start of + UNW_DATA. +The last entry in the table always contains CANT_UNWIND index to specify the +end address of the last function. + +UNW_DATA contains data of all the functions. Each function data contains N rows. +The data found at the address pointed from UNW_INDEX will be: + 2 bytes: N - number of rows that belong to current function. + N * 4 bytes: N rows of data. 16 bits : Address offset from function start. + 14 bits : CFA offset / 4. + 2 bits : RA offset / 4. + +The function is not added to the unwind table in following conditions: +C1. If length of the function code (number of instructions) is greater than + 0xFFFF (2 byte address span). This is because we use 16 bits to refer to + offset of instruction from start of the address. +C2. If the function moves the SP by more than 0xFFFF bytes. This is because we + use 14 bits to denote CFA offset (last 2 bits are 0). +C3. If the Return Address is stored at an offset >= 16 from the CFA. Some + functions which have variable arguments can have offset upto 16. + TODO(ssid): We can actually store offset 16 by subtracting 1 from RA/4 since + we never have 0. +C4: Some functions do not have unwind information defined in dwarf info. These + functions have index value CANT_UNWIND(0xFFFF) in UNW_INDEX table. 
+ + +Usage: + extract_unwind_tables.py --input_path [root path to unstripped chrome.so] + --output_path [output path] --dump_syms_path [path to dump_syms binary] +""" + +import argparse +import re +import struct +import subprocess +import sys +import tempfile + + +_CFA_REG = '.cfa' +_RA_REG = '.ra' + +_ADDR_ENTRY = 0 +_LENGTH_ENTRY = 1 + +_CANT_UNWIND = 0xFFFF + + +def _Write4Bytes(output_file, val): + """Writes a 32 bit unsigned integer to the given output file.""" + output_file.write(struct.pack('<L', val)); + + +def _Write2Bytes(output_file, val): + """Writes a 16 bit unsigned integer to the given output file.""" + output_file.write(struct.pack('<H', val)); + + +def _FindRuleForRegister(cfi_row, reg): + """Returns the postfix expression as string for a given register. + + Breakpad CFI row format specifies rules for unwinding each register in postfix + expression form separated by space. Each rule starts with register name and a + colon. Eg: "CFI R1: <rule> R2: <rule>". + """ + out = [] + found_register = False + for part in cfi_row: + if found_register: + if part[-1] == ':': + break + out.append(part) + elif part == reg + ':': + found_register = True + return ' '.join(out) + + +def _GetCfaAndRaOffset(cfi_row): + """Returns a tuple with 2 numbers (cfa_offset, ra_offset). + + Returns right values if rule matches the predefined criteria. Returns (0, 0) + otherwise. The criteria for CFA rule is postfix form "SP <val> +" and RA rule + is postfix form "CFA -<val> + ^". + """ + cfa_offset = 0 + ra_offset = 0 + cfa_rule = _FindRuleForRegister(cfi_row, _CFA_REG) + ra_rule = _FindRuleForRegister(cfi_row, _RA_REG) + if cfa_rule and re.match(r'sp [0-9]+ \+', cfa_rule): + cfa_offset = int(cfa_rule.split()[1], 10) + if ra_rule: + if not re.match(r'.cfa -[0-9]+ \+ \^', ra_rule): + return (0, 0) + ra_offset = -1 * int(ra_rule.split()[1], 10) + return (cfa_offset, ra_offset) + + +def _GetAllCfiRows(symbol_file): + """Returns parsed CFI data from given symbol_file. 
+ + Each entry in the cfi data dictionary returned is a map from function start + address to array of function rows, starting with FUNCTION type, followed by + one or more CFI rows. + """ + cfi_data = {} + current_func = [] + for line in symbol_file: + if 'STACK CFI' not in line: + continue + + parts = line.split() + data = {} + if parts[2] == 'INIT': + # Add the previous function to the output + if len(current_func) > 1: + cfi_data[current_func[0][_ADDR_ENTRY]] = current_func + current_func = [] + + # The function line is of format "STACK CFI INIT <addr> <length> ..." + data[_ADDR_ENTRY] = int(parts[3], 16) + data[_LENGTH_ENTRY] = int(parts[4], 16) + + # Condition C1: Skip if length is large. + if data[_LENGTH_ENTRY] == 0 or data[_LENGTH_ENTRY] > 0xffff: + continue # Skip the current function. + else: + # The current function is skipped. + if len(current_func) == 0: + continue + + # The CFI row is of format "STACK CFI <addr> .cfa: <expr> .ra: <expr> ..." + data[_ADDR_ENTRY] = int(parts[2], 16) + (data[_CFA_REG], data[_RA_REG]) = _GetCfaAndRaOffset(parts) + + # Condition C2 and C3: Skip based on limits on offsets. + if data[_CFA_REG] == 0 or data[_RA_REG] >= 16 or data[_CFA_REG] > 0xffff: + current_func = [] + continue + assert data[_CFA_REG] % 4 == 0 + # Since we skipped functions with code size larger than 0xffff, we should + # have no function offset larger than the same value. + assert data[_ADDR_ENTRY] - current_func[0][_ADDR_ENTRY] < 0xffff + + if data[_ADDR_ENTRY] == 0: + # Skip current function, delete all previous entries. + current_func = [] + continue + assert data[_ADDR_ENTRY] % 2 == 0 + current_func.append(data) + + # Condition C4: Skip function without CFI rows. 
+ if len(current_func) > 1: + cfi_data[current_func[0][_ADDR_ENTRY]] = current_func + return cfi_data + + +def _WriteCfiData(cfi_data, out_file): + """Writes the CFI data in defined format to out_file.""" + # Stores the final data that will be written to UNW_DATA table, in order + # with 2 byte items. + unw_data = [] + + # Represent all the CFI data of functions as set of numbers and map them to an + # index in the |unw_data|. This index is later written to the UNW_INDEX table + # for each function. This map is used to find index of the data for functions. + data_to_index = {} + # Store mapping between the functions to the index. + func_addr_to_index = {} + previous_func_end = 0 + for addr, function in sorted(cfi_data.iteritems()): + # Add an empty function entry when functions CFIs are missing between 2 + # functions. + if previous_func_end != 0 and addr - previous_func_end > 4: + func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND + previous_func_end = addr + cfi_data[addr][0][_LENGTH_ENTRY] + + assert len(function) > 1 + func_data_arr = [] + func_data = 0 + # The first row contains the function address and length. The rest of the + # rows have CFI data. Create function data array as given in the format. + for row in function[1:]: + addr_offset = row[_ADDR_ENTRY] - addr + cfa_offset = (row[_CFA_REG]) | (row[_RA_REG] / 4) + + func_data_arr.append(addr_offset) + func_data_arr.append(cfa_offset) + + # Consider all the rows in the data as one large integer and add it as a key + # to the |data_to_index|. + for data in func_data_arr: + func_data = (func_data << 16) | data + + row_count = len(func_data_arr) / 2 + if func_data not in data_to_index: + # When data is not found, create a new index = len(unw_data), and write + # the data to |unw_data|. + index = len(unw_data) + data_to_index[func_data] = index + unw_data.append(row_count) + for row in func_data_arr: + unw_data.append(row) + else: + # If the data was found, then use the same index for the function. 
+ index = data_to_index[func_data] + assert row_count == unw_data[index] + func_addr_to_index[addr] = data_to_index[func_data] + + # Mark the end of the last function entry. + func_addr_to_index[previous_func_end + 2] = _CANT_UNWIND + + # Write the size of UNW_INDEX file in bytes. + _Write4Bytes(out_file, len(func_addr_to_index) * 6) + + # Write the UNW_INDEX table. First list of addresses and then indices. + sorted_unw_index = sorted(func_addr_to_index.iteritems()) + for addr, index in sorted_unw_index: + _Write4Bytes(out_file, addr) + for addr, index in sorted_unw_index: + _Write2Bytes(out_file, index) + + # Write the UNW_DATA table. + for data in unw_data: + _Write2Bytes(out_file, data) + + +def _ParseCfiData(sym_file, output_path): + with open(sym_file, 'r') as f: + cfi_data = _GetAllCfiRows(f) + + with open(output_path, 'wb') as out_file: + _WriteCfiData(cfi_data, out_file) + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--input_path', required=True, + help='The input path of the unstripped binary') + parser.add_argument( + '--output_path', required=True, + help='The path of the output file') + parser.add_argument( + '--dump_syms_path', required=True, + help='The path of the dump_syms binary') + + args = parser.parse_args() + + with tempfile.NamedTemporaryFile() as sym_file: + out = subprocess.call( + ['./' +args.dump_syms_path, args.input_path], stdout=sym_file) + assert not out + sym_file.flush() + _ParseCfiData(sym_file.name, args.output_path) + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/extract_unwind_tables_tests.py b/deps/v8/build/android/gyp/extract_unwind_tables_tests.py new file mode 100755 index 0000000000..02c70eb049 --- /dev/null +++ b/deps/v8/build/android/gyp/extract_unwind_tables_tests.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for extract_unwind_tables.py + +This test suite contains various tests for extracting CFI tables from breakpad +symbol files. +""" + +import optparse +import os +import struct +import sys +import tempfile +import unittest + +import extract_unwind_tables + +sys.path.append(os.path.join(os.path.dirname(__file__), "gyp")) +from util import build_utils + + +class TestExtractUnwindTables(unittest.TestCase): + def testExtractCfi(self): + with tempfile.NamedTemporaryFile() as input_file, \ + tempfile.NamedTemporaryFile() as output_file: + input_file.write(""" +MODULE Linux arm CDE12FE1DF2B37A9C6560B4CBEE056420 lib_chrome.so +INFO CODE_ID E12FE1CD2BDFA937C6560B4CBEE05642 +FILE 0 ../../base/allocator/allocator_check.cc +FILE 1 ../../base/allocator/allocator_extension.cc +FILE 2 ../../base/allocator/allocator_shim.cc +FUNC 1adcb60 54 0 i2d_name_canon +1adcb60 1a 509 17054 +3b94c70 2 69 40 +PUBLIC e17001 0 assist_ranker::(anonymous namespace)::FakePredict::Initialize() +PUBLIC e17005 0 (anonymous namespace)::FileDeleter(base::File) +STACK CFI INIT e17000 4 .cfa: sp 0 + .ra: lr +STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr +STACK CFI 2 .cfa: sp 4 + +STACK CFI 4 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^ +STACK CFI 6 .cfa: sp 16 + +STACK CFI INIT e1a96e 20 .cfa: sp 0 + .ra: lr +STACK CFI e1a970 .cfa: sp 4 + +STACK CFI e1a972 .cfa: sp 12 + .ra: .cfa -8 + ^ r7: .cfa -12 + ^ +STACK CFI e1a974 .cfa: sp 16 + +STACK CFI INIT e1a1e4 b0 .cfa: sp 0 + .ra: lr +STACK CFI e1a1e6 .cfa: sp 16 + .ra: .cfa -4 + ^ r4: .cfa -16 + ^ r5: .cfa -12 + +STACK CFI e1a1e8 .cfa: sp 80 + +STACK CFI INIT 0 4 .cfa: sp 0 + .ra: lr +STACK CFI INIT 3b92e24 3c .cfa: sp 0 + .ra: lr +STACK CFI 3b92e4c .cfa: sp 16 + .ra: .cfa -12 + ^ +STACK CFI INIT e17004 0 .cfa: sp 0 + .ra: lr +STACK CFI e17004 2 .cfa: sp 0 + .ra: lr +STACK CFI INIT 3b92e70 38 .cfa: sp 0 + .ra: lr +STACK CFI 3b92e74 .cfa: sp 8 + 
.ra: .cfa -4 + ^ r4: .cfa -8 + ^ +STACK CFI 3b92e90 .cfa: sp 0 + .ra: .ra r4: r4 +STACK CFI INIT 3b93114 6c .cfa: sp 0 + .ra: lr +STACK CFI 3b93118 .cfa: r7 16 + .ra: .cfa -4 + ^ +STACK CFI INIT 3b92114 6c .cfa: sp 0 + .ra: lr +STACK CFI 3b92118 .cfa: r7 16 + .ra: .cfa -20 + ^ +STACK CFI INIT 3b93214 fffff .cfa: sp 0 + .ra: lr +STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^ +""") + input_file.flush() + extract_unwind_tables._ParseCfiData(input_file.name, output_file.name) + + expected_cfi_data = { + 0xe1a1e4 : [0x2, 0x11, 0x4, 0x50], + 0xe1a296 : [], + 0xe1a96e : [0x2, 0x4, 0x4, 0xe, 0x6, 0x10], + 0xe1a990 : [], + 0x3b92e24: [0x28, 0x13], + 0x3b92e62: [], + } + expected_function_count = len(expected_cfi_data) + + actual_output = [] + with open(output_file.name, 'rb') as f: + while True: + read = f.read(2) + if not read: + break + actual_output.append(struct.unpack('H', read)[0]) + + # First value is size of unw_index table. + unw_index_size = actual_output[1] << 16 | actual_output[0] + # Each function index is 6 bytes data. + self.assertEqual(expected_function_count * 6, unw_index_size) + # |actual_output| is in blocks of 2 bytes. Skip first 4 bytes representing + # size. + unw_index_start = 2 + unw_index_addr_end = unw_index_start + expected_function_count * 2 + unw_index_end = unw_index_addr_end + expected_function_count + unw_index_addr_col = actual_output[unw_index_start : unw_index_addr_end] + unw_index_index_col = actual_output[unw_index_addr_end : unw_index_end] + + unw_data_start = unw_index_end + unw_data = actual_output[unw_data_start:] + + for func_iter in range(0, expected_function_count): + func_addr = (unw_index_addr_col[func_iter * 2 + 1] << 16 | + unw_index_addr_col[func_iter * 2]) + index = unw_index_index_col[func_iter] + # If index is CANT_UNWIND then invalid function. 
+ if index == 0xFFFF: + self.assertEqual(expected_cfi_data[func_addr], []) + continue + + func_start = index + 1 + func_end = func_start + unw_data[index] * 2 + self.assertEquals( + len(expected_cfi_data[func_addr]), func_end - func_start) + func_cfi = unw_data[func_start : func_end] + self.assertEqual(expected_cfi_data[func_addr], func_cfi) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/v8/build/android/gyp/filter_zip.py b/deps/v8/build/android/gyp/filter_zip.py new file mode 100755 index 0000000000..2182042df5 --- /dev/null +++ b/deps/v8/build/android/gyp/filter_zip.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import sys + +from util import build_utils + + +_RESOURCE_CLASSES = [ + "R.class", + "R##*.class", + "Manifest.class", + "Manifest##*.class", +] + + +def _CreatePathTransform(exclude_globs, include_globs, + strip_resource_classes_for): + exclude_globs = list(exclude_globs or []) + if strip_resource_classes_for: + exclude_globs.extend(p.replace('.', '/') + '/' + f + for p in strip_resource_classes_for + for f in _RESOURCE_CLASSES) + def path_transform(path): + # Exclude filters take precedence over include filters. 
+ if build_utils.MatchesGlob(path, exclude_globs): + return None + if include_globs and not build_utils.MatchesGlob(path, include_globs): + return None + return path + + return path_transform + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--input', required=True, + help='Input zip file.') + parser.add_argument('--output', required=True, + help='Output zip file') + parser.add_argument('--exclude-globs', + help='GN list of exclude globs') + parser.add_argument('--include-globs', + help='GN list of include globs') + parser.add_argument('--strip-resource-classes-for', + help='GN list of java package names exclude R.class files in.') + + argv = build_utils.ExpandFileArgs(sys.argv[1:]) + args = parser.parse_args(argv) + + if args.exclude_globs: + args.exclude_globs = build_utils.ParseGnList(args.exclude_globs) + if args.include_globs: + args.include_globs= build_utils.ParseGnList(args.include_globs) + if args.strip_resource_classes_for: + args.strip_resource_classes_for = build_utils.ParseGnList( + args.strip_resource_classes_for) + + path_transform = _CreatePathTransform( + args.exclude_globs, args.include_globs, args.strip_resource_classes_for) + with build_utils.AtomicOutput(args.output) as f: + build_utils.MergeZips( + f.name, [args.input], path_transform=path_transform) + + +if __name__ == '__main__': + main() diff --git a/deps/v8/build/android/gyp/filter_zip.pydeps b/deps/v8/build/android/gyp/filter_zip.pydeps new file mode 100644 index 0000000000..67c989cf88 --- /dev/null +++ b/deps/v8/build/android/gyp/filter_zip.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/filter_zip.pydeps build/android/gyp/filter_zip.py +../../gn_helpers.py +filter_zip.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/finalize_apk.py b/deps/v8/build/android/gyp/finalize_apk.py new file mode 100644 index 0000000000..2440fe40a1 --- 
/dev/null +++ b/deps/v8/build/android/gyp/finalize_apk.py @@ -0,0 +1,32 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Signs and aligns an APK.""" + +import argparse +import shutil +import subprocess +import tempfile + + +def FinalizeApk(apksigner_path, zipalign_path, unsigned_apk_path, + final_apk_path, key_path, key_passwd, key_name): + # Use a tempfile so that Ctrl-C does not leave the file with a fresh mtime + # and a corrupted state. + with tempfile.NamedTemporaryFile() as staging_file: + # v2 signing requires that zipalign happen first. + subprocess.check_output([ + zipalign_path, '-p', '-f', '4', + unsigned_apk_path, staging_file.name]) + subprocess.check_output([ + apksigner_path, 'sign', + '--in', staging_file.name, + '--out', staging_file.name, + '--ks', key_path, + '--ks-key-alias', key_name, + '--ks-pass', 'pass:' + key_passwd, + # Force SHA-1 (makes signing faster; insecure is fine for local builds). + '--min-sdk-version', '1', + ]) + shutil.move(staging_file.name, final_apk_path) + staging_file.delete = False diff --git a/deps/v8/build/android/gyp/find.py b/deps/v8/build/android/gyp/find.py new file mode 100755 index 0000000000..a9f1d49855 --- /dev/null +++ b/deps/v8/build/android/gyp/find.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Finds files in directories. 
+""" + +import fnmatch +import optparse +import os +import sys + + +def main(argv): + parser = optparse.OptionParser() + parser.add_option('--pattern', default='*', help='File pattern to match.') + options, directories = parser.parse_args(argv) + + for d in directories: + if not os.path.exists(d): + print >> sys.stderr, '%s does not exist' % d + return 1 + for root, _, filenames in os.walk(d): + for f in fnmatch.filter(filenames, options.pattern): + print os.path.join(root, f) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/find_sun_tools_jar.py b/deps/v8/build/android/gyp/find_sun_tools_jar.py new file mode 100755 index 0000000000..7cd4c33984 --- /dev/null +++ b/deps/v8/build/android/gyp/find_sun_tools_jar.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""This finds the java distribution's tools.jar and copies it somewhere. +""" + +import argparse +import os +import re +import shutil +import sys + +from util import build_utils + +RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]') + +def main(): + parser = argparse.ArgumentParser(description='Find Sun Tools Jar') + parser.add_argument('--depfile', + help='Path to depfile. 
This must be specified as the ' + 'action\'s first output.') + parser.add_argument('--output', required=True) + args = parser.parse_args() + + sun_tools_jar_path = FindSunToolsJarPath() + + if sun_tools_jar_path is None: + raise Exception("Couldn\'t find tools.jar") + + # Using copyfile instead of copy() because copy() calls copymode() + # We don't want the locked mode because we may copy over this file again + shutil.copyfile(sun_tools_jar_path, args.output) + + if args.depfile: + build_utils.WriteDepfile(args.depfile, args.output, [sun_tools_jar_path]) + + +def FindSunToolsJarPath(): + # This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7 + stdout = build_utils.CheckOutput( + ["java", "-verbose", "-version"], print_stderr=False) + for ln in stdout.splitlines(): + match = RT_JAR_FINDER.match(ln) + if match: + return os.path.join(match.group(1), 'lib', 'tools.jar') + + return None + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/gcc_preprocess.py b/deps/v8/build/android/gyp/gcc_preprocess.py new file mode 100755 index 0000000000..8b3444c2b0 --- /dev/null +++ b/deps/v8/build/android/gyp/gcc_preprocess.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import optparse +import os +import sys + +from util import build_utils + +def DoGcc(options): + build_utils.MakeDirectory(os.path.dirname(options.output)) + + gcc_cmd = [ 'gcc' ] # invoke host gcc. + if options.defines: + gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), [])) + + with build_utils.AtomicOutput(options.output) as f: + gcc_cmd.extend([ + '-E', # stop after preprocessing. + '-D', 'ANDROID', # Specify ANDROID define for pre-processor. + '-x', 'c-header', # treat sources as C header files + '-P', # disable line markers, i.e. 
'#line 309' + '-I', options.include_path, + '-o', f.name, + options.template + ]) + + build_utils.CheckOutput(gcc_cmd) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--include-path', help='Include path for gcc.') + parser.add_option('--template', help='Path to template.') + parser.add_option('--output', help='Path for generated file.') + parser.add_option('--defines', help='Pre-defines macros', action='append') + + options, _ = parser.parse_args(args) + + DoGcc(options) + + if options.depfile: + build_utils.WriteDepfile(options.depfile, options.output, add_pydeps=False) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/gcc_preprocess.pydeps b/deps/v8/build/android/gyp/gcc_preprocess.pydeps new file mode 100644 index 0000000000..64e776b633 --- /dev/null +++ b/deps/v8/build/android/gyp/gcc_preprocess.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/gcc_preprocess.pydeps build/android/gyp/gcc_preprocess.py +../../gn_helpers.py +gcc_preprocess.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/generate_android_wrapper.py b/deps/v8/build/android/gyp/generate_android_wrapper.py new file mode 100755 index 0000000000..f8e1815324 --- /dev/null +++ b/deps/v8/build/android/gyp/generate_android_wrapper.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# Copyright 2019 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import os +import re +import sys + +from util import build_utils + +sys.path.append( + os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..', 'util'))) + +import generate_wrapper + +_WRAPPED_PATH_LIST_RE = re.compile(r'@WrappedPathList\(([^,]+), ([^)]+)\)') + + +def ExpandWrappedPathLists(args): + expanded_args = [] + for arg in args: + m = _WRAPPED_PATH_LIST_RE.match(arg) + if m: + for p in build_utils.ParseGnList(m.group(2)): + expanded_args.extend([m.group(1), '@WrappedPath(%s)' % p]) + else: + expanded_args.append(arg) + return expanded_args + + +def main(raw_args): + parser = generate_wrapper.CreateArgumentParser() + expanded_raw_args = build_utils.ExpandFileArgs(raw_args) + expanded_raw_args = ExpandWrappedPathLists(expanded_raw_args) + args = parser.parse_args(expanded_raw_args) + return generate_wrapper.Wrap(args) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/generate_linker_version_script.py b/deps/v8/build/android/gyp/generate_linker_version_script.py new file mode 100755 index 0000000000..34c72eb818 --- /dev/null +++ b/deps/v8/build/android/gyp/generate_linker_version_script.py @@ -0,0 +1,72 @@ +#!/usr/bin/env vpython +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. +"""Generate linker version scripts for Chrome on Android shared libraries.""" + +import argparse +import os + +from util import build_utils + +_SCRIPT_HEADER = """\ +# AUTO-GENERATED FILE. DO NOT MODIFY. 
+# +# See: %s + +{ + global: +""" % os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT) + +_SCRIPT_FOOTER = """\ + local: + *; +}; +""" + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument( + '--output', + required=True, + help='Path to output linker version script file.') + parser.add_argument( + '--export-java-symbols', + action='store_true', + help='Export Java_* JNI methods') + parser.add_argument( + '--export-symbol-whitelist-file', + help='Path to input file containing whitelist of extra ' + 'symbols to export. One symbol per line.') + options = parser.parse_args() + + # JNI_OnLoad is always exported. + # CrashpadHandlerMain() is the entry point to the Crashpad handler, required + # for libcrashpad_handler_trampoline.so. + symbol_list = ['CrashpadHandlerMain', 'JNI_OnLoad'] + + if options.export_java_symbols: + symbol_list.append('Java_*') + + if options.export_symbol_whitelist_file: + with open(options.export_symbol_whitelist_file, 'rt') as f: + for line in f: + line = line.strip() + if not line or line[0] == '#': + continue + symbol_list.append(line) + + script_content = [_SCRIPT_HEADER] + for symbol in symbol_list: + script_content.append(' %s;\n' % symbol) + script_content.append(_SCRIPT_FOOTER) + + script = ''.join(script_content) + + with build_utils.AtomicOutput(options.output) as f: + f.write(script) + + +if __name__ == '__main__': + main() diff --git a/deps/v8/build/android/gyp/generate_linker_version_script.pydeps b/deps/v8/build/android/gyp/generate_linker_version_script.pydeps new file mode 100644 index 0000000000..d1e3ad6181 --- /dev/null +++ b/deps/v8/build/android/gyp/generate_linker_version_script.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/generate_linker_version_script.pydeps build/android/gyp/generate_linker_version_script.py +../../gn_helpers.py +generate_linker_version_script.py +util/__init__.py +util/build_utils.py 
+util/md5_check.py diff --git a/deps/v8/build/android/gyp/generate_resource_rewriter.py b/deps/v8/build/android/gyp/generate_resource_rewriter.py new file mode 100755 index 0000000000..ba635a293d --- /dev/null +++ b/deps/v8/build/android/gyp/generate_resource_rewriter.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python +# +# Copyright (c) 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Generate ResourceRewriter.java which overwrites the given package's + resource id. +""" + +import argparse +import os +import sys +import zipfile + +from util import build_utils + +# Import jinja2 from third_party/jinja2 +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), + '..', + '..', + '..', + 'third_party'))) +import jinja2 + + +RESOURCE_REWRITER_JAVA="ResourceRewriter.java" + +RESOURCE_REWRITER="""/* AUTO-GENERATED FILE. DO NOT MODIFY. */ + +package {{ package }}; +/** + * Helper class used to fix up resource ids. + */ +class ResourceRewriter { + /** + * Rewrite the R 'constants' for the WebView. + */ + public static void rewriteRValues(final int packageId) { + {% for res_package in res_packages %} + {{ res_package }}.R.onResourcesLoaded(packageId); + {% endfor %} + } +} +""" + +def ParseArgs(args): + """Parses command line options. 
+ + Returns: + An Namespace from argparse.parse_args() + """ + parser = argparse.ArgumentParser(prog='generate_resource_rewriter') + + parser.add_argument('--package-name', + required=True, + help='The package name of ResourceRewriter.') + parser.add_argument('--dep-packages', + required=True, + help='A list of packages whose resource id will be' + 'overwritten in ResourceRewriter.') + parser.add_argument('--output-dir', + help='A output directory of generated' + ' ResourceRewriter.java') + parser.add_argument('--srcjar', + help='The path of generated srcjar which has' + ' ResourceRewriter.java') + + return parser.parse_args(args) + + +def CreateResourceRewriter(package, res_packages, output_dir): + build_utils.MakeDirectory(output_dir) + java_path = os.path.join(output_dir, RESOURCE_REWRITER_JAVA) + template = jinja2.Template(RESOURCE_REWRITER, + trim_blocks=True, + lstrip_blocks=True) + output = template.render(package=package, res_packages=res_packages) + with open(java_path, 'w') as f: + f.write(output) + +def CreateResourceRewriterSrcjar(package, res_packages, srcjar_path): + with build_utils.TempDir() as temp_dir: + output_dir = os.path.join(temp_dir, *package.split('.')) + CreateResourceRewriter(package, res_packages, output_dir) + build_utils.DoZip([os.path.join(output_dir, RESOURCE_REWRITER_JAVA)], + srcjar_path, + temp_dir) + + +def main(): + options = ParseArgs(build_utils.ExpandFileArgs(sys.argv[1:])) + package = options.package_name + if options.output_dir: + output_dir = os.path.join(options.output_dir, *package.split('.')) + CreateResourceRewriter( + package, + build_utils.ParseGnList(options.dep_packages), + output_dir) + else: + CreateResourceRewriterSrcjar( + package, + build_utils.ParseGnList(options.dep_packages), + options.srcjar) + + return 0 + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/generate_v14_compatible_resources.py b/deps/v8/build/android/gyp/generate_v14_compatible_resources.py new file mode 
100755 index 0000000000..f9e8a3783a --- /dev/null +++ b/deps/v8/build/android/gyp/generate_v14_compatible_resources.py @@ -0,0 +1,281 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Convert Android xml resources to API 14 compatible. + +There are two reasons that we cannot just use API 17 attributes, +so we are generating another set of resources by this script. + +1. paddingStart attribute can cause a crash on Galaxy Tab 2. +2. There is a bug that paddingStart does not override paddingLeft on + JB-MR1. This is fixed on JB-MR2. b/8654490 + +Therefore, this resource generation script can be removed when +we drop the support for JB-MR1. + +Please refer to http://crbug.com/235118 for the details. +""" + +import codecs +import os +import re +import shutil +import sys +import xml.dom.minidom as minidom + +from util import build_utils + +# Note that we are assuming 'android:' is an alias of +# the namespace 'http://schemas.android.com/apk/res/android'. + +GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity') + +# Almost all the attributes that has "Start" or "End" in +# its name should be mapped. 
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft', + 'drawableStart' : 'drawableLeft', + 'layout_alignStart' : 'layout_alignLeft', + 'layout_marginStart' : 'layout_marginLeft', + 'layout_alignParentStart' : 'layout_alignParentLeft', + 'layout_toStartOf' : 'layout_toLeftOf', + 'paddingEnd' : 'paddingRight', + 'drawableEnd' : 'drawableRight', + 'layout_alignEnd' : 'layout_alignRight', + 'layout_marginEnd' : 'layout_marginRight', + 'layout_alignParentEnd' : 'layout_alignParentRight', + 'layout_toEndOf' : 'layout_toRightOf'} + +ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v + in ATTRIBUTES_TO_MAP.iteritems()) + +ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v + in ATTRIBUTES_TO_MAP.iteritems()) + + +def IterateXmlElements(node): + """minidom helper function that iterates all the element nodes. + Iteration order is pre-order depth-first.""" + if node.nodeType == node.ELEMENT_NODE: + yield node + for child_node in node.childNodes: + for child_node_element in IterateXmlElements(child_node): + yield child_node_element + + +def ParseAndReportErrors(filename): + try: + return minidom.parse(filename) + except Exception: # pylint: disable=broad-except + import traceback + traceback.print_exc() + sys.stderr.write('Failed to parse XML file: %s\n' % filename) + sys.exit(1) + + +def AssertNotDeprecatedAttribute(name, value, filename): + """Raises an exception if the given attribute is deprecated.""" + msg = None + if name in ATTRIBUTES_TO_MAP_REVERSED: + msg = '{0} should use {1} instead of {2}'.format(filename, + ATTRIBUTES_TO_MAP_REVERSED[name], name) + elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value): + msg = '{0} should use start/end instead of left/right for {1}'.format( + filename, name) + + if msg: + msg += ('\nFor background, see: http://android-developers.blogspot.com/' + '2013/03/native-rtl-support-in-android-42.html\n' + 'If you have a legitimate need for this attribute, discuss with ' + 'kkimlabs@chromium.org or 
newt@chromium.org') + raise Exception(msg) + + +def WriteDomToFile(dom, filename): + """Write the given dom to filename.""" + build_utils.MakeDirectory(os.path.dirname(filename)) + with codecs.open(filename, 'w', 'utf-8') as f: + dom.writexml(f, '', ' ', '\n', encoding='utf-8') + + +def HasStyleResource(dom): + """Return True if the dom is a style resource, False otherwise.""" + root_node = IterateXmlElements(dom).next() + return bool(root_node.nodeName == 'resources' and + list(root_node.getElementsByTagName('style'))) + + +def ErrorIfStyleResourceExistsInDir(input_dir): + """If a style resource is in input_dir, raises an exception.""" + for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'): + dom = ParseAndReportErrors(input_filename) + if HasStyleResource(dom): + # Allow style file in third_party to exist in non-v17 directories so long + # as they do not contain deprecated attributes. + if not 'third_party' in input_dir or ( + GenerateV14StyleResourceDom(dom, input_filename)): + raise Exception('error: style file ' + input_filename + + ' should be under ' + input_dir + + '-v17 directory. Please refer to ' + 'http://crbug.com/243952 for the details.') + + +def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True): + """Convert layout resource to API 14 compatible layout resource. + + Args: + dom: Parsed minidom object to be modified. + filename: Filename that the DOM was parsed from. + assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will + cause an exception to be thrown. + + Returns: + True if dom is modified, False otherwise. + """ + is_modified = False + + # Iterate all the elements' attributes to find attributes to convert. + for element in IterateXmlElements(dom): + for name, value in list(element.attributes.items()): + # Convert any API 17 Start/End attributes to Left/Right attributes. 
+ # For example, from paddingStart="10dp" to paddingLeft="10dp" + # Note: gravity attributes are not necessary to convert because + # start/end values are backward-compatible. Explained at + # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom + if name in ATTRIBUTES_TO_MAP: + element.setAttribute(ATTRIBUTES_TO_MAP[name], value) + del element.attributes[name] + is_modified = True + elif assert_not_deprecated: + AssertNotDeprecatedAttribute(name, value, filename) + + return is_modified + + +def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True): + """Convert style resource to API 14 compatible style resource. + + Args: + dom: Parsed minidom object to be modified. + filename: Filename that the DOM was parsed from. + assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will + cause an exception to be thrown. + + Returns: + True if dom is modified, False otherwise. + """ + is_modified = False + + for style_element in dom.getElementsByTagName('style'): + for item_element in style_element.getElementsByTagName('item'): + name = item_element.attributes['name'].value + value = item_element.childNodes[0].nodeValue + if name in ATTRIBUTES_TO_MAP: + item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name] + is_modified = True + elif assert_not_deprecated: + AssertNotDeprecatedAttribute(name, value, filename) + + return is_modified + + +def GenerateV14LayoutResource(input_filename, output_v14_filename, + output_v17_filename): + """Convert API 17 layout resource to API 14 compatible layout resource. + + It's mostly a simple replacement, s/Start/Left s/End/Right, + on the attribute names. + If the generated resource is identical to the original resource, + don't do anything. If not, write the generated resource to + output_v14_filename, and copy the original resource to output_v17_filename. 
+ """ + dom = ParseAndReportErrors(input_filename) + is_modified = GenerateV14LayoutResourceDom(dom, input_filename) + + if is_modified: + # Write the generated resource. + WriteDomToFile(dom, output_v14_filename) + + # Copy the original resource. + build_utils.MakeDirectory(os.path.dirname(output_v17_filename)) + shutil.copy2(input_filename, output_v17_filename) + + +def GenerateV14StyleResource(input_filename, output_v14_filename): + """Convert API 17 style resources to API 14 compatible style resource. + + Write the generated style resource to output_v14_filename. + It's mostly a simple replacement, s/Start/Left s/End/Right, + on the attribute names. + """ + dom = ParseAndReportErrors(input_filename) + GenerateV14StyleResourceDom(dom, input_filename) + + # Write the generated resource. + WriteDomToFile(dom, output_v14_filename) + + +def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir): + """Convert layout resources to API 14 compatible resources in input_dir.""" + for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'): + rel_filename = os.path.relpath(input_filename, input_dir) + output_v14_filename = os.path.join(output_v14_dir, rel_filename) + output_v17_filename = os.path.join(output_v17_dir, rel_filename) + GenerateV14LayoutResource(input_filename, output_v14_filename, + output_v17_filename) + + +def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir): + """Convert style resources to API 14 compatible resources in input_dir.""" + for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'): + rel_filename = os.path.relpath(input_filename, input_dir) + output_v14_filename = os.path.join(output_v14_dir, rel_filename) + GenerateV14StyleResource(input_filename, output_v14_filename) + + +def GenerateV14Resources(res_dir, res_v14_dir): + for name in os.listdir(res_dir): + if not os.path.isdir(os.path.join(res_dir, name)): + continue + + dir_pieces = name.split('-') + resource_type = dir_pieces[0] + qualifiers 
= dir_pieces[1:] + + api_level_qualifier_index = -1 + api_level_qualifier = '' + for index, qualifier in enumerate(qualifiers): + if re.match('v[0-9]+$', qualifier): + api_level_qualifier_index = index + api_level_qualifier = qualifier + break + + # Android pre-v17 API doesn't support RTL. Skip. + if 'ldrtl' in qualifiers: + continue + + input_dir = os.path.abspath(os.path.join(res_dir, name)) + + # We also need to copy the original v17 resource to *-v17 directory + # because the generated v14 resource will hide the original resource. + output_v14_dir = os.path.join(res_v14_dir, name) + output_v17_dir = os.path.join(res_v14_dir, name + '-v17') + + # We only convert layout resources under layout*/, xml*/, + # and style resources under values*/. + if resource_type in ('layout', 'xml'): + if not api_level_qualifier: + GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, + output_v17_dir) + elif resource_type == 'values': + if api_level_qualifier == 'v17': + output_qualifiers = qualifiers[:] + del output_qualifiers[api_level_qualifier_index] + output_v14_dir = os.path.join(res_v14_dir, + '-'.join([resource_type] + + output_qualifiers)) + GenerateV14StyleResourcesInDir(input_dir, output_v14_dir) + elif not api_level_qualifier: + ErrorIfStyleResourceExistsInDir(input_dir)
\ No newline at end of file diff --git a/deps/v8/build/android/gyp/ijar.py b/deps/v8/build/android/gyp/ijar.py new file mode 100755 index 0000000000..89108087ed --- /dev/null +++ b/deps/v8/build/android/gyp/ijar.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +# +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import argparse +import os +import subprocess +import sys + +from util import build_utils + + +def main(): + # The point of this wrapper is to use AtomicOutput so that output timestamps + # are not updated when outputs are unchanged. + ijar_bin, in_jar, out_jar = sys.argv[1:] + with build_utils.AtomicOutput(out_jar) as f: + subprocess.check_call([ijar_bin, in_jar, f.name]) + + +if __name__ == '__main__': + main() diff --git a/deps/v8/build/android/gyp/ijar.pydeps b/deps/v8/build/android/gyp/ijar.pydeps new file mode 100644 index 0000000000..ca10697c1f --- /dev/null +++ b/deps/v8/build/android/gyp/ijar.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/ijar.pydeps build/android/gyp/ijar.py +../../gn_helpers.py +ijar.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/jar.py b/deps/v8/build/android/gyp/jar.py new file mode 100755 index 0000000000..7f2c9f0b21 --- /dev/null +++ b/deps/v8/build/android/gyp/jar.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def Jar(class_files,
+        classes_dir,
+        jar_path,
+        provider_configurations=None,
+        additional_files=None):
+  files = [(os.path.relpath(f, classes_dir), f) for f in class_files]
+
+  if additional_files:
+    for filepath, jar_filepath in additional_files:
+      files.append((jar_filepath, filepath))
+
+  if provider_configurations:
+    for config in provider_configurations:
+      files.append(('META-INF/services/' + os.path.basename(config), config))
+
+  # Zeros out timestamps so that builds are hermetic.
+  with build_utils.AtomicOutput(jar_path) as f:
+    build_utils.DoZip(files, f)
+
+
+def JarDirectory(classes_dir,
+                 jar_path,
+                 predicate=None,
+                 provider_configurations=None,
+                 additional_files=None):
+  all_files = build_utils.FindInDirectory(classes_dir, '*')
+  if predicate:
+    all_files = [
+        f for f in all_files if predicate(os.path.relpath(f, classes_dir))]
+  all_files.sort()
+
+  Jar(all_files,
+      classes_dir,
+      jar_path,
+      provider_configurations=provider_configurations,
+      additional_files=additional_files)
+
+
+def _CreateFilterPredicate(excluded_classes, included_classes):
+  if not excluded_classes and not included_classes:
+    return None
+
+  def predicate(f):
+    # Exclude filters take precedence over include filters.
+    if build_utils.MatchesGlob(f, excluded_classes):
+      return False
+    if included_classes and not build_utils.MatchesGlob(f, included_classes):
+      return False
+    return True
+
+  return predicate
+
+
+# TODO(agrieve): Change components/cronet/android/BUILD.gn to use filter_zip.py
+# and delete main(). 
+def main(): + parser = optparse.OptionParser() + parser.add_option('--classes-dir', help='Directory containing .class files.') + parser.add_option('--jar-path', help='Jar output path.') + parser.add_option('--excluded-classes', + help='GN list of .class file patterns to exclude from the jar.') + parser.add_option('--included-classes', + help='GN list of .class file patterns to include in the jar.') + + args = build_utils.ExpandFileArgs(sys.argv[1:]) + options, _ = parser.parse_args(args) + + excluded_classes = [] + if options.excluded_classes: + excluded_classes = build_utils.ParseGnList(options.excluded_classes) + included_classes = [] + if options.included_classes: + included_classes = build_utils.ParseGnList(options.included_classes) + + predicate = _CreateFilterPredicate(excluded_classes, included_classes) + JarDirectory(options.classes_dir, options.jar_path, predicate=predicate) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/java_cpp_enum.py b/deps/v8/build/android/gyp/java_cpp_enum.py new file mode 100755 index 0000000000..bacc8e3d46 --- /dev/null +++ b/deps/v8/build/android/gyp/java_cpp_enum.py @@ -0,0 +1,435 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +from datetime import date +import re +import optparse +import os +from string import Template +import sys +import textwrap +import zipfile + +from util import build_utils +from util import java_cpp_utils + +# List of C++ types that are compatible with the Java code generated by this +# script. +# +# This script can parse .idl files however, at present it ignores special +# rules such as [cpp_enum_prefix_override="ax_attr"]. 
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char', + 'short', 'unsigned short', + 'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t'] + +class EnumDefinition(object): + def __init__(self, original_enum_name=None, class_name_override=None, + enum_package=None, entries=None, comments=None, fixed_type=None): + self.original_enum_name = original_enum_name + self.class_name_override = class_name_override + self.enum_package = enum_package + self.entries = collections.OrderedDict(entries or []) + self.comments = collections.OrderedDict(comments or []) + self.prefix_to_strip = None + self.fixed_type = fixed_type + + def AppendEntry(self, key, value): + if key in self.entries: + raise Exception('Multiple definitions of key %s found.' % key) + self.entries[key] = value + + def AppendEntryComment(self, key, value): + if key in self.comments: + raise Exception('Multiple definitions of key %s found.' % key) + self.comments[key] = value + + @property + def class_name(self): + return self.class_name_override or self.original_enum_name + + def Finalize(self): + self._Validate() + self._AssignEntryIndices() + self._StripPrefix() + self._NormalizeNames() + + def _Validate(self): + assert self.class_name + assert self.enum_package + assert self.entries + if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST: + raise Exception('Fixed type %s for enum %s not whitelisted.' % + (self.fixed_type, self.class_name)) + + def _AssignEntryIndices(self): + # Enums, if given no value, are given the value of the previous enum + 1. + if not all(self.entries.values()): + prev_enum_value = -1 + for key, value in self.entries.iteritems(): + if not value: + self.entries[key] = prev_enum_value + 1 + elif value in self.entries: + self.entries[key] = self.entries[value] + else: + try: + self.entries[key] = int(value) + except ValueError: + raise Exception('Could not interpret integer from enum value "%s" ' + 'for key %s.' 
% (value, key)) + prev_enum_value = self.entries[key] + + + def _StripPrefix(self): + prefix_to_strip = self.prefix_to_strip + if not prefix_to_strip: + shout_case = self.original_enum_name + shout_case = re.sub('(?!^)([A-Z]+)', r'_\1', shout_case).upper() + shout_case += '_' + + prefixes = [shout_case, self.original_enum_name, + 'k' + self.original_enum_name] + + for prefix in prefixes: + if all([w.startswith(prefix) for w in self.entries.keys()]): + prefix_to_strip = prefix + break + else: + prefix_to_strip = '' + + def StripEntries(entries): + ret = collections.OrderedDict() + for k, v in entries.iteritems(): + stripped_key = k.replace(prefix_to_strip, '', 1) + if isinstance(v, basestring): + stripped_value = v.replace(prefix_to_strip, '') + else: + stripped_value = v + ret[stripped_key] = stripped_value + + return ret + + self.entries = StripEntries(self.entries) + self.comments = StripEntries(self.comments) + + def _NormalizeNames(self): + self.entries = _TransformKeys(self.entries, java_cpp_utils.KCamelToShouty) + self.comments = _TransformKeys(self.comments, java_cpp_utils.KCamelToShouty) + + +def _TransformKeys(d, func): + """Normalize keys in |d| and update references to old keys in |d| values.""" + normal_keys = {k: func(k) for k in d} + ret = collections.OrderedDict() + for k, v in d.iteritems(): + # Need to transform values as well when the entry value was explicitly set + # (since it could contain references to other enum entry values). 
+ if isinstance(v, basestring): + for normal_key in normal_keys: + v = v.replace(normal_key, normal_keys[normal_key]) + ret[normal_keys[k]] = v + return ret + + +class DirectiveSet(object): + class_name_override_key = 'CLASS_NAME_OVERRIDE' + enum_package_key = 'ENUM_PACKAGE' + prefix_to_strip_key = 'PREFIX_TO_STRIP' + + known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key] + + def __init__(self): + self._directives = {} + + def Update(self, key, value): + if key not in DirectiveSet.known_keys: + raise Exception("Unknown directive: " + key) + self._directives[key] = value + + @property + def empty(self): + return len(self._directives) == 0 + + def UpdateDefinition(self, definition): + definition.class_name_override = self._directives.get( + DirectiveSet.class_name_override_key, '') + definition.enum_package = self._directives.get( + DirectiveSet.enum_package_key) + definition.prefix_to_strip = self._directives.get( + DirectiveSet.prefix_to_strip_key) + + +class HeaderParser(object): + single_line_comment_re = re.compile(r'\s*//\s*([^\n]*)') + multi_line_comment_start_re = re.compile(r'\s*/\*') + enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?') + enum_end_re = re.compile(r'^\s*}\s*;\.*$') + generator_error_re = re.compile(r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*$') + generator_directive_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$') + multi_line_generator_directive_start_re = re.compile( + r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$') + multi_line_directive_continuation_re = re.compile(r'^\s*//\s+([\.\w]+)$') + multi_line_directive_end_re = re.compile(r'^\s*//\s+([\.\w]*)\)$') + + optional_class_or_struct_re = r'(class|struct)?' + enum_name_re = r'(\w+)' + optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?' 
+ enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' + + optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' + + optional_fixed_type_re + '\s*{\s*') + enum_single_line_re = re.compile( + r'^\s*(?:\[cpp.*\])?\s*enum.*{(?P<enum_entries>.*)}.*$') + + def __init__(self, lines, path=''): + self._lines = lines + self._path = path + self._enum_definitions = [] + self._in_enum = False + self._current_definition = None + self._current_comments = [] + self._generator_directives = DirectiveSet() + self._multi_line_generator_directive = None + self._current_enum_entry = '' + + def _ApplyGeneratorDirectives(self): + self._generator_directives.UpdateDefinition(self._current_definition) + self._generator_directives = DirectiveSet() + + def ParseDefinitions(self): + for line in self._lines: + self._ParseLine(line) + return self._enum_definitions + + def _ParseLine(self, line): + if self._multi_line_generator_directive: + self._ParseMultiLineDirectiveLine(line) + elif not self._in_enum: + self._ParseRegularLine(line) + else: + self._ParseEnumLine(line) + + def _ParseEnumLine(self, line): + if HeaderParser.multi_line_comment_start_re.match(line): + raise Exception('Multi-line comments in enums are not supported in ' + + self._path) + + enum_comment = HeaderParser.single_line_comment_re.match(line) + if enum_comment: + comment = enum_comment.groups()[0] + if comment: + self._current_comments.append(comment) + elif HeaderParser.enum_end_re.match(line): + self._FinalizeCurrentEnumDefinition() + else: + self._AddToCurrentEnumEntry(line) + if ',' in line: + self._ParseCurrentEnumEntry() + + def _ParseSingleLineEnum(self, line): + for entry in line.split(','): + self._AddToCurrentEnumEntry(entry) + self._ParseCurrentEnumEntry() + + self._FinalizeCurrentEnumDefinition() + + def _ParseCurrentEnumEntry(self): + if not self._current_enum_entry: + return + + enum_entry = HeaderParser.enum_line_re.match(self._current_enum_entry) + if not enum_entry: + raise Exception('Unexpected 
error while attempting to parse %s as enum ' + 'entry.' % self._current_enum_entry) + + enum_key = enum_entry.groups()[0] + enum_value = enum_entry.groups()[2] + self._current_definition.AppendEntry(enum_key, enum_value) + if self._current_comments: + self._current_definition.AppendEntryComment( + enum_key, ' '.join(self._current_comments)) + self._current_comments = [] + self._current_enum_entry = '' + + def _AddToCurrentEnumEntry(self, line): + self._current_enum_entry += ' ' + line.strip() + + def _FinalizeCurrentEnumDefinition(self): + if self._current_enum_entry: + self._ParseCurrentEnumEntry() + self._ApplyGeneratorDirectives() + self._current_definition.Finalize() + self._enum_definitions.append(self._current_definition) + self._current_definition = None + self._in_enum = False + + def _ParseMultiLineDirectiveLine(self, line): + multi_line_directive_continuation = ( + HeaderParser.multi_line_directive_continuation_re.match(line)) + multi_line_directive_end = ( + HeaderParser.multi_line_directive_end_re.match(line)) + + if multi_line_directive_continuation: + value_cont = multi_line_directive_continuation.groups()[0] + self._multi_line_generator_directive[1].append(value_cont) + elif multi_line_directive_end: + directive_name = self._multi_line_generator_directive[0] + directive_value = "".join(self._multi_line_generator_directive[1]) + directive_value += multi_line_directive_end.groups()[0] + self._multi_line_generator_directive = None + self._generator_directives.Update(directive_name, directive_value) + else: + raise Exception('Malformed multi-line directive declaration in ' + + self._path) + + def _ParseRegularLine(self, line): + enum_start = HeaderParser.enum_start_re.match(line) + generator_directive_error = HeaderParser.generator_error_re.match(line) + generator_directive = HeaderParser.generator_directive_re.match(line) + multi_line_generator_directive_start = ( + HeaderParser.multi_line_generator_directive_start_re.match(line)) + single_line_enum = 
HeaderParser.enum_single_line_re.match(line) + + if generator_directive_error: + raise Exception('Malformed directive declaration in ' + self._path + + '. Use () for multi-line directives. E.g.\n' + + '// GENERATED_JAVA_ENUM_PACKAGE: (\n' + + '// foo.package)') + elif generator_directive: + directive_name = generator_directive.groups()[0] + directive_value = generator_directive.groups()[1] + self._generator_directives.Update(directive_name, directive_value) + elif multi_line_generator_directive_start: + directive_name = multi_line_generator_directive_start.groups()[0] + directive_value = multi_line_generator_directive_start.groups()[1] + self._multi_line_generator_directive = (directive_name, [directive_value]) + elif enum_start or single_line_enum: + if self._generator_directives.empty: + return + self._current_definition = EnumDefinition( + original_enum_name=enum_start.groups()[1], + fixed_type=enum_start.groups()[3]) + self._in_enum = True + if single_line_enum: + self._ParseSingleLineEnum(single_line_enum.group('enum_entries')) + + +def DoGenerate(source_paths): + for source_path in source_paths: + enum_definitions = DoParseHeaderFile(source_path) + if not enum_definitions: + raise Exception('No enums found in %s\n' + 'Did you forget prefixing enums with ' + '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' % + source_path) + for enum_definition in enum_definitions: + package_path = enum_definition.enum_package.replace('.', os.path.sep) + file_name = enum_definition.class_name + '.java' + output_path = os.path.join(package_path, file_name) + output = GenerateOutput(source_path, enum_definition) + yield output_path, output + + +def DoParseHeaderFile(path): + with open(path) as f: + return HeaderParser(f.readlines(), path).ParseDefinitions() + + +def GenerateOutput(source_path, enum_definition): + template = Template(""" +// Copyright ${YEAR} The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +import android.support.annotation.IntDef; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@IntDef({ +${INT_DEF} +}) +@Retention(RetentionPolicy.SOURCE) +public @interface ${CLASS_NAME} { +${ENUM_ENTRIES} +} +""") + + enum_template = Template(' int ${NAME} = ${VALUE};') + enum_entries_string = [] + enum_names = [] + for enum_name, enum_value in enum_definition.entries.iteritems(): + values = { + 'NAME': enum_name, + 'VALUE': enum_value, + } + enum_comments = enum_definition.comments.get(enum_name) + if enum_comments: + enum_comments_indent = ' * ' + comments_line_wrapper = textwrap.TextWrapper( + initial_indent=enum_comments_indent, + subsequent_indent=enum_comments_indent, + width=100) + enum_entries_string.append(' /**') + enum_entries_string.append('\n'.join( + comments_line_wrapper.wrap(enum_comments))) + enum_entries_string.append(' */') + enum_entries_string.append(enum_template.substitute(values)) + if enum_name != "NUM_ENTRIES": + enum_names.append(enum_definition.class_name + '.' + enum_name) + enum_entries_string = '\n'.join(enum_entries_string) + + enum_names_indent = ' ' * 4 + wrapper = textwrap.TextWrapper(initial_indent = enum_names_indent, + subsequent_indent = enum_names_indent, + width = 100) + enum_names_string = '\n'.join(wrapper.wrap(', '.join(enum_names))) + + values = { + 'CLASS_NAME': enum_definition.class_name, + 'ENUM_ENTRIES': enum_entries_string, + 'PACKAGE': enum_definition.enum_package, + 'INT_DEF': enum_names_string, + 'SCRIPT_NAME': java_cpp_utils.GetScriptName(), + 'SOURCE_PATH': source_path, + 'YEAR': str(date.today().year) + } + return template.substitute(values) + + +def DoMain(argv): + usage = 'usage: %prog [options] [output_dir] input_file(s)...' 
+ parser = optparse.OptionParser(usage=usage) + build_utils.AddDepfileOption(parser) + + parser.add_option('--srcjar', + help='When specified, a .srcjar at the given path is ' + 'created instead of individual .java files.') + + options, args = parser.parse_args(argv) + + if not args: + parser.error('Need to specify at least one input file') + input_paths = args + + with build_utils.AtomicOutput(options.srcjar) as f: + with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: + for output_path, data in DoGenerate(input_paths): + build_utils.AddToZipHermetic(srcjar, output_path, data=data) + + if options.depfile: + build_utils.WriteDepfile(options.depfile, options.srcjar, add_pydeps=False) + + +if __name__ == '__main__': + DoMain(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/java_cpp_enum.pydeps b/deps/v8/build/android/gyp/java_cpp_enum.pydeps new file mode 100644 index 0000000000..d5869edddf --- /dev/null +++ b/deps/v8/build/android/gyp/java_cpp_enum.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_enum.pydeps build/android/gyp/java_cpp_enum.py +../../gn_helpers.py +java_cpp_enum.py +util/__init__.py +util/build_utils.py +util/java_cpp_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/java_cpp_enum_tests.py b/deps/v8/build/android/gyp/java_cpp_enum_tests.py new file mode 100755 index 0000000000..5717047c7a --- /dev/null +++ b/deps/v8/build/android/gyp/java_cpp_enum_tests.py @@ -0,0 +1,747 @@ +#!/usr/bin/env python +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for enum_preprocess.py. + +This test suite contains various tests for the C++ -> Java enum generator. 
+""" + +import collections +from datetime import date +import unittest + +import java_cpp_enum +from java_cpp_enum import EnumDefinition, GenerateOutput +from java_cpp_enum import HeaderParser +from util import java_cpp_utils + + +class TestPreprocess(unittest.TestCase): + def testOutput(self): + definition = EnumDefinition(original_enum_name='ClassName', + enum_package='some.package', + entries=[('E1', 1), ('E2', '2 << 2')], + comments=[('E2', 'This is a comment.'), + ('E1', 'This is a multiple line ' + 'comment that is really long. ' + 'This is a multiple line ' + 'comment that is really ' + 'really long.')]) + output = GenerateOutput('path/to/file', definition) + expected = """ +// Copyright %d The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// path/to/file + +package some.package; + +import android.support.annotation.IntDef; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@IntDef({ + ClassName.E1, ClassName.E2 +}) +@Retention(RetentionPolicy.SOURCE) +public @interface ClassName { + /** + * %s + * really really long. + */ + int E1 = 1; + /** + * This is a comment. + */ + int E2 = 2 << 2; +} +""" + long_comment = ('This is a multiple line comment that is really long. 
' + 'This is a multiple line comment that is') + self.assertEqual( + expected % (date.today().year, java_cpp_utils.GetScriptName(), + long_comment), output) + + def testParseSimpleEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO, + VALUE_ONE, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0), + ('VALUE_ONE', 1)]), + definition.entries) + + def testParseBitShifts(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + VALUE_ZERO = 1 << 0, + VALUE_ONE = 1 << 1, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + ENUM_NAME_ZERO = 1 << 0, + ENUM_NAME_ONE = 1 << 1, + ENUM_NAME_TWO = ENUM_NAME_ZERO | ENUM_NAME_ONE, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumName', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'), + ('VALUE_ONE', '1 << 1')]), + definition.entries) + + definition = definitions[1] + expected_entries = collections.OrderedDict([ + ('ZERO', '1 << 0'), + ('ONE', '1 << 1'), + ('TWO', 'ZERO | ONE')]) + self.assertEqual(expected_entries, definition.entries) + + def testParseMultilineEnumEntry(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace + enum Foo { + VALUE_ZERO = 1 << 0, + VALUE_ONE = + SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ControlKey, + VALUE_TWO = 1 << 18, + }; + """.split('\n') + expected_entries = collections.OrderedDict([ + ('VALUE_ZERO', '1 << 0'), + ('VALUE_ONE', 'SymbolKey | FnKey | 
AltGrKey | MetaKey | AltKey | ' + 'ControlKey'), + ('VALUE_TWO', '1 << 18')]) + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('bar.namespace', definition.enum_package) + self.assertEqual(expected_entries, definition.entries) + + def testParseEnumEntryWithTrailingMultilineEntry(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace + enum Foo { + VALUE_ZERO = 1, + VALUE_ONE = + SymbolKey | FnKey | AltGrKey | MetaKey | + AltKey | ControlKey | ShiftKey, + }; + """.split('\n') + expected_entries = collections.OrderedDict([ + ('VALUE_ZERO', '1'), + ('VALUE_ONE', 'SymbolKey | FnKey | AltGrKey | MetaKey | AltKey | ' + 'ControlKey | ShiftKey')]) + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('bar.namespace', definition.enum_package) + self.assertEqual(expected_entries, definition.entries) + + def testParseNoCommaAfterLastEntry(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: bar.namespace + enum Foo { + VALUE_ZERO = 1, + + // This is a multiline + // + // comment with an empty line. 
+ VALUE_ONE = 2 + }; + """.split('\n') + expected_entries = collections.OrderedDict([ + ('VALUE_ZERO', '1'), + ('VALUE_ONE', '2')]) + expected_comments = collections.OrderedDict([ + ('VALUE_ONE', 'This is a multiline comment with an empty line.')]) + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('bar.namespace', definition.enum_package) + self.assertEqual(expected_entries, definition.entries) + self.assertEqual(expected_comments, definition.comments) + + def testParseClassNameOverride(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + FOO + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride + enum PrefixTest { + PREFIX_TEST_A, + PREFIX_TEST_B, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('OverrideName', definition.class_name) + + definition = definitions[1] + self.assertEqual('OtherOverride', definition.class_name) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParsePreservesCommentsWhenPrefixStripping(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + ENUM_ONE_A = 1, + // Comment there + ENUM_ONE_B = A, + }; + + enum EnumIgnore { + C, D, E + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { + P_A, + // This comment spans + // two lines. 
+ P_B + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('EnumOne', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', 'A')]), + definition.entries) + self.assertEqual(collections.OrderedDict([('B', 'Comment there')]), + definition.comments) + definition = definitions[1] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict( + [('B', 'This comment spans two lines.')]), definition.comments) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseTwoEnums(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum AnEnum { + ENUM_ONE_A = 1, + ENUM_ONE_B = A, + }; + + enum EnumIgnore { + C, D, E + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + enum EnumTwo { + P_A, + P_B + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(2, len(definitions)) + definition = definitions[0] + self.assertEqual('AnEnum', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('ENUM_ONE_A', '1'), + ('ENUM_ONE_B', 'A')]), + definition.entries) + definition = definitions[1] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('P_A', 0), + ('P_B', 1)]), + definition.entries) + + def testParseSingleLineEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { P_A, P_B }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + 
self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1)]), + definition.entries) + + def testParseWithStrippingAndRelativeReferences(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: other.package + // GENERATED_JAVA_PREFIX_TO_STRIP: P_ + enum EnumTwo { + P_A = 1, + // P_A is old-don't use P_A. + P_B = P_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual('EnumTwo', definition.class_name) + self.assertEqual('other.package', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', 'A')]), + definition.entries) + self.assertEqual(collections.OrderedDict([('B', 'A is old-don\'t use A.')]), + definition.comments) + + def testParseSingleLineAndRegularEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + ENUM_ONE_A = 1, + // Comment there + ENUM_ONE_B = A, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: other.package + enum EnumTwo { P_A, P_B }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + ENUM_NAME_FOO + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual( + collections.OrderedDict([('A', '1'), ('B', 'A')]), definition.entries) + self.assertEqual(collections.OrderedDict([('B', 'Comment there')]), + definition.comments) + + self.assertEqual(3, len(definitions)) + definition = definitions[1] + self.assertEqual( + collections.OrderedDict([('P_A', 0), ('P_B', 1)]), definition.entries) + + definition = definitions[2] + self.assertEqual(collections.OrderedDict([('FOO', 0)]), definition.entries) + + def testParseWithCamelCaseNames(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumTest { + EnumTestA = 1, + // 
comment for EnumTestB. + EnumTestB = 2, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_PREFIX_TO_STRIP: Test + enum AnEnum { + TestHTTPOption, + TestHTTPSOption, + }; + + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual( + collections.OrderedDict([('A', '1'), ('B', '2')]), + definition.entries) + self.assertEqual( + collections.OrderedDict([('B', 'comment for B.')]), + definition.comments) + + definition = definitions[1] + self.assertEqual( + collections.OrderedDict([('HTTP_OPTION', 0), ('HTTPS_OPTION', 1)]), + definition.entries) + + def testParseWithKCamelCaseNames(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumOne { + kEnumOne = 1, + // comment for kEnumTwo. + kEnumTwo = 2, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName + enum EnumName { + kEnumNameFoo, + kEnumNameBar + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum EnumName { + kEnumNameFoo, + kEnumBar, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Keys { + kSymbolKey = 1 << 0, + kAltKey = 1 << 1, + kUpKey = 1 << 2, + kKeyModifiers = kSymbolKey | kAltKey | kUpKey | kKeyModifiers, + }; + + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Mixed { + kTestVal, + kCodecMPEG2 + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + definition = definitions[0] + self.assertEqual( + collections.OrderedDict([('ENUM_ONE', '1'), ('ENUM_TWO', '2')]), + definition.entries) + self.assertEqual( + collections.OrderedDict([('ENUM_TWO', 'comment for ENUM_TWO.')]), + definition.comments) + + definition = definitions[1] + self.assertEqual( + collections.OrderedDict([('FOO', 0), ('BAR', 1)]), + definition.entries) + + definition = definitions[2] + self.assertEqual( + collections.OrderedDict([('ENUM_NAME_FOO', 0), ('ENUM_BAR', 1)]), + definition.entries) + + definition 
= definitions[3] + expected_entries = collections.OrderedDict([ + ('SYMBOL_KEY', '1 << 0'), + ('ALT_KEY', '1 << 1'), + ('UP_KEY', '1 << 2'), + ('KEY_MODIFIERS', 'SYMBOL_KEY | ALT_KEY | UP_KEY | KEY_MODIFIERS')]) + self.assertEqual(expected_entries, definition.entries) + + definition = definitions[4] + self.assertEqual( + collections.OrderedDict([('TEST_VAL', 0), ('CODEC_MPEG2', 1)]), + definition.entries) + + def testParseThrowsOnUnknownDirective(self): + test_data = """ + // GENERATED_JAVA_UNKNOWN: Value + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseReturnsEmptyListWithoutDirectives(self): + test_data = """ + enum EnumName { + VALUE_ONE, + }; + """.split('\n') + self.assertEqual([], HeaderParser(test_data).ParseDefinitions()) + + def testParseEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseEnumStruct(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum struct Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnum(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum Foo : int { + FOO_A, + }; + """.split('\n') + definitions = 
HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('int', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseFixedTypeEnumClass(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: unsigned short { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual(1, len(definitions)) + definition = definitions[0] + self.assertEqual('Foo', definition.class_name) + self.assertEqual('test.namespace', definition.enum_package) + self.assertEqual('unsigned short', definition.fixed_type) + self.assertEqual(collections.OrderedDict([('A', 0)]), + definition.entries) + + def testParseUnknownFixedTypeRaises(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: test.namespace + enum class Foo: foo_type { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseSimpleMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: (te + // st.name + // space) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + + def testParseMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.namespace) + // 
GENERATED_JAVA_CLASS_NAME_OVERRIDE: ( + // Ba + // r + // ) + enum Foo { + FOO_A, + }; + """.split('\n') + definitions = HeaderParser(test_data).ParseDefinitions() + self.assertEqual('test.namespace', definitions[0].enum_package) + self.assertEqual('Bar', definitions[0].class_name) + + def testParseMalformedMultiLineDirectiveWithOtherDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirective(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + // test.name + // space + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirectiveShort(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: ( + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testParseMalformedMultiLineDirectiveMissingBrackets(self): + test_data = """ + // GENERATED_JAVA_ENUM_PACKAGE: + // test.namespace + enum Foo { + FOO_A, + }; + """.split('\n') + with self.assertRaises(Exception): + HeaderParser(test_data).ParseDefinitions() + + def testEnumValueAssignmentNoneDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2)]), + definition.entries) + + def testEnumValueAssignmentAllDefined(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', '1') + definition.AppendEntry('B', '2') + definition.AppendEntry('C', '3') + definition.Finalize() + 
self.assertEqual(collections.OrderedDict([('A', '1'), + ('B', '2'), + ('C', '3')]), + definition.entries) + + def testEnumValueAssignmentReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', None) + definition.AppendEntry('D', 'C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 1), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentSet(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', '2') + definition.AppendEntry('C', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 2), + ('C', 3)]), + definition.entries) + + def testEnumValueAssignmentSetReferences(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'A') + definition.AppendEntry('C', 'B') + definition.AppendEntry('D', None) + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 0), + ('C', 0), + ('D', 1)]), + definition.entries) + + def testEnumValueAssignmentRaises(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', 'foo') + definition.AppendEntry('C', None) + with self.assertRaises(Exception): + definition.Finalize() + + def testExplicitPrefixStripping(self): + definition = EnumDefinition(original_enum_name='c', enum_package='p') + definition.AppendEntry('P_A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('P_C', None) + definition.AppendEntry('P_LAST', 'P_C') + definition.prefix_to_strip = 'P_' + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2), + ('LAST', 2)]), + definition.entries) + + def 
testImplicitPrefixStripping(self): + definition = EnumDefinition(original_enum_name='ClassName', + enum_package='p') + definition.AppendEntry('CLASS_NAME_A', None) + definition.AppendEntry('CLASS_NAME_B', None) + definition.AppendEntry('CLASS_NAME_C', None) + definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C') + definition.Finalize() + self.assertEqual(collections.OrderedDict([('A', 0), + ('B', 1), + ('C', 2), + ('LAST', 2)]), + definition.entries) + + def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self): + definition = EnumDefinition(original_enum_name='Name', + enum_package='p') + definition.AppendEntry('A', None) + definition.AppendEntry('B', None) + definition.AppendEntry('NAME_LAST', None) + definition.Finalize() + self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys()) + + def testGenerateThrowsOnEmptyInput(self): + with self.assertRaises(Exception): + original_do_parse = java_cpp_enum.DoParseHeaderFile + try: + java_cpp_enum.DoParseHeaderFile = lambda _: [] + for _ in java_cpp_enum.DoGenerate(['file']): + pass + finally: + java_cpp_enum.DoParseHeaderFile = original_do_parse + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/v8/build/android/gyp/java_cpp_strings.py b/deps/v8/build/android/gyp/java_cpp_strings.py new file mode 100755 index 0000000000..acaaf223ef --- /dev/null +++ b/deps/v8/build/android/gyp/java_cpp_strings.py @@ -0,0 +1,213 @@ +#!/user/bin/env python +# +# Copyright 2019 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import argparse +import os +import re +import sys +import zipfile + +from util import build_utils +from util import java_cpp_utils + + +def _ToUpper(match): + return match.group(1).upper() + + +def _GetClassName(source_path): + name = os.path.basename(os.path.abspath(source_path)) + (name, _) = os.path.splitext(name) + name = re.sub(r'_([a-z])', _ToUpper, name) + name = re.sub(r'^(.)', _ToUpper, name) + return name + + +class _String(object): + + def __init__(self, name, value, comments): + self.name = java_cpp_utils.KCamelToShouty(name) + self.value = value + self.comments = '\n'.join(' ' + x for x in comments) + + def Format(self): + return '%s\n public static final String %s = %s;' % ( + self.comments, self.name, self.value) + + +def ParseTemplateFile(lines): + package_re = re.compile(r'^package (.*);') + class_re = re.compile(r'.*class (.*) {') + package = '' + class_name = '' + for line in lines: + package_line = package_re.match(line) + if package_line: + package = package_line.groups()[0] + class_line = class_re.match(line) + if class_line: + class_name = class_line.groups()[0] + break + return package, class_name + + +# TODO(crbug.com/937282): It should be possible to parse a file for more than +# string constants. However, this currently only handles extracting string +# constants from a file (and all string constants from that file). Work will +# be needed if we want to annotate specific constants or non string constants +# in the file to be parsed. 
+class StringFileParser(object): + SINGLE_LINE_COMMENT_RE = re.compile(r'\s*(// [^\n]*)') + STRING_RE = re.compile(r'\s*const char k(.*)\[\]\s*=\s*(?:(".*"))?') + VALUE_RE = re.compile(r'\s*("[^"]*")') + + def __init__(self, lines, path=''): + self._lines = lines + self._path = path + self._in_string = False + self._in_comment = False + self._package = '' + self._current_comments = [] + self._current_name = '' + self._current_value = '' + self._strings = [] + + def _Reset(self): + self._current_comments = [] + self._current_name = '' + self._current_value = '' + self._in_string = False + self._in_comment = False + + def _AppendString(self): + self._strings.append( + _String(self._current_name, self._current_value, + self._current_comments)) + self._Reset() + + def _ParseValue(self, line): + value_line = StringFileParser.VALUE_RE.match(line) + if value_line: + self._current_value = value_line.groups()[0] + self._AppendString() + else: + self._Reset() + + def _ParseComment(self, line): + comment_line = StringFileParser.SINGLE_LINE_COMMENT_RE.match(line) + if comment_line: + self._current_comments.append(comment_line.groups()[0]) + self._in_comment = True + self._in_string = True + return True + else: + self._in_comment = False + return False + + def _ParseString(self, line): + string_line = StringFileParser.STRING_RE.match(line) + if string_line: + self._current_name = string_line.groups()[0] + if string_line.groups()[1]: + self._current_value = string_line.groups()[1] + self._AppendString() + return True + else: + self._in_string = False + return False + + def _ParseLine(self, line): + if not self._in_string: + if not self._ParseString(line): + self._ParseComment(line) + return + + if self._in_comment: + if self._ParseComment(line): + return + if not self._ParseString(line): + self._Reset() + return + + if self._in_string: + self._ParseValue(line) + + def Parse(self): + for line in self._lines: + self._ParseLine(line) + return self._strings + + +def 
_GenerateOutput(template, source_path, template_path, strings): + description_template = """ + // This following string constants were inserted by + // {SCRIPT_NAME} + // From + // {SOURCE_PATH} + // Into + // {TEMPLATE_PATH} + +""" + values = { + 'SCRIPT_NAME': java_cpp_utils.GetScriptName(), + 'SOURCE_PATH': source_path, + 'TEMPLATE_PATH': template_path, + } + description = description_template.format(**values) + native_strings = '\n\n'.join(x.Format() for x in strings) + + values = { + 'NATIVE_STRINGS': description + native_strings, + } + return template.format(**values) + + +def _ParseStringFile(path): + with open(path) as f: + return StringFileParser(f.readlines(), path).Parse() + + +def _Generate(source_paths, template_path): + with open(template_path) as f: + lines = f.readlines() + template = ''.join(lines) + for source_path in source_paths: + strings = _ParseStringFile(source_path) + package, class_name = ParseTemplateFile(lines) + package_path = package.replace('.', os.path.sep) + file_name = class_name + '.java' + output_path = os.path.join(package_path, file_name) + output = _GenerateOutput(template, source_path, template_path, strings) + yield output, output_path + + +def _Main(argv): + parser = argparse.ArgumentParser() + + parser.add_argument( + '--srcjar', + required=True, + help='When specified, a .srcjar at the given path is ' + 'created instead of individual .java files.') + + parser.add_argument( + '--template', + required=True, + help='Can be used to provide a context into which the' + 'new string constants will be inserted.') + + parser.add_argument( + 'inputs', nargs='+', help='Input file(s)', metavar='INPUTFILE') + args = parser.parse_args(argv) + + with build_utils.AtomicOutput(args.srcjar) as f: + with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: + for data, path in _Generate(args.inputs, args.template): + build_utils.AddToZipHermetic(srcjar, path, data=data) + + +if __name__ == '__main__': + _Main(sys.argv[1:]) diff --git 
a/deps/v8/build/android/gyp/java_cpp_strings.pydeps b/deps/v8/build/android/gyp/java_cpp_strings.pydeps new file mode 100644 index 0000000000..901b580e89 --- /dev/null +++ b/deps/v8/build/android/gyp/java_cpp_strings.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/java_cpp_strings.pydeps build/android/gyp/java_cpp_strings.py +../../gn_helpers.py +java_cpp_strings.py +util/__init__.py +util/build_utils.py +util/java_cpp_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/java_cpp_strings_tests.py b/deps/v8/build/android/gyp/java_cpp_strings_tests.py new file mode 100755 index 0000000000..acf51e428e --- /dev/null +++ b/deps/v8/build/android/gyp/java_cpp_strings_tests.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python + +# Copyright 2019 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for java_cpp_strings.py. + +This test suite contains various tests for the C++ -> Java string generator. +""" + +import unittest + +import java_cpp_strings + + +class _TestStringsParser(unittest.TestCase): + + def testParseComments(self): + test_data = """ +/** + * This should be ignored as well. + */ + +// Comment followed by a blank line. + +// Comment followed by unrelated code. +int foo() { return 3; } + +// Real comment. +const char kASwitch[] = "a-value"; + +// Real comment that spans +// multiple lines. +const char kAnotherSwitch[] = "another-value"; + +// Comment followed by nothing. 
+""".split('\n') + strings = java_cpp_strings.StringFileParser(test_data).Parse() + self.assertEqual(2, len(strings)) + self.assertEqual('A_SWITCH', strings[0].name) + self.assertEqual('"a-value"', strings[0].value) + self.assertEqual(1, len(strings[0].comments.split('\n'))) + self.assertEqual('ANOTHER_SWITCH', strings[1].name) + self.assertEqual('"another-value"', strings[1].value) + self.assertEqual(2, len(strings[1].comments.split('\n'))) + + def testStringValues(self): + test_data = """ +// Single line string constants. +const char kAString[] = "a-value"; +const char kNoComment[] = "no-comment"; + +// Single line switch with a big space. +const char kAStringWithSpace[] = "a-value"; + +// Wrapped constant definition. +const char kAStringWithAVeryLongNameThatWillHaveToWrap[] = + "a-string-with-a-very-long-name-that-will-have-to-wrap"; + +// This is erroneous and should be ignored. +const char kInvalidLineBreak[] = + + "invalid-line-break"; +""".split('\n') + strings = java_cpp_strings.StringFileParser(test_data).Parse() + self.assertEqual(4, len(strings)) + self.assertEqual('A_STRING', strings[0].name) + self.assertEqual('"a-value"', strings[0].value) + self.assertEqual('NO_COMMENT', strings[1].name) + self.assertEqual('"no-comment"', strings[1].value) + self.assertEqual('A_STRING_WITH_SPACE', strings[2].name) + self.assertEqual('"a-value"', strings[2].value) + self.assertEqual('A_STRING_WITH_A_VERY_LONG_NAME_THAT_WILL_HAVE_TO_WRAP', + strings[3].name) + self.assertEqual('"a-string-with-a-very-long-name-that-will-have-to-wrap"', + strings[3].value) + + def testTemplateParsing(self): + test_data = """ +// Copyright {YEAR} The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// This file is autogenerated by +// {SCRIPT_NAME} +// From +// {SOURCE_PATH}, and +// {TEMPLATE_PATH} + +package my.java.package; + +public any sort of class MyClass {{ + +{NATIVE_STRINGS} + +}} +""".split('\n') + package, class_name = java_cpp_strings.ParseTemplateFile(test_data) + self.assertEqual('my.java.package', package) + self.assertEqual('MyClass', class_name) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/v8/build/android/gyp/java_google_api_keys.py b/deps/v8/build/android/gyp/java_google_api_keys.py new file mode 100755 index 0000000000..349821a8fc --- /dev/null +++ b/deps/v8/build/android/gyp/java_google_api_keys.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Generates a Java file with API keys. + +import argparse +import os +import string +import sys +import zipfile + +from util import build_utils + +sys.path.append( + os.path.abspath(os.path.join(sys.path[0], '../../../google_apis'))) +import google_api_keys + + +PACKAGE = 'org.chromium.chrome' +CLASSNAME = 'GoogleAPIKeys' + + +def GetScriptName(): + return os.path.relpath(__file__, build_utils.DIR_SOURCE_ROOT) + + +def GenerateOutput(constant_definitions): + template = string.Template(""" +// Copyright 2015 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +// This file is autogenerated by +// ${SCRIPT_NAME} +// From +// ${SOURCE_PATH} + +package ${PACKAGE}; + +public class ${CLASS_NAME} { +${CONSTANT_ENTRIES} +} +""") + + constant_template = string.Template( + ' public static final String ${NAME} = "${VALUE}";') + constant_entries_list = [] + for constant_name, constant_value in constant_definitions.iteritems(): + values = { + 'NAME': constant_name, + 'VALUE': constant_value, + } + constant_entries_list.append(constant_template.substitute(values)) + constant_entries_string = '\n'.join(constant_entries_list) + + values = { + 'CLASS_NAME': CLASSNAME, + 'CONSTANT_ENTRIES': constant_entries_string, + 'PACKAGE': PACKAGE, + 'SCRIPT_NAME': GetScriptName(), + 'SOURCE_PATH': 'google_api_keys/google_api_keys.h', + } + return template.substitute(values) + + +def _DoWriteJavaOutput(output_path, constant_definition): + folder = os.path.dirname(output_path) + if folder and not os.path.exists(folder): + os.makedirs(folder) + with open(output_path, 'w') as out_file: + out_file.write(GenerateOutput(constant_definition)) + + +def _DoWriteJarOutput(output_path, constant_definition): + folder = os.path.dirname(output_path) + if folder and not os.path.exists(folder): + os.makedirs(folder) + with zipfile.ZipFile(output_path, 'w') as srcjar: + path = '%s/%s' % (PACKAGE.replace('.', '/'), CLASSNAME + '.java') + data = GenerateOutput(constant_definition) + build_utils.AddToZipHermetic(srcjar, path, data=data) + + +def _DoMain(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--out", help="Path for java output.") + parser.add_argument("--srcjar", help="Path for srcjar output.") + options = parser.parse_args(argv) + if not options.out and not options.srcjar: + parser.print_help() + sys.exit(-1) + + values = {} + values['GOOGLE_API_KEY'] = google_api_keys.GetAPIKey() + values['GOOGLE_API_KEY_PHYSICAL_WEB_TEST'] = (google_api_keys. 
+ GetAPIKeyPhysicalWebTest()) + values['GOOGLE_CLIENT_ID_MAIN'] = google_api_keys.GetClientID('MAIN') + values['GOOGLE_CLIENT_SECRET_MAIN'] = google_api_keys.GetClientSecret('MAIN') + values['GOOGLE_CLIENT_ID_CLOUD_PRINT'] = google_api_keys.GetClientID( + 'CLOUD_PRINT') + values['GOOGLE_CLIENT_SECRET_CLOUD_PRINT'] = google_api_keys.GetClientSecret( + 'CLOUD_PRINT') + values['GOOGLE_CLIENT_ID_REMOTING'] = google_api_keys.GetClientID('REMOTING') + values['GOOGLE_CLIENT_SECRET_REMOTING'] = google_api_keys.GetClientSecret( + 'REMOTING') + values['GOOGLE_CLIENT_ID_REMOTING_HOST'] = google_api_keys.GetClientID( + 'REMOTING_HOST') + values['GOOGLE_CLIENT_SECRET_REMOTING_HOST'] = (google_api_keys. + GetClientSecret('REMOTING_HOST')) + values['GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API'] = (google_api_keys. + GetClientID('REMOTING_IDENTITY_API')) + + if options.out: + _DoWriteJavaOutput(options.out, values) + if options.srcjar: + _DoWriteJarOutput(options.srcjar, values) + + +if __name__ == '__main__': + _DoMain(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/java_google_api_keys_tests.py b/deps/v8/build/android/gyp/java_google_api_keys_tests.py new file mode 100755 index 0000000000..6529a5397e --- /dev/null +++ b/deps/v8/build/android/gyp/java_google_api_keys_tests.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Tests for java_google_api_keys.py. + +This test suite contains various tests for the C++ -> Java Google API Keys +generator. +""" + +import unittest + +import java_google_api_keys + + +class TestJavaGoogleAPIKeys(unittest.TestCase): + def testOutput(self): + definition = {'E1': 'abc', 'E2': 'defgh'} + output = java_google_api_keys.GenerateOutput(definition) + expected = """ +// Copyright 2015 The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is autogenerated by +// %s +// From +// google_api_keys/google_api_keys.h + +package org.chromium.chrome; + +public class GoogleAPIKeys { + public static final String E1 = "abc"; + public static final String E2 = "defgh"; +} +""" + self.assertEqual(expected % java_google_api_keys.GetScriptName(), output) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/v8/build/android/gyp/javac.py b/deps/v8/build/android/gyp/javac.py new file mode 100755 index 0000000000..7cbe74c4b1 --- /dev/null +++ b/deps/v8/build/android/gyp/javac.py @@ -0,0 +1,595 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import distutils.spawn +import itertools +import logging +import multiprocessing +import optparse +import os +import shutil +import re +import sys +import zipfile + +from util import build_utils +from util import md5_check +from util import jar_info_utils + +import jar + +sys.path.insert( + 0, + os.path.join(build_utils.DIR_SOURCE_ROOT, 'third_party', 'colorama', 'src')) +import colorama + + +ERRORPRONE_WARNINGS_TO_TURN_OFF = [ + # TODO(crbug.com/834807): Follow steps in bug + 'DoubleBraceInitialization', + # TODO(crbug.com/834790): Follow steps in bug. + 'CatchAndPrintStackTrace', + # TODO(crbug.com/801210): Follow steps in bug. + 'SynchronizeOnNonFinalField', + # TODO(crbug.com/802073): Follow steps in bug. + 'TypeParameterUnusedInFormals', + # TODO(crbug.com/803484): Follow steps in bug. + 'CatchFail', + # TODO(crbug.com/803485): Follow steps in bug. + 'JUnitAmbiguousTestClass', + # Android platform default is always UTF-8. + # https://developer.android.com/reference/java/nio/charset/Charset.html#defaultCharset() + 'DefaultCharset', + # Low priority since the alternatives still work. 
+ 'JdkObsolete', + # We don't use that many lambdas. + 'FunctionalInterfaceClash', + # There are lots of times when we just want to post a task. + 'FutureReturnValueIgnored', + # Nice to be explicit about operators, but not necessary. + 'OperatorPrecedence', + # Just false positives in our code. + 'ThreadJoinLoop', + # Low priority corner cases with String.split. + # Linking Guava and using Splitter was rejected + # in the https://chromium-review.googlesource.com/c/chromium/src/+/871630. + 'StringSplitter', + # Preferred to use another method since it propagates exceptions better. + 'ClassNewInstance', + # Nice to have static inner classes but not necessary. + 'ClassCanBeStatic', + # Explicit is better than implicit. + 'FloatCast', + # Results in false positives. + 'ThreadLocalUsage', + # Also just false positives. + 'Finally', + # False positives for Chromium. + 'FragmentNotInstantiable', + # Low priority to fix. + 'HidingField', + # Low priority. + 'IntLongMath', + # Low priority. + 'BadComparable', + # Low priority. + 'EqualsHashCode', + # Nice to fix but low priority. + 'TypeParameterShadowing', + # Good to have immutable enums, also low priority. + 'ImmutableEnumChecker', + # False positives for testing. + 'InputStreamSlowMultibyteRead', + # Nice to have better primitives. + 'BoxedPrimitiveConstructor', + # Not necessary for tests. + 'OverrideThrowableToString', + # Nice to have better type safety. + 'CollectionToArraySafeParameter', + # Makes logcat debugging more difficult, and does not provide obvious + # benefits in the Chromium codebase. + 'ObjectToString', +] + +ERRORPRONE_WARNINGS_TO_ERROR = [ + # Add warnings to this after fixing/suppressing all instances in our codebase. 
+ 'ArgumentSelectionDefectChecker', + 'AssertionFailureIgnored', + 'FloatingPointLiteralPrecision', + 'JavaLangClash', + 'MissingFail', + 'MissingOverride', + 'NarrowingCompoundAssignment', + 'OrphanedFormatString', + 'ParameterName', + 'ParcelableCreator', + 'ReferenceEquality', + 'StaticGuardedByInstance', + 'StaticQualifiedUsingExpression', + 'UseCorrectAssertInTests', +] + + +def ProcessJavacOutput(output): + fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)' + warning_re = re.compile( + fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$') + error_re = re.compile( + fileline_prefix + r'(?P<full_message> (?P<message>.*))$') + marker_re = re.compile(r'\s*(?P<marker>\^)\s*$') + + # These warnings cannot be suppressed even for third party code. Deprecation + # warnings especially do not help since we must support older android version. + deprecated_re = re.compile( + r'(Note: .* uses? or overrides? a deprecated API.)$') + unchecked_re = re.compile( + r'(Note: .* uses? 
unchecked or unsafe operations.)$') + recompile_re = re.compile(r'(Note: Recompile with -Xlint:.* for details.)$') + + warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM] + error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT] + marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT] + + def Colorize(line, regex, color): + match = regex.match(line) + start = match.start(color[0]) + end = match.end(color[0]) + return (line[:start] + + color[1] + line[start:end] + + colorama.Fore.RESET + colorama.Style.RESET_ALL + + line[end:]) + + def ApplyFilters(line): + return not (deprecated_re.match(line) + or unchecked_re.match(line) + or recompile_re.match(line)) + + def ApplyColors(line): + if warning_re.match(line): + line = Colorize(line, warning_re, warning_color) + elif error_re.match(line): + line = Colorize(line, error_re, error_color) + elif marker_re.match(line): + line = Colorize(line, marker_re, marker_color) + return line + + return '\n'.join(map(ApplyColors, filter(ApplyFilters, output.split('\n')))) + + +def _ExtractClassFiles(jar_path, dest_dir, java_files): + """Extracts all .class files not corresponding to |java_files|.""" + # Two challenges exist here: + # 1. |java_files| have prefixes that are not represented in the the jar paths. + # 2. A single .java file results in multiple .class files when it contains + # nested classes. + # Here's an example: + # source path: ../../base/android/java/src/org/chromium/Foo.java + # jar paths: org/chromium/Foo.class, org/chromium/Foo$Inner.class + # To extract only .class files not related to the given .java files, we strip + # off ".class" and "$*.class" and use a substring match against java_files. 
+ def extract_predicate(path): + if not path.endswith('.class'): + return False + path_without_suffix = re.sub(r'(?:\$|\.)[^/]*class$', '', path) + partial_java_path = path_without_suffix + '.java' + return not any(p.endswith(partial_java_path) for p in java_files) + + logging.info('Extracting class files from %s', jar_path) + build_utils.ExtractAll(jar_path, path=dest_dir, predicate=extract_predicate) + for path in build_utils.FindInDirectory(dest_dir, '*.class'): + shutil.copystat(jar_path, path) + + +def _ParsePackageAndClassNames(java_file): + package_name = '' + class_names = [] + with open(java_file) as f: + for l in f: + # Strip unindented comments. + # Considers a leading * as a continuation of a multi-line comment (our + # linter doesn't enforce a space before it like there should be). + l = re.sub(r'^(?://.*|/?\*.*?(?:\*/\s*|$))', '', l) + + m = re.match(r'package\s+(.*?);', l) + if m and not package_name: + package_name = m.group(1) + + # Not exactly a proper parser, but works for sources that Chrome uses. + # In order to not match nested classes, it just checks for lack of indent. + m = re.match(r'(?:\S.*?)?(?:class|@?interface|enum)\s+(.+?)\b', l) + if m: + class_names.append(m.group(1)) + return package_name, class_names + + +def _CheckPathMatchesClassName(java_file, package_name, class_name): + parts = package_name.split('.') + [class_name + '.java'] + expected_path_suffix = os.path.sep.join(parts) + if not java_file.endswith(expected_path_suffix): + raise Exception(('Java package+class name do not match its path.\n' + 'Actual path: %s\nExpected path: %s') % + (java_file, expected_path_suffix)) + + +def _MoveGeneratedJavaFilesToGenDir(classes_dir, generated_java_dir): + # Move any Annotation Processor-generated .java files into $out/gen + # so that codesearch can find them. 
+ javac_generated_sources = [] + for src_path in build_utils.FindInDirectory(classes_dir, '*.java'): + dst_path = os.path.join(generated_java_dir, + os.path.relpath(src_path, classes_dir)) + build_utils.MakeDirectory(os.path.dirname(dst_path)) + shutil.move(src_path, dst_path) + javac_generated_sources.append(dst_path) + return javac_generated_sources + + +def _ProcessJavaFileForInfo(java_file): + package_name, class_names = _ParsePackageAndClassNames(java_file) + return java_file, package_name, class_names + + +def _ProcessInfo(java_file, package_name, class_names, source, chromium_code): + for class_name in class_names: + yield '{}.{}'.format(package_name, class_name) + # Skip aidl srcjars since they don't indent code correctly. + if '_aidl.srcjar' in source: + continue + assert not chromium_code or len(class_names) == 1, ( + 'Chromium java files must only have one class: {}'.format(source)) + if chromium_code: + # This check is not necessary but nice to check this somewhere. + _CheckPathMatchesClassName(java_file, package_name, class_names[0]) + + +def _CreateInfoFile(java_files, jar_path, chromium_code, srcjar_files, + classes_dir, generated_java_dir): + """Writes a .jar.info file. + + This maps fully qualified names for classes to either the java file that they + are defined in or the path of the srcjar that they came from. + """ + output_path = jar_path + '.info' + logging.info('Start creating info file: %s', output_path) + javac_generated_sources = _MoveGeneratedJavaFilesToGenDir( + classes_dir, generated_java_dir) + logging.info('Finished moving generated java files: %s', output_path) + # 2 processes saves ~0.9s, 3 processes saves ~1.2s, 4 processes saves ~1.2s. 
+ pool = multiprocessing.Pool(processes=3) + results = pool.imap_unordered( + _ProcessJavaFileForInfo, + itertools.chain(java_files, javac_generated_sources), + chunksize=10) + pool.close() + all_info_data = {} + for java_file, package_name, class_names in results: + source = srcjar_files.get(java_file, java_file) + for fully_qualified_name in _ProcessInfo( + java_file, package_name, class_names, source, chromium_code): + all_info_data[fully_qualified_name] = java_file + logging.info('Writing info file: %s', output_path) + with build_utils.AtomicOutput(output_path) as f: + jar_info_utils.WriteJarInfoFile(f, all_info_data, srcjar_files) + logging.info('Completed info file: %s', output_path) + + +def _CreateJarFile(jar_path, provider_configurations, additional_jar_files, + classes_dir): + logging.info('Start creating jar file: %s', jar_path) + with build_utils.AtomicOutput(jar_path) as f: + jar.JarDirectory( + classes_dir, + f.name, + # Avoid putting generated java files into the jar since + # _MoveGeneratedJavaFilesToGenDir has not completed yet + predicate=lambda name: not name.endswith('.java'), + provider_configurations=provider_configurations, + additional_files=additional_jar_files) + logging.info('Completed jar file: %s', jar_path) + + +def _OnStaleMd5(options, javac_cmd, java_files, classpath): + logging.info('Starting _OnStaleMd5') + + # Compiles with Error Prone take twice as long to run as pure javac. Thus GN + # rules run both in parallel, with Error Prone only used for checks. 
+ save_outputs = not options.enable_errorprone + + with build_utils.TempDir() as temp_dir: + srcjars = options.java_srcjars + + classes_dir = os.path.join(temp_dir, 'classes') + os.makedirs(classes_dir) + + if save_outputs: + generated_java_dir = options.generated_dir + else: + generated_java_dir = os.path.join(temp_dir, 'gen') + + shutil.rmtree(generated_java_dir, True) + + srcjar_files = {} + if srcjars: + logging.info('Extracting srcjars to %s', generated_java_dir) + build_utils.MakeDirectory(generated_java_dir) + jar_srcs = [] + for srcjar in options.java_srcjars: + extracted_files = build_utils.ExtractAll( + srcjar, no_clobber=True, path=generated_java_dir, pattern='*.java') + for path in extracted_files: + # We want the path inside the srcjar so the viewer can have a tree + # structure. + srcjar_files[path] = '{}/{}'.format( + srcjar, os.path.relpath(path, generated_java_dir)) + jar_srcs.extend(extracted_files) + logging.info('Done extracting srcjars') + java_files.extend(jar_srcs) + + if java_files: + # Don't include the output directory in the initial set of args since it + # being in a temp dir makes it unstable (breaks md5 stamping). + cmd = javac_cmd + ['-d', classes_dir] + + # Pass classpath and source paths as response files to avoid extremely + # long command lines that are tedius to debug. + if classpath: + cmd += ['-classpath', ':'.join(classpath)] + + java_files_rsp_path = os.path.join(temp_dir, 'files_list.txt') + with open(java_files_rsp_path, 'w') as f: + f.write(' '.join(java_files)) + cmd += ['@' + java_files_rsp_path] + + logging.debug('Build command %s', cmd) + build_utils.CheckOutput( + cmd, + print_stdout=options.chromium_code, + stderr_filter=ProcessJavacOutput) + logging.info('Finished build command') + + if save_outputs: + # Creating the jar file takes the longest, start it first on a separate + # process to unblock the rest of the post-processing steps. 
+ jar_file_worker = multiprocessing.Process( + target=_CreateJarFile, + args=(options.jar_path, options.provider_configurations, + options.additional_jar_files, classes_dir)) + jar_file_worker.start() + else: + jar_file_worker = None + build_utils.Touch(options.jar_path) + + if save_outputs: + _CreateInfoFile(java_files, options.jar_path, options.chromium_code, + srcjar_files, classes_dir, generated_java_dir) + else: + build_utils.Touch(options.jar_path + '.info') + + if jar_file_worker: + jar_file_worker.join() + logging.info('Completed all steps in _OnStaleMd5') + + +def _ParseOptions(argv): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option( + '--java-srcjars', + action='append', + default=[], + help='List of srcjars to include in compilation.') + parser.add_option( + '--generated-dir', + help='Subdirectory within target_gen_dir to place extracted srcjars and ' + 'annotation processor output for codesearch to find.') + parser.add_option( + '--bootclasspath', + action='append', + default=[], + help='Boot classpath for javac. 
If this is specified multiple times, ' + 'they will all be appended to construct the classpath.') + parser.add_option( + '--java-version', + help='Java language version to use in -source and -target args to javac.') + parser.add_option( + '--full-classpath', + action='append', + help='Classpath to use when annotation processors are present.') + parser.add_option( + '--interface-classpath', + action='append', + help='Classpath to use when no annotation processors are present.') + parser.add_option( + '--processors', + action='append', + help='GN list of annotation processor main classes.') + parser.add_option( + '--processorpath', + action='append', + help='GN list of jars that comprise the classpath used for Annotation ' + 'Processors.') + parser.add_option( + '--processor-arg', + dest='processor_args', + action='append', + help='key=value arguments for the annotation processors.') + parser.add_option( + '--provider-configuration', + dest='provider_configurations', + action='append', + help='File to specify a service provider. Will be included ' + 'in the jar under META-INF/services.') + parser.add_option( + '--additional-jar-file', + dest='additional_jar_files', + action='append', + help='Additional files to package into jar. By default, only Java .class ' + 'files are packaged into the jar. 
Files should be specified in ' + 'format <filename>:<path to be placed in jar>.') + parser.add_option( + '--chromium-code', + type='int', + help='Whether code being compiled should be built with stricter ' + 'warnings for chromium code.') + parser.add_option( + '--errorprone-path', help='Use the Errorprone compiler at this path.') + parser.add_option( + '--enable-errorprone', + action='store_true', + help='Enable errorprone checks') + parser.add_option('--jar-path', help='Jar output path.') + parser.add_option( + '--javac-arg', + action='append', + default=[], + help='Additional arguments to pass to javac.') + + options, args = parser.parse_args(argv) + build_utils.CheckOptions(options, parser, required=('jar_path',)) + + options.bootclasspath = build_utils.ParseGnList(options.bootclasspath) + options.full_classpath = build_utils.ParseGnList(options.full_classpath) + options.interface_classpath = build_utils.ParseGnList( + options.interface_classpath) + options.processorpath = build_utils.ParseGnList(options.processorpath) + options.processors = build_utils.ParseGnList(options.processors) + options.java_srcjars = build_utils.ParseGnList(options.java_srcjars) + + if options.java_version == '1.8' and options.bootclasspath: + # Android's boot jar doesn't contain all java 8 classes. + # See: https://github.com/evant/gradle-retrolambda/issues/23. + # Get the path of the jdk folder by searching for the 'jar' executable. We + # cannot search for the 'javac' executable because goma provides a custom + # version of 'javac'. 
+ jar_path = os.path.realpath(distutils.spawn.find_executable('jar')) + jdk_dir = os.path.dirname(os.path.dirname(jar_path)) + rt_jar = os.path.join(jdk_dir, 'jre', 'lib', 'rt.jar') + options.bootclasspath.append(rt_jar) + + additional_jar_files = [] + for arg in options.additional_jar_files or []: + filepath, jar_filepath = arg.split(':') + additional_jar_files.append((filepath, jar_filepath)) + options.additional_jar_files = additional_jar_files + + java_files = [] + for arg in args: + # Interpret a path prefixed with @ as a file containing a list of sources. + if arg.startswith('@'): + java_files.extend(build_utils.ReadSourcesList(arg[1:])) + else: + java_files.append(arg) + + return options, java_files + + +def main(argv): + logging.basicConfig( + level=logging.INFO if os.environ.get('_JAVAC_DEBUG') else logging.WARNING, + format='%(levelname).1s %(relativeCreated)6d %(message)s') + colorama.init() + + argv = build_utils.ExpandFileArgs(argv) + options, java_files = _ParseOptions(argv) + + # Until we add a version of javac via DEPS, use errorprone with all checks + # disabled rather than javac. This ensures builds are reproducible. + # https://crbug.com/693079 + # As of Jan 2019, on a z920, compiling chrome_java times: + # * With javac: 17 seconds + # * With errorprone (checks disabled): 20 seconds + # * With errorprone (checks enabled): 30 seconds + if options.errorprone_path: + javac_path = options.errorprone_path + else: + javac_path = distutils.spawn.find_executable('javac') + + javac_cmd = [ + javac_path, + '-g', + # Chromium only allows UTF8 source files. Being explicit avoids + # javac pulling a default encoding from the user's environment. + '-encoding', + 'UTF-8', + # Prevent compiler from compiling .java files not listed as inputs. 
+ # See: http://blog.ltgt.net/most-build-tools-misuse-javac/ + '-sourcepath', + ':', + ] + + if options.enable_errorprone: + for warning in ERRORPRONE_WARNINGS_TO_TURN_OFF: + javac_cmd.append('-Xep:{}:OFF'.format(warning)) + for warning in ERRORPRONE_WARNINGS_TO_ERROR: + javac_cmd.append('-Xep:{}:ERROR'.format(warning)) + elif options.errorprone_path: + javac_cmd.append('-XepDisableAllChecks') + + if options.java_version: + javac_cmd.extend([ + '-source', options.java_version, + '-target', options.java_version, + ]) + + if options.chromium_code: + javac_cmd.extend(['-Werror']) + else: + # XDignore.symbol.file makes javac compile against rt.jar instead of + # ct.sym. This means that using a java internal package/class will not + # trigger a compile warning or error. + javac_cmd.extend(['-XDignore.symbol.file']) + + if options.processors: + javac_cmd.extend(['-processor', ','.join(options.processors)]) + + if options.bootclasspath: + javac_cmd.extend(['-bootclasspath', ':'.join(options.bootclasspath)]) + + # Annotation processors crash when given interface jars. + active_classpath = ( + options.full_classpath + if options.processors else options.interface_classpath) + classpath = [] + if active_classpath: + classpath.extend(active_classpath) + + if options.processorpath: + javac_cmd.extend(['-processorpath', ':'.join(options.processorpath)]) + if options.processor_args: + for arg in options.processor_args: + javac_cmd.extend(['-A%s' % arg]) + + javac_cmd.extend(options.javac_arg) + + classpath_inputs = (options.bootclasspath + options.interface_classpath + + options.processorpath) + + # GN already knows of java_files, so listing them just make things worse when + # they change. 
+ depfile_deps = [javac_path] + classpath_inputs + options.java_srcjars + input_paths = depfile_deps + java_files + + output_paths = [ + options.jar_path, + options.jar_path + '.info', + ] + + # List python deps in input_strings rather than input_paths since the contents + # of them does not change what gets written to the depsfile. + build_utils.CallAndWriteDepfileIfStale( + lambda: _OnStaleMd5(options, javac_cmd, java_files, classpath), + options, + depfile_deps=depfile_deps, + input_paths=input_paths, + input_strings=javac_cmd + classpath, + output_paths=output_paths, + add_pydeps=False) + logging.info('Script complete: %s', __file__) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/javac.pydeps b/deps/v8/build/android/gyp/javac.pydeps new file mode 100644 index 0000000000..a9d257b95f --- /dev/null +++ b/deps/v8/build/android/gyp/javac.pydeps @@ -0,0 +1,15 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/javac.pydeps build/android/gyp/javac.py +../../../third_party/colorama/src/colorama/__init__.py +../../../third_party/colorama/src/colorama/ansi.py +../../../third_party/colorama/src/colorama/ansitowin32.py +../../../third_party/colorama/src/colorama/initialise.py +../../../third_party/colorama/src/colorama/win32.py +../../../third_party/colorama/src/colorama/winterm.py +../../gn_helpers.py +jar.py +javac.py +util/__init__.py +util/build_utils.py +util/jar_info_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/jinja_template.py b/deps/v8/build/android/gyp/jinja_template.py new file mode 100755 index 0000000000..4d5c403dfe --- /dev/null +++ b/deps/v8/build/android/gyp/jinja_template.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Renders one or more template files using the Jinja template engine.""" + +import codecs +import argparse +import os +import sys + +from util import build_utils +from util import resource_utils + +sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir)) +from pylib.constants import host_paths + +# Import jinja2 from third_party/jinja2 +sys.path.append(os.path.join(host_paths.DIR_SOURCE_ROOT, 'third_party')) +import jinja2 # pylint: disable=F0401 + + +class _RecordingFileSystemLoader(jinja2.FileSystemLoader): + def __init__(self, searchpath): + jinja2.FileSystemLoader.__init__(self, searchpath) + self.loaded_templates = set() + + def get_source(self, environment, template): + contents, filename, uptodate = jinja2.FileSystemLoader.get_source( + self, environment, template) + self.loaded_templates.add(os.path.relpath(filename)) + return contents, filename, uptodate + + +class JinjaProcessor(object): + """Allows easy rendering of jinja templates with input file tracking.""" + def __init__(self, loader_base_dir, variables=None): + self.loader_base_dir = loader_base_dir + self.variables = variables or {} + self.loader = _RecordingFileSystemLoader(loader_base_dir) + self.env = jinja2.Environment(loader=self.loader) + self.env.undefined = jinja2.StrictUndefined + self.env.line_comment_prefix = '##' + self.env.trim_blocks = True + self.env.lstrip_blocks = True + self._template_cache = {} # Map of path -> Template + + def Render(self, input_filename, variables=None): + input_rel_path = os.path.relpath(input_filename, self.loader_base_dir) + template = self._template_cache.get(input_rel_path) + if not template: + template = self.env.get_template(input_rel_path) + self._template_cache[input_rel_path] = template + return template.render(variables or self.variables) + + def GetLoadedTemplates(self): + return list(self.loader.loaded_templates) + + +def _ProcessFile(processor, input_filename, output_filename): + output = processor.Render(input_filename) + + # If 
|output| is same with the file content, we skip update and + # ninja's restat will avoid rebuilding things that depend on it. + if os.path.isfile(output_filename): + with codecs.open(output_filename, 'r', 'utf-8') as f: + if f.read() == output: + return + + with codecs.open(output_filename, 'w', 'utf-8') as output_file: + output_file.write(output) + + +def _ProcessFiles(processor, input_filenames, inputs_base_dir, outputs_zip): + with build_utils.TempDir() as temp_dir: + files_to_zip = dict() + for input_filename in input_filenames: + relpath = os.path.relpath(os.path.abspath(input_filename), + os.path.abspath(inputs_base_dir)) + if relpath.startswith(os.pardir): + raise Exception('input file %s is not contained in inputs base dir %s' + % (input_filename, inputs_base_dir)) + + output_filename = os.path.join(temp_dir, relpath) + parent_dir = os.path.dirname(output_filename) + build_utils.MakeDirectory(parent_dir) + _ProcessFile(processor, input_filename, output_filename) + files_to_zip[relpath] = input_filename + + resource_utils.CreateResourceInfoFile(files_to_zip, outputs_zip) + build_utils.ZipDir(outputs_zip, temp_dir) + + +def _ParseVariables(variables_arg, error_func): + variables = {} + for v in build_utils.ParseGnList(variables_arg): + if '=' not in v: + error_func('--variables argument must contain "=": ' + v) + name, _, value = v.partition('=') + variables[name] = value + return variables + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--inputs', required=True, + help='GN-list of template files to process.') + parser.add_argument('--includes', default='', + help="GN-list of files that get {% include %}'ed.") + parser.add_argument('--output', help='The output file to generate. Valid ' + 'only if there is a single input.') + parser.add_argument('--outputs-zip', help='A zip file for the processed ' + 'templates. 
Required if there are multiple inputs.') + parser.add_argument('--inputs-base-dir', help='A common ancestor directory ' + 'of the inputs. Each output\'s path in the output zip ' + 'will match the relative path from INPUTS_BASE_DIR to ' + 'the input. Required if --output-zip is given.') + parser.add_argument('--loader-base-dir', help='Base path used by the ' + 'template loader. Must be a common ancestor directory of ' + 'the inputs. Defaults to DIR_SOURCE_ROOT.', + default=host_paths.DIR_SOURCE_ROOT) + parser.add_argument('--variables', help='Variables to be made available in ' + 'the template processing environment, as a GYP list ' + '(e.g. --variables "channel=beta mstone=39")', default='') + parser.add_argument('--check-includes', action='store_true', + help='Enable inputs and includes checks.') + options = parser.parse_args() + + inputs = build_utils.ParseGnList(options.inputs) + includes = build_utils.ParseGnList(options.includes) + + if (options.output is None) == (options.outputs_zip is None): + parser.error('Exactly one of --output and --output-zip must be given') + if options.output and len(inputs) != 1: + parser.error('--output cannot be used with multiple inputs') + if options.outputs_zip and not options.inputs_base_dir: + parser.error('--inputs-base-dir must be given when --output-zip is used') + + variables = _ParseVariables(options.variables, parser.error) + processor = JinjaProcessor(options.loader_base_dir, variables=variables) + + if options.output: + _ProcessFile(processor, inputs[0], options.output) + else: + _ProcessFiles(processor, inputs, options.inputs_base_dir, + options.outputs_zip) + + if options.check_includes: + all_inputs = set(processor.GetLoadedTemplates()) + all_inputs.difference_update(inputs) + all_inputs.difference_update(includes) + if all_inputs: + raise Exception('Found files not listed via --includes:\n' + + '\n'.join(sorted(all_inputs))) + + +if __name__ == '__main__': + main() diff --git 
a/deps/v8/build/android/gyp/jinja_template.pydeps b/deps/v8/build/android/gyp/jinja_template.pydeps new file mode 100644 index 0000000000..a2a38176bf --- /dev/null +++ b/deps/v8/build/android/gyp/jinja_template.pydeps @@ -0,0 +1,41 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/jinja_template.pydeps build/android/gyp/jinja_template.py +../../../third_party/catapult/devil/devil/__init__.py +../../../third_party/catapult/devil/devil/android/__init__.py +../../../third_party/catapult/devil/devil/android/constants/__init__.py +../../../third_party/catapult/devil/devil/android/constants/chrome.py +../../../third_party/catapult/devil/devil/android/sdk/__init__.py +../../../third_party/catapult/devil/devil/android/sdk/keyevent.py +../../../third_party/catapult/devil/devil/android/sdk/version_codes.py +../../../third_party/catapult/devil/devil/constants/__init__.py +../../../third_party/catapult/devil/devil/constants/exit_codes.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_compat.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../gn_helpers.py +../pylib/__init__.py +../pylib/constants/__init__.py +../pylib/constants/host_paths.py +jinja_template.py 
+util/__init__.py +util/build_utils.py +util/md5_check.py +util/resource_utils.py diff --git a/deps/v8/build/android/gyp/lint.py b/deps/v8/build/android/gyp/lint.py new file mode 100755 index 0000000000..b2f90c7e9e --- /dev/null +++ b/deps/v8/build/android/gyp/lint.py @@ -0,0 +1,399 @@ +#!/usr/bin/env python +# +# Copyright (c) 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Runs Android's lint tool.""" + + +import argparse +import os +import re +import sys +import traceback +from xml.dom import minidom + +from util import build_utils + +_LINT_MD_URL = 'https://chromium.googlesource.com/chromium/src/+/master/build/android/docs/lint.md' # pylint: disable=line-too-long + + +def _OnStaleMd5(lint_path, config_path, processed_config_path, + manifest_path, result_path, product_dir, sources, jar_path, + cache_dir, android_sdk_version, srcjars, resource_sources, + disable=None, classpath=None, can_fail_build=False, + include_unexpected=False, silent=False): + def _RebasePath(path): + """Returns relative path to top-level src dir. + + Args: + path: A path relative to cwd. + """ + ret = os.path.relpath(os.path.abspath(path), build_utils.DIR_SOURCE_ROOT) + # If it's outside of src/, just use abspath. 
+ if ret.startswith('..'): + ret = os.path.abspath(path) + return ret + + def _ProcessConfigFile(): + if not config_path or not processed_config_path: + return + if not build_utils.IsTimeStale(processed_config_path, [config_path]): + return + + with open(config_path, 'rb') as f: + content = f.read().replace( + 'PRODUCT_DIR', _RebasePath(product_dir)) + + with open(processed_config_path, 'wb') as f: + f.write(content) + + def _ProcessResultFile(): + with open(result_path, 'rb') as f: + content = f.read().replace( + _RebasePath(product_dir), 'PRODUCT_DIR') + + with open(result_path, 'wb') as f: + f.write(content) + + def _ParseAndShowResultFile(): + dom = minidom.parse(result_path) + issues = dom.getElementsByTagName('issue') + if not silent: + print >> sys.stderr + for issue in issues: + issue_id = issue.attributes['id'].value + message = issue.attributes['message'].value + location_elem = issue.getElementsByTagName('location')[0] + path = location_elem.attributes['file'].value + line = location_elem.getAttribute('line') + if line: + error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id) + else: + # Issues in class files don't have a line number. + error = '%s %s: %s [warning]' % (path, message, issue_id) + print >> sys.stderr, error.encode('utf-8') + for attr in ['errorLine1', 'errorLine2']: + error_line = issue.getAttribute(attr) + if error_line: + print >> sys.stderr, error_line.encode('utf-8') + return len(issues) + + with build_utils.TempDir() as temp_dir: + _ProcessConfigFile() + + cmd = [ + _RebasePath(lint_path), '-Werror', '--exitcode', '--showall', + '--xml', _RebasePath(result_path), + ] + if jar_path: + # --classpath is just for .class files for this one target. 
+ cmd.extend(['--classpath', _RebasePath(jar_path)]) + if processed_config_path: + cmd.extend(['--config', _RebasePath(processed_config_path)]) + + tmp_dir_counter = [0] + def _NewTempSubdir(prefix, append_digit=True): + # Helper function to create a new sub directory based on the number of + # subdirs created earlier. + if append_digit: + tmp_dir_counter[0] += 1 + prefix += str(tmp_dir_counter[0]) + new_dir = os.path.join(temp_dir, prefix) + os.makedirs(new_dir) + return new_dir + + resource_dirs = [] + for resource_source in resource_sources: + if os.path.isdir(resource_source): + resource_dirs.append(resource_source) + else: + # This is a zip file with generated resources (e. g. strings from GRD). + # Extract it to temporary folder. + resource_dir = _NewTempSubdir(resource_source, append_digit=False) + resource_dirs.append(resource_dir) + build_utils.ExtractAll(resource_source, path=resource_dir) + + for resource_dir in resource_dirs: + cmd.extend(['--resources', _RebasePath(resource_dir)]) + + if classpath: + # --libraries is the classpath (excluding active target). + cp = ':'.join(_RebasePath(p) for p in classpath) + cmd.extend(['--libraries', cp]) + + # There may be multiple source files with the same basename (but in + # different directories). It is difficult to determine what part of the path + # corresponds to the java package, and so instead just link the source files + # into temporary directories (creating a new one whenever there is a name + # conflict). 
+ def PathInDir(d, src): + subpath = os.path.join(d, _RebasePath(src)) + subdir = os.path.dirname(subpath) + if not os.path.exists(subdir): + os.makedirs(subdir) + return subpath + + src_dirs = [] + for src in sources: + src_dir = None + for d in src_dirs: + if not os.path.exists(PathInDir(d, src)): + src_dir = d + break + if not src_dir: + src_dir = _NewTempSubdir('SRC_ROOT') + src_dirs.append(src_dir) + cmd.extend(['--sources', _RebasePath(src_dir)]) + os.symlink(os.path.abspath(src), PathInDir(src_dir, src)) + + if srcjars: + srcjar_paths = build_utils.ParseGnList(srcjars) + if srcjar_paths: + srcjar_dir = _NewTempSubdir('SRC_ROOT') + cmd.extend(['--sources', _RebasePath(srcjar_dir)]) + for srcjar in srcjar_paths: + build_utils.ExtractAll(srcjar, path=srcjar_dir) + + if disable: + cmd.extend(['--disable', ','.join(disable)]) + + project_dir = _NewTempSubdir('SRC_ROOT') + if android_sdk_version: + # Create dummy project.properies file in a temporary "project" directory. + # It is the only way to add Android SDK to the Lint's classpath. Proper + # classpath is necessary for most source-level checks. + with open(os.path.join(project_dir, 'project.properties'), 'w') \ + as propfile: + print >> propfile, 'target=android-{}'.format(android_sdk_version) + + # Put the manifest in a temporary directory in order to avoid lint detecting + # sibling res/ and src/ directories (which should be pass explicitly if they + # are to be included). + if not manifest_path: + manifest_path = os.path.join( + build_utils.DIR_SOURCE_ROOT, 'build', 'android', + 'AndroidManifest.xml') + os.symlink(os.path.abspath(manifest_path), + os.path.join(project_dir, 'AndroidManifest.xml')) + cmd.append(project_dir) + + if os.path.exists(result_path): + os.remove(result_path) + + env = os.environ.copy() + stderr_filter = None + if cache_dir: + env['_JAVA_OPTIONS'] = '-Duser.home=%s' % _RebasePath(cache_dir) + # When _JAVA_OPTIONS is set, java prints to stderr: + # Picked up _JAVA_OPTIONS: ... 
+ # + # We drop all lines that contain _JAVA_OPTIONS from the output + stderr_filter = lambda l: re.sub(r'.*_JAVA_OPTIONS.*\n?', '', l) + + def fail_func(returncode, stderr): + if returncode != 0: + return True + if (include_unexpected and + 'Unexpected failure during lint analysis' in stderr): + return True + return False + + try: + build_utils.CheckOutput(cmd, cwd=build_utils.DIR_SOURCE_ROOT, + env=env or None, stderr_filter=stderr_filter, + fail_func=fail_func) + except build_utils.CalledProcessError: + # There is a problem with lint usage + if not os.path.exists(result_path): + raise + + # Sometimes produces empty (almost) files: + if os.path.getsize(result_path) < 10: + if can_fail_build: + raise + elif not silent: + traceback.print_exc() + return + + # There are actual lint issues + try: + num_issues = _ParseAndShowResultFile() + except Exception: # pylint: disable=broad-except + if not silent: + print 'Lint created unparseable xml file...' + print 'File contents:' + with open(result_path) as f: + print f.read() + if can_fail_build: + traceback.print_exc() + if can_fail_build: + raise + else: + return + + _ProcessResultFile() + if num_issues == 0 and include_unexpected: + msg = 'Please refer to output above for unexpected lint failures.\n' + else: + msg = ('\nLint found %d new issues.\n' + ' - For full explanation, please refer to %s\n' + ' - For more information about lint and how to fix lint issues,' + ' please refer to %s\n' % + (num_issues, _RebasePath(result_path), _LINT_MD_URL)) + if not silent: + print >> sys.stderr, msg + if can_fail_build: + raise Exception('Lint failed.') + + +def _FindInDirectories(directories, filename_filter): + all_files = [] + for directory in directories: + all_files.extend(build_utils.FindInDirectory(directory, filename_filter)) + return all_files + + +def main(): + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + + parser.add_argument('--lint-path', required=True, + help='Path to lint executable.') 
+ parser.add_argument('--product-dir', required=True, + help='Path to product dir.') + parser.add_argument('--result-path', required=True, + help='Path to XML lint result file.') + parser.add_argument('--cache-dir', required=True, + help='Path to the directory in which the android cache ' + 'directory tree should be stored.') + parser.add_argument('--platform-xml-path', required=True, + help='Path to api-platforms.xml') + parser.add_argument('--android-sdk-version', + help='Version (API level) of the Android SDK used for ' + 'building.') + parser.add_argument('--create-cache', action='store_true', + help='Mark the lint cache file as an output rather than ' + 'an input.') + parser.add_argument('--can-fail-build', action='store_true', + help='If set, script will exit with nonzero exit status' + ' if lint errors are present') + parser.add_argument('--include-unexpected-failures', action='store_true', + help='If set, script will exit with nonzero exit status' + ' if lint itself crashes with unexpected failures.') + parser.add_argument('--config-path', + help='Path to lint suppressions file.') + parser.add_argument('--disable', + help='List of checks to disable.') + parser.add_argument('--jar-path', + help='Jar file containing class files.') + parser.add_argument('--java-sources-file', + help='File containing a list of java files.') + parser.add_argument('--manifest-path', + help='Path to AndroidManifest.xml') + parser.add_argument('--classpath', default=[], action='append', + help='GYP-list of classpath .jar files') + parser.add_argument('--processed-config-path', + help='Path to processed lint suppressions file.') + parser.add_argument('--resource-dir', + help='Path to resource dir.') + parser.add_argument('--resource-sources', default=[], action='append', + help='GYP-list of resource sources (directories with ' + 'resources or archives created by resource-generating ' + 'tasks.') + parser.add_argument('--silent', action='store_true', + help='If set, script will not 
log anything.') + parser.add_argument('--src-dirs', + help='Directories containing java files.') + parser.add_argument('--srcjars', + help='GN list of included srcjars.') + + args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) + + sources = [] + if args.src_dirs: + src_dirs = build_utils.ParseGnList(args.src_dirs) + sources = _FindInDirectories(src_dirs, '*.java') + elif args.java_sources_file: + sources.extend(build_utils.ReadSourcesList(args.java_sources_file)) + + if args.config_path and not args.processed_config_path: + parser.error('--config-path specified without --processed-config-path') + elif args.processed_config_path and not args.config_path: + parser.error('--processed-config-path specified without --config-path') + + input_paths = [ + args.lint_path, + args.platform_xml_path, + ] + if args.config_path: + input_paths.append(args.config_path) + if args.jar_path: + input_paths.append(args.jar_path) + if args.manifest_path: + input_paths.append(args.manifest_path) + if sources: + input_paths.extend(sources) + classpath = [] + for gyp_list in args.classpath: + classpath.extend(build_utils.ParseGnList(gyp_list)) + input_paths.extend(classpath) + + resource_sources = [] + if args.resource_dir: + # Backward compatibility with GYP + resource_sources += [ args.resource_dir ] + + for gyp_list in args.resource_sources: + resource_sources += build_utils.ParseGnList(gyp_list) + + for resource_source in resource_sources: + if os.path.isdir(resource_source): + input_paths.extend(build_utils.FindInDirectory(resource_source, '*')) + else: + input_paths.append(resource_source) + + input_strings = [ + args.can_fail_build, + args.include_unexpected_failures, + args.silent, + ] + if args.android_sdk_version: + input_strings.append(args.android_sdk_version) + if args.processed_config_path: + input_strings.append(args.processed_config_path) + + disable = [] + if args.disable: + disable = build_utils.ParseGnList(args.disable) + input_strings.extend(disable) + + 
output_paths = [args.result_path, args.processed_config_path] + + build_utils.CallAndWriteDepfileIfStale( + lambda: _OnStaleMd5(args.lint_path, + args.config_path, + args.processed_config_path, + args.manifest_path, args.result_path, + args.product_dir, sources, + args.jar_path, + args.cache_dir, + args.android_sdk_version, + args.srcjars, + resource_sources, + disable=disable, + classpath=classpath, + can_fail_build=args.can_fail_build, + include_unexpected=args.include_unexpected_failures, + silent=args.silent), + args, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + depfile_deps=classpath, + add_pydeps=False) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/lint.pydeps b/deps/v8/build/android/gyp/lint.pydeps new file mode 100644 index 0000000000..a8616e4d37 --- /dev/null +++ b/deps/v8/build/android/gyp/lint.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/lint.pydeps build/android/gyp/lint.py +../../gn_helpers.py +lint.py +util/__init__.py +util/build_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/main_dex_list.py b/deps/v8/build/android/gyp/main_dex_list.py new file mode 100755 index 0000000000..2435859099 --- /dev/null +++ b/deps/v8/build/android/gyp/main_dex_list.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python +# +# Copyright 2015 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import argparse +import json +import os +import sys +import tempfile +import zipfile + +from util import build_utils +from util import proguard_util + + +def main(args): + parser = argparse.ArgumentParser() + build_utils.AddDepfileOption(parser) + parser.add_argument('--shrinked-android-path', required=True, + help='Path to shrinkedAndroid.jar') + parser.add_argument('--dx-path', required=True, + help='Path to dx.jar') + parser.add_argument('--main-dex-rules-path', action='append', default=[], + dest='main_dex_rules_paths', + help='A file containing a list of proguard rules to use ' + 'in determining the class to include in the ' + 'main dex.') + parser.add_argument('--main-dex-list-path', required=True, + help='The main dex list file to generate.') + parser.add_argument('--inputs', + help='JARs for which a main dex list should be ' + 'generated.') + parser.add_argument('--proguard-path', required=True, + help='Path to the proguard executable.') + parser.add_argument('--negative-main-dex-globs', + help='GN-list of globs of .class names (e.g. 
org/chromium/foo/Bar.class) ' + 'that will fail the build if they match files in the main dex.') + + parser.add_argument('paths', nargs='*', default=[], + help='JARs for which a main dex list should be ' + 'generated.') + + args = parser.parse_args(build_utils.ExpandFileArgs(args)) + + depfile_deps = [] + if args.inputs: + args.inputs = build_utils.ParseGnList(args.inputs) + depfile_deps = args.inputs + args.paths.extend(args.inputs) + + if args.negative_main_dex_globs: + args.negative_main_dex_globs = build_utils.ParseGnList( + args.negative_main_dex_globs) + + proguard_cmd = [ + 'java', '-jar', args.proguard_path, + '-forceprocessing', + '-dontwarn', '-dontoptimize', '-dontobfuscate', '-dontpreverify', + '-libraryjars', args.shrinked_android_path, + ] + for m in args.main_dex_rules_paths: + proguard_cmd.extend(['-include', m]) + + main_dex_list_cmd = [ + 'java', '-cp', args.dx_path, + 'com.android.multidex.MainDexListBuilder', + # This workaround significantly increases main dex size and doesn't seem to + # be needed by Chrome. 
See comment in the source: + # https://android.googlesource.com/platform/dalvik/+/master/dx/src/com/android/multidex/MainDexListBuilder.java + '--disable-annotation-resolution-workaround', + ] + + input_paths = list(args.paths) + input_paths += [ + args.shrinked_android_path, + args.dx_path, + ] + input_paths += args.main_dex_rules_paths + + input_strings = [ + proguard_cmd, + main_dex_list_cmd, + ] + if args.negative_main_dex_globs: + input_strings += args.negative_main_dex_globs + + output_paths = [ + args.main_dex_list_path, + ] + + build_utils.CallAndWriteDepfileIfStale( + lambda: _OnStaleMd5(proguard_cmd, main_dex_list_cmd, args.paths, + args.main_dex_list_path, + args.negative_main_dex_globs), + args, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + depfile_deps=depfile_deps, + add_pydeps=False) + + return 0 + + +def _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs): + # Check if ProGuard kept any unwanted classes. + found_unwanted_classes = sorted( + p for p in kept_classes + if build_utils.MatchesGlob(p, negative_main_dex_globs)) + + if found_unwanted_classes: + first_class = found_unwanted_classes[0].replace( + '.class', '').replace('/', '.') + proguard_cmd += ['-whyareyoukeeping', 'class', first_class, '{}'] + output = build_utils.CheckOutput( + proguard_cmd, print_stderr=False, + stdout_filter=proguard_util.ProguardOutputFilter()) + raise Exception( + ('Found classes that should not be in the main dex:\n {}\n\n' + 'Here is the -whyareyoukeeping output for {}: \n{}').format( + '\n '.join(found_unwanted_classes), first_class, output)) + + +def _OnStaleMd5(proguard_cmd, main_dex_list_cmd, paths, main_dex_list_path, + negative_main_dex_globs): + paths_arg = ':'.join(paths) + main_dex_list = '' + try: + with tempfile.NamedTemporaryFile(suffix='.jar') as temp_jar: + # Step 1: Use ProGuard to find all @MainDex code, and all code reachable + # from @MainDex code (recursive). 
+ proguard_cmd += [ + '-injars', paths_arg, + '-outjars', temp_jar.name + ] + build_utils.CheckOutput(proguard_cmd, print_stderr=False) + + # Record the classes kept by ProGuard. Not used by the build, but useful + # for debugging what classes are kept by ProGuard vs. MainDexListBuilder. + with zipfile.ZipFile(temp_jar.name) as z: + kept_classes = [p for p in z.namelist() if p.endswith('.class')] + with open(main_dex_list_path + '.partial', 'w') as f: + f.write('\n'.join(kept_classes) + '\n') + + if negative_main_dex_globs: + # Perform assertions before MainDexListBuilder because: + # a) MainDexListBuilder is not recursive, so being included by it isn't + # a huge deal. + # b) Errors are much more actionable. + _CheckForUnwanted(kept_classes, proguard_cmd, negative_main_dex_globs) + + # Step 2: Expand inclusion list to all classes referenced by the .class + # files of kept classes (non-recursive). + main_dex_list_cmd += [ + temp_jar.name, paths_arg + ] + main_dex_list = build_utils.CheckOutput(main_dex_list_cmd) + + except build_utils.CalledProcessError as e: + if 'output jar is empty' in e.output: + pass + elif "input doesn't contain any classes" in e.output: + pass + else: + raise + + with open(main_dex_list_path, 'w') as main_dex_list_file: + main_dex_list_file.write(main_dex_list) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/main_dex_list.pydeps b/deps/v8/build/android/gyp/main_dex_list.pydeps new file mode 100644 index 0000000000..8c482dfa52 --- /dev/null +++ b/deps/v8/build/android/gyp/main_dex_list.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/main_dex_list.pydeps build/android/gyp/main_dex_list.py +../../gn_helpers.py +main_dex_list.py +util/__init__.py +util/build_utils.py +util/md5_check.py +util/proguard_util.py diff --git a/deps/v8/build/android/gyp/merge_manifest.py b/deps/v8/build/android/gyp/merge_manifest.py 
new file mode 100755 index 0000000000..0637d43492 --- /dev/null +++ b/deps/v8/build/android/gyp/merge_manifest.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python + +# Copyright 2017 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Merges dependency Android manifests into a root manifest.""" + +import argparse +import contextlib +import os +import shlex +import sys +import tempfile +import xml.dom.minidom as minidom +import xml.etree.ElementTree as ElementTree + +from util import build_utils +from util import diff_utils + +# Tools library directory - relative to Android SDK root +_SDK_TOOLS_LIB_DIR = os.path.join('tools', 'lib') + +_MANIFEST_MERGER_MAIN_CLASS = 'com.android.manifmerger.Merger' +_MANIFEST_MERGER_JARS = [ + 'common{suffix}.jar', + 'manifest-merger{suffix}.jar', + 'sdk-common{suffix}.jar', + 'sdklib{suffix}.jar', +] + +_TOOLS_NAMESPACE_PREFIX = 'tools' +_TOOLS_NAMESPACE = 'http://schemas.android.com/tools' +_ANDROID_NAMESPACE = 'http://schemas.android.com/apk/res/android' + +# Without registering namespaces ElementTree converts them to "ns0" and "ns1" +ElementTree.register_namespace('tools', _TOOLS_NAMESPACE) +ElementTree.register_namespace('android', _ANDROID_NAMESPACE) + + +@contextlib.contextmanager +def _ProcessManifest(manifest_path): + """Patches an Android manifest to always include the 'tools' namespace + declaration, as it is not propagated by the manifest merger from the SDK. 
+ + See https://issuetracker.google.com/issues/63411481 + """ + doc = minidom.parse(manifest_path) + manifests = doc.getElementsByTagName('manifest') + assert len(manifests) == 1 + manifest = manifests[0] + package = manifest.getAttribute('package') + + manifest.setAttribute('xmlns:%s' % _TOOLS_NAMESPACE_PREFIX, _TOOLS_NAMESPACE) + + tmp_prefix = os.path.basename(manifest_path) + with tempfile.NamedTemporaryFile(prefix=tmp_prefix) as patched_manifest: + doc.writexml(patched_manifest) + patched_manifest.flush() + yield patched_manifest.name, package + + +def _BuildManifestMergerClasspath(build_vars): + return ':'.join([ + os.path.join( + build_vars['android_sdk_root'], _SDK_TOOLS_LIB_DIR, + jar.format(suffix=build_vars['android_sdk_tools_version_suffix'])) + for jar in _MANIFEST_MERGER_JARS + ]) + + +def _SortAndStripElementTree(tree, reverse_toplevel=False): + for node in tree: + if node.text and node.text.isspace(): + node.text = None + _SortAndStripElementTree(node) + tree[:] = sorted(tree, key=ElementTree.tostring, reverse=reverse_toplevel) + + +def _NormalizeManifest(path): + with open(path) as f: + # This also strips comments and sorts node attributes alphabetically. + root = ElementTree.fromstring(f.read()) + + # Sort nodes alphabetically, recursively. + _SortAndStripElementTree(root, reverse_toplevel=True) + + # Fix up whitespace/indentation. 
+ dom = minidom.parseString(ElementTree.tostring(root)) + lines = [] + for l in dom.toprettyxml(indent=' ').splitlines(): + if l.strip(): + if len(l) > 100: + indent = ' ' * l.find('<') + attributes = shlex.split(l, posix=False) + lines.append('{}{}'.format(indent, attributes[0])) + for attribute in attributes[1:]: + lines.append('{} {}'.format(indent, attribute)) + else: + lines.append(l) + + return '\n'.join(lines) + + +def main(argv): + argv = build_utils.ExpandFileArgs(argv) + parser = argparse.ArgumentParser(description=__doc__) + build_utils.AddDepfileOption(parser) + parser.add_argument('--build-vars', + help='Path to GN build vars file', + required=True) + parser.add_argument('--root-manifest', + help='Root manifest which to merge into', + required=True) + parser.add_argument( + '--expected-manifest', help='Expected contents for the merged manifest.') + parser.add_argument('--normalized-output', help='Normalized merged manifest.') + parser.add_argument( + '--verify-expected-manifest', + action='store_true', + help='Fail if expected contents do not match merged manifest contents.') + parser.add_argument('--output', help='Output manifest path', required=True) + parser.add_argument('--extras', + help='GN list of additional manifest to merge') + args = parser.parse_args(argv) + + classpath = _BuildManifestMergerClasspath( + build_utils.ReadBuildVars(args.build_vars)) + + with build_utils.AtomicOutput(args.output) as output: + cmd = [ + 'java', + '-cp', + classpath, + _MANIFEST_MERGER_MAIN_CLASS, + '--out', + output.name, + ] + + extras = build_utils.ParseGnList(args.extras) + if extras: + cmd += ['--libs', ':'.join(extras)] + + with _ProcessManifest(args.root_manifest) as tup: + root_manifest, package = tup + cmd += ['--main', root_manifest, '--property', 'PACKAGE=' + package] + build_utils.CheckOutput(cmd, + # https://issuetracker.google.com/issues/63514300: + # The merger doesn't set a nonzero exit code for failures. 
+ fail_func=lambda returncode, stderr: returncode != 0 or + build_utils.IsTimeStale(output.name, [root_manifest] + extras)) + + if args.expected_manifest: + with build_utils.AtomicOutput(args.normalized_output) as normalized_output: + normalized_output.write(_NormalizeManifest(args.output)) + msg = diff_utils.DiffFileContents(args.expected_manifest, + args.normalized_output) + if msg: + sys.stderr.write("""\ +AndroidManifest.xml expectations file needs updating. For details see: +https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md +""") + sys.stderr.write(msg) + if args.verify_expected_manifest: + sys.exit(1) + + if args.depfile: + inputs = extras + classpath.split(':') + build_utils.WriteDepfile(args.depfile, args.output, inputs=inputs, + add_pydeps=False) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/merge_manifest.pydeps b/deps/v8/build/android/gyp/merge_manifest.pydeps new file mode 100644 index 0000000000..797cd5fbd6 --- /dev/null +++ b/deps/v8/build/android/gyp/merge_manifest.pydeps @@ -0,0 +1,8 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/merge_manifest.pydeps build/android/gyp/merge_manifest.py +../../gn_helpers.py +merge_manifest.py +util/__init__.py +util/build_utils.py +util/diff_utils.py +util/md5_check.py diff --git a/deps/v8/build/android/gyp/prepare_resources.py b/deps/v8/build/android/gyp/prepare_resources.py new file mode 100755 index 0000000000..a463f29645 --- /dev/null +++ b/deps/v8/build/android/gyp/prepare_resources.py @@ -0,0 +1,324 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Process Android resource directories to generate .resources.zip, R.txt and +.srcjar files.""" + +import argparse +import collections +import os +import re +import shutil +import sys + +import generate_v14_compatible_resources + +from util import build_utils +from util import resource_utils + +_AAPT_IGNORE_PATTERN = ':'.join([ + 'OWNERS', # Allow OWNERS files within res/ + '*.py', # PRESUBMIT.py sometimes exist. + '*.pyc', + '*~', # Some editors create these as temp files. + '.*', # Never makes sense to include dot(files/dirs). + '*.d.stamp', # Ignore stamp files + ]) + +def _ParseArgs(args): + """Parses command line options. + + Returns: + An options object as from argparse.ArgumentParser.parse_args() + """ + parser, input_opts, output_opts = resource_utils.ResourceArgsParser() + + input_opts.add_argument( + '--aapt-path', required=True, help='Path to the Android aapt tool') + + input_opts.add_argument('--resource-dirs', + default='[]', + help='A list of input directories containing resources ' + 'for this target.') + + input_opts.add_argument( + '--shared-resources', + action='store_true', + help='Make resources shareable by generating an onResourcesLoaded() ' + 'method in the R.java source file.') + + input_opts.add_argument('--custom-package', + help='Optional Java package for main R.java.') + + input_opts.add_argument( + '--android-manifest', + help='Optional AndroidManifest.xml path. Only used to extract a package ' + 'name for R.java if a --custom-package is not provided.') + + output_opts.add_argument( + '--resource-zip-out', + help='Path to a zip archive containing all resources from ' + '--resource-dirs, merged into a single directory tree. 
This will ' + 'also include auto-generated v14-compatible resources unless ' + '--v14-skip is used.') + + output_opts.add_argument('--srcjar-out', + help='Path to .srcjar to contain the generated R.java.') + + output_opts.add_argument('--r-text-out', + help='Path to store the generated R.txt file.') + + input_opts.add_argument( + '--v14-skip', + action="store_true", + help='Do not generate nor verify v14 resources.') + + input_opts.add_argument( + '--strip-drawables', + action="store_true", + help='Remove drawables from the resources.') + + options = parser.parse_args(args) + + resource_utils.HandleCommonOptions(options) + + options.resource_dirs = build_utils.ParseGnList(options.resource_dirs) + + return options + + +def _GenerateGlobs(pattern): + # This function processes the aapt ignore assets pattern into a list of globs + # to be used to exclude files on the python side. It removes the '!', which is + # used by aapt to mean 'not chatty' so it does not output if the file is + # ignored (we dont output anyways, so it is not required). This function does + # not handle the <dir> and <file> prefixes used by aapt and are assumed not to + # be included in the pattern string. + return pattern.replace('!', '').split(':') + + +def _ZipResources(resource_dirs, zip_path, ignore_pattern): + # Python zipfile does not provide a way to replace a file (it just writes + # another file with the same name). So, first collect all the files to put + # in the zip (with proper overriding), and then zip them. + # ignore_pattern is a string of ':' delimited list of globs used to ignore + # files that should not be part of the final resource zip. 
+ files_to_zip = dict() + files_to_zip_without_generated = dict() + globs = _GenerateGlobs(ignore_pattern) + for d in resource_dirs: + for root, _, files in os.walk(d): + for f in files: + archive_path = f + parent_dir = os.path.relpath(root, d) + if parent_dir != '.': + archive_path = os.path.join(parent_dir, f) + path = os.path.join(root, f) + if build_utils.MatchesGlob(archive_path, globs): + continue + # We want the original resource dirs in the .info file rather than the + # generated overridden path. + if not path.startswith('/tmp'): + files_to_zip_without_generated[archive_path] = path + files_to_zip[archive_path] = path + resource_utils.CreateResourceInfoFile(files_to_zip_without_generated, + zip_path) + build_utils.DoZip(files_to_zip.iteritems(), zip_path) + + +def _GenerateRTxt(options, dep_subdirs, gen_dir): + """Generate R.txt file. + + Args: + options: The command-line options tuple. + dep_subdirs: List of directories containing extracted dependency resources. + gen_dir: Locates where the aapt-generated files will go. In particular + the output file is always generated as |{gen_dir}/R.txt|. + """ + # NOTE: This uses aapt rather than aapt2 because 'aapt2 compile' does not + # support the --output-text-symbols option yet (https://crbug.com/820460). + package_command = [options.aapt_path, + 'package', + '-m', + '-M', resource_utils.EMPTY_ANDROID_MANIFEST_PATH, + '--no-crunch', + '--auto-add-overlay', + '--no-version-vectors', + ] + for j in options.include_resources: + package_command += ['-I', j] + + ignore_pattern = _AAPT_IGNORE_PATTERN + if options.strip_drawables: + ignore_pattern += ':*drawable*' + package_command += [ + '--output-text-symbols', + gen_dir, + '-J', + gen_dir, # Required for R.txt generation. + '--ignore-assets', + ignore_pattern + ] + + # Adding all dependencies as sources is necessary for @type/foo references + # to symbols within dependencies to resolve. 
However, it has the side-effect + # that all Java symbols from dependencies are copied into the new R.java. + # E.g.: It enables an arguably incorrect usage of + # "mypackage.R.id.lib_symbol" where "libpackage.R.id.lib_symbol" would be + # more correct. This is just how Android works. + for d in dep_subdirs: + package_command += ['-S', d] + + for d in options.resource_dirs: + package_command += ['-S', d] + + # Only creates an R.txt + build_utils.CheckOutput( + package_command, print_stdout=False, print_stderr=False) + + +def _GenerateResourcesZip(output_resource_zip, input_resource_dirs, v14_skip, + strip_drawables, temp_dir): + """Generate a .resources.zip file fron a list of input resource dirs. + + Args: + output_resource_zip: Path to the output .resources.zip file. + input_resource_dirs: A list of input resource directories. + v14_skip: If False, then v14-compatible resource will also be + generated in |{temp_dir}/v14| and added to the final zip. + temp_dir: Path to temporary directory. + """ + if not v14_skip: + # Generate v14-compatible resources in temp_dir. + v14_dir = os.path.join(temp_dir, 'v14') + build_utils.MakeDirectory(v14_dir) + + for resource_dir in input_resource_dirs: + generate_v14_compatible_resources.GenerateV14Resources( + resource_dir, + v14_dir) + + input_resource_dirs.append(v14_dir) + + ignore_pattern = _AAPT_IGNORE_PATTERN + if strip_drawables: + ignore_pattern += ':*drawable*' + _ZipResources(input_resource_dirs, output_resource_zip, ignore_pattern) + + +def _OnStaleMd5(options): + with resource_utils.BuildContext() as build: + if options.r_text_in: + r_txt_path = options.r_text_in + else: + # Extract dependencies to resolve @foo/type references into + # dependent packages. + dep_subdirs = resource_utils.ExtractDeps(options.dependencies_res_zips, + build.deps_dir) + + _GenerateRTxt(options, dep_subdirs, build.gen_dir) + r_txt_path = build.r_txt_path + + # 'aapt' doesn't generate any R.txt file if res/ was empty. 
+ if not os.path.exists(r_txt_path): + build_utils.Touch(r_txt_path) + + if options.r_text_out: + shutil.copyfile(r_txt_path, options.r_text_out) + + if options.srcjar_out: + package = options.custom_package + if not package and options.android_manifest: + package = resource_utils.ExtractPackageFromManifest( + options.android_manifest) + + # Don't create a .java file for the current resource target when no + # package name was provided (either by manifest or build rules). + if package: + # All resource IDs should be non-final here, but the + # onResourcesLoaded() method should only be generated if + # --shared-resources is used. + rjava_build_options = resource_utils.RJavaBuildOptions() + rjava_build_options.ExportAllResources() + rjava_build_options.ExportAllStyleables() + if options.shared_resources: + rjava_build_options.GenerateOnResourcesLoaded() + + resource_utils.CreateRJavaFiles( + build.srcjar_dir, package, r_txt_path, + options.extra_res_packages, + options.extra_r_text_files, + rjava_build_options) + + build_utils.ZipDir(options.srcjar_out, build.srcjar_dir) + + if options.resource_zip_out: + _GenerateResourcesZip(options.resource_zip_out, options.resource_dirs, + options.v14_skip, options.strip_drawables, + build.temp_dir) + + +def main(args): + args = build_utils.ExpandFileArgs(args) + options = _ParseArgs(args) + + # Order of these must match order specified in GN so that the correct one + # appears first in the depfile. + possible_output_paths = [ + options.resource_zip_out, + options.r_text_out, + options.srcjar_out, + ] + output_paths = [x for x in possible_output_paths if x] + + # List python deps in input_strings rather than input_paths since the contents + # of them does not change what gets written to the depsfile. 
+ input_strings = options.extra_res_packages + [ + options.custom_package, + options.shared_resources, + options.v14_skip, + options.strip_drawables, + ] + + possible_input_paths = [ + options.aapt_path, + options.android_manifest, + ] + possible_input_paths += options.include_resources + input_paths = [x for x in possible_input_paths if x] + input_paths.extend(options.dependencies_res_zips) + input_paths.extend(options.extra_r_text_files) + + # Resource files aren't explicitly listed in GN. Listing them in the depfile + # ensures the target will be marked stale when resource files are removed. + depfile_deps = [] + resource_names = [] + for resource_dir in options.resource_dirs: + for resource_file in build_utils.FindInDirectory(resource_dir, '*'): + # Don't list the empty .keep file in depfile. Since it doesn't end up + # included in the .zip, it can lead to -w 'dupbuild=err' ninja errors + # if ever moved. + if not resource_file.endswith(os.path.join('empty', '.keep')): + input_paths.append(resource_file) + depfile_deps.append(resource_file) + resource_names.append(os.path.relpath(resource_file, resource_dir)) + + # Resource filenames matter to the output, so add them to strings as well. + # This matters if a file is renamed but not changed (http://crbug.com/597126). 
+ input_strings.extend(sorted(resource_names)) + + build_utils.CallAndWriteDepfileIfStale( + lambda: _OnStaleMd5(options), + options, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + depfile_deps=depfile_deps, + add_pydeps=False) + + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/deps/v8/build/android/gyp/prepare_resources.pydeps b/deps/v8/build/android/gyp/prepare_resources.pydeps new file mode 100644 index 0000000000..0e9ccfbe5e --- /dev/null +++ b/deps/v8/build/android/gyp/prepare_resources.pydeps @@ -0,0 +1,30 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/prepare_resources.pydeps build/android/gyp/prepare_resources.py +../../../third_party/jinja2/__init__.py +../../../third_party/jinja2/_compat.py +../../../third_party/jinja2/bccache.py +../../../third_party/jinja2/compiler.py +../../../third_party/jinja2/defaults.py +../../../third_party/jinja2/environment.py +../../../third_party/jinja2/exceptions.py +../../../third_party/jinja2/filters.py +../../../third_party/jinja2/idtracking.py +../../../third_party/jinja2/lexer.py +../../../third_party/jinja2/loaders.py +../../../third_party/jinja2/nodes.py +../../../third_party/jinja2/optimizer.py +../../../third_party/jinja2/parser.py +../../../third_party/jinja2/runtime.py +../../../third_party/jinja2/tests.py +../../../third_party/jinja2/utils.py +../../../third_party/jinja2/visitor.py +../../../third_party/markupsafe/__init__.py +../../../third_party/markupsafe/_compat.py +../../../third_party/markupsafe/_native.py +../../gn_helpers.py +generate_v14_compatible_resources.py +prepare_resources.py +util/__init__.py +util/build_utils.py +util/md5_check.py +util/resource_utils.py diff --git a/deps/v8/build/android/gyp/proguard.py b/deps/v8/build/android/gyp/proguard.py new file mode 100755 index 0000000000..bb86b2dca6 --- /dev/null +++ b/deps/v8/build/android/gyp/proguard.py @@ -0,0 +1,290 @@ 
+#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import cStringIO +import optparse +import os +import shutil +import sys +import tempfile + +from util import build_utils +from util import diff_utils +from util import proguard_util + +_GENERATED_PROGUARD_HEADER = """ +################################################################################ +# Dynamically generated from build/android/gyp/proguard.py +################################################################################ +""" + +# Example: +# android.arch.core.internal.SafeIterableMap$Entry -> b: +# 1:1:java.lang.Object getKey():353:353 -> getKey +# 2:2:java.lang.Object getValue():359:359 -> getValue +def _RemoveMethodMappings(orig_path, out_fd): + with open(orig_path) as in_fd: + for line in in_fd: + if line[:1] != ' ': + out_fd.write(line) + out_fd.flush() + + +def _ParseOptions(args): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--proguard-path', + help='Path to the proguard.jar to use.') + parser.add_option('--r8-path', + help='Path to the R8.jar to use.') + parser.add_option('--input-paths', + help='Paths to the .jar files proguard should run on.') + parser.add_option('--output-path', help='Path to the generated .jar file.') + parser.add_option('--proguard-configs', action='append', + help='Paths to proguard configuration files.') + parser.add_option('--proguard-config-exclusions', + default='', + help='GN list of paths to proguard configuration files ' + 'included by --proguard-configs, but that should ' + 'not actually be included.') + parser.add_option( + '--apply-mapping', help='Path to proguard mapping to apply.') + parser.add_option('--mapping-output', + help='Path for proguard to output mapping file to.') + parser.add_option( + '--output-config', + help='Path to write the merged proguard config 
file to.') + parser.add_option( + '--expected-configs-file', + help='Path to a file containing the expected merged proguard configs') + parser.add_option( + '--verify-expected-configs', + action='store_true', + help='Fail if the expected merged proguard configs differ from the ' + 'generated merged proguard configs.') + parser.add_option('--classpath', action='append', + help='Classpath for proguard.') + parser.add_option('--main-dex-rules-path', action='append', + help='Paths to main dex rules for multidex' + '- only works with R8.') + parser.add_option('--min-api', default='', + help='Minimum Android API level compatibility.') + parser.add_option('--verbose', '-v', action='store_true', + help='Print all proguard output') + parser.add_option( + '--repackage-classes', + help='Unique package name given to an asynchronously proguarded module') + + options, _ = parser.parse_args(args) + + assert not options.main_dex_rules_path or options.r8_path, \ + 'R8 must be enabled to pass main dex rules.' + + classpath = [] + for arg in options.classpath: + classpath += build_utils.ParseGnList(arg) + options.classpath = classpath + + configs = [] + for arg in options.proguard_configs: + configs += build_utils.ParseGnList(arg) + options.proguard_configs = configs + options.proguard_config_exclusions = ( + build_utils.ParseGnList(options.proguard_config_exclusions)) + + options.input_paths = build_utils.ParseGnList(options.input_paths) + + if not options.mapping_output: + options.mapping_output = options.output_path + '.mapping' + + if options.apply_mapping: + options.apply_mapping = os.path.abspath(options.apply_mapping) + + + return options + + +def _VerifyExpectedConfigs(expected_path, actual_path, fail_on_exit): + msg = diff_utils.DiffFileContents(expected_path, actual_path) + if not msg: + return + + sys.stderr.write("""\ +Proguard flag expectations file needs updating. 
For details see: +https://chromium.googlesource.com/chromium/src/+/HEAD/chrome/android/java/README.md +""") + sys.stderr.write(msg) + if fail_on_exit: + sys.exit(1) + + +def _MoveTempDexFile(tmp_dex_dir, dex_path): + """Move the temp dex file out of |tmp_dex_dir|. + + Args: + tmp_dex_dir: Path to temporary directory created with tempfile.mkdtemp(). + The directory should have just a single file. + dex_path: Target path to move dex file to. + + Raises: + Exception if there are multiple files in |tmp_dex_dir|. + """ + tempfiles = os.listdir(tmp_dex_dir) + if len(tempfiles) > 1: + raise Exception('%d files created, expected 1' % len(tempfiles)) + + tmp_dex_path = os.path.join(tmp_dex_dir, tempfiles[0]) + shutil.move(tmp_dex_path, dex_path) + + +def _CreateR8Command(options, map_output_path, output_dir, tmp_config_path, + libraries): + cmd = [ + 'java', '-jar', options.r8_path, + '--no-desugaring', + '--no-data-resources', + '--output', output_dir, + '--pg-map-output', map_output_path, + ] + + for lib in libraries: + cmd += ['--lib', lib] + + for config_file in options.proguard_configs: + cmd += ['--pg-conf', config_file] + + temp_config_string = '' + if options.apply_mapping or options.repackage_classes or options.min_api: + with open(tmp_config_path, 'w') as f: + if options.apply_mapping: + temp_config_string += '-applymapping \'%s\'\n' % (options.apply_mapping) + if options.repackage_classes: + temp_config_string += '-repackageclasses \'%s\'\n' % ( + options.repackage_classes) + if options.min_api: + temp_config_string += ( + '-assumevalues class android.os.Build$VERSION {\n' + + ' public static final int SDK_INT return ' + options.min_api + + '..9999;\n}\n') + f.write(temp_config_string) + cmd += ['--pg-conf', tmp_config_path] + + if options.main_dex_rules_path: + for main_dex_rule in options.main_dex_rules_path: + cmd += ['--main-dex-rules', main_dex_rule] + + cmd += options.input_paths + return cmd, temp_config_string + + +def main(args): + args = 
build_utils.ExpandFileArgs(args) + options = _ParseOptions(args) + + libraries = [] + for p in options.classpath: + # If a jar is part of input no need to include it as library jar. + if p not in libraries and p not in options.input_paths: + libraries.append(p) + + # TODO(agrieve): Remove proguard usages. + if options.r8_path: + temp_config_string = '' + with build_utils.TempDir() as tmp_dir: + tmp_mapping_path = os.path.join(tmp_dir, 'mapping.txt') + tmp_proguard_config_path = os.path.join(tmp_dir, 'proguard_config.txt') + # If there is no output (no classes are kept), this prevents this script + # from failing. + build_utils.Touch(tmp_mapping_path) + + f = cStringIO.StringIO() + proguard_util.WriteFlagsFile( + options.proguard_configs, f, exclude_generated=True) + merged_configs = f.getvalue() + # Fix up line endings (third_party configs can have windows endings) + merged_configs = merged_configs.replace('\r', '') + f.close() + print_stdout = '-whyareyoukeeping' in merged_configs + + if options.output_path.endswith('.dex'): + with build_utils.TempDir() as tmp_dex_dir: + cmd, temp_config_string = _CreateR8Command( + options, tmp_mapping_path, tmp_dex_dir, tmp_proguard_config_path, + libraries) + build_utils.CheckOutput(cmd, print_stdout=print_stdout) + _MoveTempDexFile(tmp_dex_dir, options.output_path) + else: + cmd, temp_config_string = _CreateR8Command( + options, tmp_mapping_path, options.output_path, + tmp_proguard_config_path, libraries) + build_utils.CheckOutput(cmd, print_stdout=print_stdout) + + # Copy output files to correct locations. + with build_utils.AtomicOutput(options.mapping_output) as mapping: + # Mapping files generated by R8 include comments that may break + # some of our tooling so remove those. 
+ with open(tmp_mapping_path) as tmp: + mapping.writelines(l for l in tmp if not l.startswith('#')) + + with build_utils.AtomicOutput(options.output_config) as f: + f.write(merged_configs) + if temp_config_string: + f.write(_GENERATED_PROGUARD_HEADER) + f.write(temp_config_string) + + if options.expected_configs_file: + _VerifyExpectedConfigs(options.expected_configs_file, + options.output_config, + options.verify_expected_configs) + + other_inputs = [] + if options.apply_mapping: + other_inputs += options.apply_mapping + + build_utils.WriteDepfile( + options.depfile, + options.output_path, + inputs=options.proguard_configs + options.input_paths + libraries + + other_inputs, + add_pydeps=False) + else: + proguard = proguard_util.ProguardCmdBuilder(options.proguard_path) + proguard.injars(options.input_paths) + proguard.configs(options.proguard_configs) + proguard.config_exclusions(options.proguard_config_exclusions) + proguard.outjar(options.output_path) + proguard.mapping_output(options.mapping_output) + proguard.libraryjars(libraries) + proguard.verbose(options.verbose) + proguard.min_api(options.min_api) + # Do not consider the temp file as an input since its name is random. 
+ input_paths = proguard.GetInputs() + + with tempfile.NamedTemporaryFile() as f: + if options.apply_mapping: + input_paths.append(options.apply_mapping) + # Maintain only class name mappings in the .mapping file in order to + # work around what appears to be a ProGuard bug in -applymapping: + # method 'int close()' is not being kept as 'a', but remapped to 'c' + _RemoveMethodMappings(options.apply_mapping, f) + proguard.mapping(f.name) + + input_strings = proguard.build() + if f.name in input_strings: + input_strings[input_strings.index(f.name)] = '$M' + + build_utils.CallAndWriteDepfileIfStale( + proguard.CheckOutput, + options, + input_paths=input_paths, + input_strings=input_strings, + output_paths=proguard.GetOutputs(), + depfile_deps=proguard.GetDepfileDeps(), + add_pydeps=False) + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/proguard.pydeps b/deps/v8/build/android/gyp/proguard.pydeps new file mode 100644 index 0000000000..fd870a0e4b --- /dev/null +++ b/deps/v8/build/android/gyp/proguard.pydeps @@ -0,0 +1,9 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/proguard.pydeps build/android/gyp/proguard.py +../../gn_helpers.py +proguard.py +util/__init__.py +util/build_utils.py +util/diff_utils.py +util/md5_check.py +util/proguard_util.py diff --git a/deps/v8/build/android/gyp/test/BUILD.gn b/deps/v8/build/android/gyp/test/BUILD.gn new file mode 100644 index 0000000000..2deac1d56f --- /dev/null +++ b/deps/v8/build/android/gyp/test/BUILD.gn @@ -0,0 +1,13 @@ +import("//build/config/android/rules.gni") + +java_library("hello_world_java") { + java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ] +} + +java_binary("hello_world") { + deps = [ + ":hello_world_java", + ] + java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ] + main_class = "org.chromium.helloworld.HelloWorldMain" +} diff --git 
a/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java new file mode 100644 index 0000000000..10860d8332 --- /dev/null +++ b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java @@ -0,0 +1,15 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.helloworld; + +public class HelloWorldMain { + public static void main(String[] args) { + if (args.length > 0) { + System.exit(Integer.parseInt(args[0])); + } + HelloWorldPrinter.print(); + } +} + diff --git a/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java new file mode 100644 index 0000000000..b09673e21f --- /dev/null +++ b/deps/v8/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java @@ -0,0 +1,12 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +package org.chromium.helloworld; + +public class HelloWorldPrinter { + public static void print() { + System.out.println("Hello, world!"); + } +} + diff --git a/deps/v8/build/android/gyp/util/__init__.py b/deps/v8/build/android/gyp/util/__init__.py new file mode 100644 index 0000000000..96196cffb2 --- /dev/null +++ b/deps/v8/build/android/gyp/util/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) 2012 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
diff --git a/deps/v8/build/android/gyp/util/build_utils.py b/deps/v8/build/android/gyp/util/build_utils.py new file mode 100644 index 0000000000..e4d7cc6128 --- /dev/null +++ b/deps/v8/build/android/gyp/util/build_utils.py @@ -0,0 +1,650 @@ +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Contains common helpers for GN action()s.""" + +import collections +import contextlib +import filecmp +import fnmatch +import json +import os +import pipes +import re +import shutil +import stat +import subprocess +import sys +import tempfile +import zipfile + +# Any new non-system import must be added to: +# //build/config/android/internal_rules.gni + +from util import md5_check + +sys.path.append(os.path.join(os.path.dirname(__file__), + os.pardir, os.pardir, os.pardir)) +import gn_helpers + +# Definition copied from pylib/constants/__init__.py to avoid adding +# a dependency on pylib. 
def FindInDirectory(directory, filename_filter):
  """Recursively finds files under |directory| whose base name matches a glob.

  Args:
    directory: Root directory to walk.
    filename_filter: fnmatch-style pattern applied to each file's base name.

  Returns:
    A list of full paths, in os.walk() traversal order.
  """
  found = []
  for root, _, names in os.walk(directory):
    found += [os.path.join(root, name)
              for name in fnmatch.filter(names, filename_filter)]
  return found
def WriteJson(obj, path, only_if_changed=False):
  """Serializes |obj| as pretty-printed, key-sorted JSON to |path|.

  Args:
    obj: The object to serialize.
    path: Output file path.
    only_if_changed: If True, skip the write when the serialized form is
        identical to the file's current contents (preserves the mtime so
        downstream staleness checks are not triggered).
  """
  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if only_if_changed and os.path.exists(path):
    with open(path, 'r') as f:
      if f.read() == new_dump:
        return

  with open(path, 'w') as f:
    f.write(new_dump)
def FilterLines(output, filter_string):
  """Output filter from build_utils.CheckOutput.

  Args:
    output: Executable output as from build_utils.CheckOutput.
    filter_string: An RE string that will filter (remove) matching
        lines from |output|.

  Returns:
    The filtered output, as a single string.
  """
  pattern = re.compile(filter_string)
  kept = []
  for line in output.splitlines():
    if pattern.search(line):
      continue
    kept.append(line)
  return '\n'.join(kept)
+def CheckOutput(args, cwd=None, env=None, + print_stdout=False, print_stderr=True, + stdout_filter=None, + stderr_filter=None, + fail_func=lambda returncode, stderr: returncode != 0): + if not cwd: + cwd = os.getcwd() + + child = subprocess.Popen(args, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env) + stdout, stderr = child.communicate() + + if stdout_filter is not None: + stdout = stdout_filter(stdout) + + if stderr_filter is not None: + stderr = stderr_filter(stderr) + + if fail_func(child.returncode, stderr): + raise CalledProcessError(cwd, args, stdout + stderr) + + if print_stdout: + sys.stdout.write(stdout) + if print_stderr: + sys.stderr.write(stderr) + + return stdout + + +def GetModifiedTime(path): + # For a symlink, the modified time should be the greater of the link's + # modified time and the modified time of the target. + return max(os.lstat(path).st_mtime, os.stat(path).st_mtime) + + +def IsTimeStale(output, inputs): + if not os.path.exists(output): + return True + + output_time = GetModifiedTime(output) + for i in inputs: + if GetModifiedTime(i) > output_time: + return True + return False + + +def _CheckZipPath(name): + if os.path.normpath(name) != name: + raise Exception('Non-canonical zip path: %s' % name) + if os.path.isabs(name): + raise Exception('Absolute zip path: %s' % name) + + +def _IsSymlink(zip_file, name): + zi = zip_file.getinfo(name) + + # The two high-order bytes of ZipInfo.external_attr represent + # UNIX permissions and file type bits. 
def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
               predicate=None):
  """Extracts the entries of |zip_path| into |path|.

  Args:
    zip_path: Path of the zip archive to extract.
    path: Destination directory; defaults to the current working directory
        and is created if it does not already exist.
    no_clobber: If True, raise if an entry would overwrite an existing file.
    pattern: Optional fnmatch glob; non-matching entries are skipped.
    predicate: Optional callable(name) -> bool; entries for which it returns
        falsy are skipped.

  Returns:
    List of paths of the extracted files (directory entries are created but
    not listed).
  """
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  if not zipfile.is_zipfile(zip_path):
    raise Exception('Invalid zip file: %s' % zip_path)

  extracted = []
  with zipfile.ZipFile(zip_path) as archive:
    for name in archive.namelist():
      # Directory entries end with '/'.
      if name.endswith('/'):
        MakeDirectory(os.path.join(path, name))
        continue
      if pattern is not None and not fnmatch.fnmatch(name, pattern):
        continue
      if predicate and not predicate(name):
        continue
      _CheckZipPath(name)
      dest = os.path.join(path, name)
      if no_clobber and os.path.exists(dest):
        raise Exception(
            'Path already exists from zip: %s %s %s' % (zip_path, name, dest))
      if _IsSymlink(archive, name):
        # zipfile stores a symlink's target as the member's data, so recreate
        # the link by hand rather than extracting it as a regular file.
        MakeDirectory(os.path.dirname(dest))
        os.symlink(archive.read(name), dest)
        extracted.append(dest)
      else:
        archive.extract(name, path)
        extracted.append(dest)

  return extracted
+ """ + assert (src_path is None) != (data is None), ( + '|src_path| and |data| are mutually exclusive.') + _CheckZipPath(zip_path) + zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP) + zipinfo.external_attr = _HERMETIC_FILE_ATTR + + if src_path and os.path.islink(src_path): + zipinfo.filename = zip_path + zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink + zip_file.writestr(zipinfo, os.readlink(src_path)) + return + + # zipfile.write() does + # external_attr = (os.stat(src_path)[0] & 0xFFFF) << 16 + # but we want to use _HERMETIC_FILE_ATTR, so manually set + # the few attr bits we care about. + if src_path: + st = os.stat(src_path) + for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH): + if st.st_mode & mode: + zipinfo.external_attr |= mode << 16 + + if src_path: + with open(src_path, 'rb') as f: + data = f.read() + + # zipfile will deflate even when it makes the file bigger. To avoid + # growing files, disable compression at an arbitrary cut off point. + if len(data) < 16: + compress = False + + # None converts to ZIP_STORED, when passed explicitly rather than the + # default passed to the ZipFile constructor. + compress_type = zip_file.compression + if compress is not None: + compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED + zip_file.writestr(zipinfo, data, compress_type) + + +def DoZip(inputs, output, base_dir=None, compress_fn=None, + zip_prefix_path=None): + """Creates a zip file from a list of files. + + Args: + inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples. + output: Path, fileobj, or ZipFile instance to add files to. + base_dir: Prefix to strip from inputs. + compress_fn: Applied to each input to determine whether or not to compress. + By default, items will be |zipfile.ZIP_STORED|. + zip_prefix_path: Path prepended to file path in zip file. + """ + if base_dir is None: + base_dir = '.' 
def MatchesGlob(path, filters):
  """Returns whether |path| matches any of the given glob patterns.

  A falsy |filters| (None or empty) is returned unchanged, i.e. the result
  is falsy when there are no patterns to match against.
  """
  if not filters:
    return filters
  for pattern in filters:
    if fnmatch.fnmatch(path, pattern):
      return True
  return False
def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph (crashes if cycles exist).

  Args:
    top: A list of the top level nodes
    deps_func: A function that takes a node and returns a list of its direct
        dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # An OrderedDict doubles as an insertion-ordered set of finished nodes:
  # a node is inserted only after all of its dependencies, so iteration
  # order is a valid topological order. Ties keep original discovery order.
  visited = collections.OrderedDict()

  def _Visit(node):
    if node in visited:
      return
    for child in deps_func(node):
      _Visit(child)
    visited[node] = None

  for node in top:
    _Visit(node)
  return list(visited)
+ """ + _ForceLazyModulesToLoad() + module_paths = (m.__file__ for m in sys.modules.itervalues() + if m is not None and hasattr(m, '__file__')) + abs_module_paths = map(os.path.abspath, module_paths) + + assert os.path.isabs(DIR_SOURCE_ROOT) + non_system_module_paths = [ + p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)] + def ConvertPycToPy(s): + if s.endswith('.pyc'): + return s[:-1] + return s + + non_system_module_paths = map(ConvertPycToPy, non_system_module_paths) + non_system_module_paths = map(os.path.relpath, non_system_module_paths) + return sorted(set(non_system_module_paths)) + + +def _ForceLazyModulesToLoad(): + """Forces any lazily imported modules to fully load themselves. + + Inspecting the modules' __file__ attribute causes lazily imported modules + (e.g. from email) to get fully imported and update sys.modules. Iterate + over the values until sys.modules stabilizes so that no modules are missed. + """ + while True: + num_modules_before = len(sys.modules.keys()) + for m in sys.modules.values(): + if m is not None and hasattr(m, '__file__'): + _ = m.__file__ + num_modules_after = len(sys.modules.keys()) + if num_modules_before == num_modules_after: + break + + +def AddDepfileOption(parser): + # TODO(agrieve): Get rid of this once we've moved to argparse. + if hasattr(parser, 'add_option'): + func = parser.add_option + else: + func = parser.add_argument + func('--depfile', + help='Path to depfile (refer to `gn help depfile`)') + + +def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True): + assert depfile_path != first_gn_output # http://crbug.com/646165 + assert not isinstance(inputs, basestring) # Easy mistake to make + inputs = inputs or [] + if add_pydeps: + inputs = _ComputePythonDependencies() + inputs + MakeDirectory(os.path.dirname(depfile_path)) + # Ninja does not support multiple outputs in depfiles. 
def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json
  and then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  json_cache = {}
  placeholder_re = re.compile(r'@FileArg\((.*?)\)')

  for i, arg in enumerate(args):
    match = placeholder_re.search(arg)
    if not match:
      continue

    lookup = match.group(1).split(':')
    file_path = lookup[0]
    # Each referenced json file is parsed at most once.
    if file_path not in json_cache:
      with open(file_path) as f:
        json_cache[file_path] = json.load(f)

    value = json_cache[file_path]
    for key in lookup[1:]:
      value = value[key]

    # This should match ParseGnList. The output is either a GN-formatted
    # list or a literal (with no quotes).
    if isinstance(value, list):
      replacement = gn_helpers.ToGNString(value)
    else:
      replacement = str(value)
    new_args[i] = arg[:match.start()] + replacement + arg[match.end():]

  return new_args
+ """ + with open(sources_list_file_name) as f: + return [file_name.strip() for file_name in f] + + +def CallAndWriteDepfileIfStale(function, options, record_path=None, + input_paths=None, input_strings=None, + output_paths=None, force=False, + pass_changes=False, depfile_deps=None, + add_pydeps=True): + """Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable. + + Depfiles are automatically added to output_paths when present in the |options| + argument. They are then created after |function| is called. + + By default, only python dependencies are added to the depfile. If there are + other input paths that are not captured by GN deps, then they should be listed + in depfile_deps. It's important to write paths to the depfile that are already + captured by GN deps since GN args can cause GN deps to change, and such + changes are not immediately reflected in depfiles (http://crbug.com/589311). + """ + if not output_paths: + raise Exception('At least one output_path must be specified.') + input_paths = list(input_paths or []) + input_strings = list(input_strings or []) + output_paths = list(output_paths or []) + + python_deps = None + if hasattr(options, 'depfile') and options.depfile: + python_deps = _ComputePythonDependencies() + input_paths += python_deps + output_paths += [options.depfile] + + def on_stale_md5(changes): + args = (changes,) if pass_changes else () + function(*args) + if python_deps is not None: + all_depfile_deps = list(python_deps) if add_pydeps else [] + if depfile_deps: + all_depfile_deps.extend(depfile_deps) + WriteDepfile(options.depfile, output_paths[0], all_depfile_deps, + add_pydeps=False) + + md5_check.CallAndRecordIfStale( + on_stale_md5, + record_path=record_path, + input_paths=input_paths, + input_strings=input_strings, + output_paths=output_paths, + force=force, + pass_changes=True) diff --git a/deps/v8/build/android/gyp/util/build_utils_test.py b/deps/v8/build/android/gyp/util/build_utils_test.py new file mode 
100755 index 0000000000..d462f0c676 --- /dev/null +++ b/deps/v8/build/android/gyp/util/build_utils_test.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import os +import sys +import unittest + +sys.path.insert( + 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) +from util import build_utils + +_DEPS = collections.OrderedDict() +_DEPS['a'] = [] +_DEPS['b'] = [] +_DEPS['c'] = ['a'] +_DEPS['d'] = ['a'] +_DEPS['e'] = ['f'] +_DEPS['f'] = ['a', 'd'] +_DEPS['g'] = [] +_DEPS['h'] = ['d', 'b', 'f'] +_DEPS['i'] = ['f'] + + +class BuildUtilsTest(unittest.TestCase): + def testGetSortedTransitiveDependencies_all(self): + TOP = _DEPS.keys() + EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) + + def testGetSortedTransitiveDependencies_leaves(self): + TOP = ['c', 'e', 'g', 'h', 'i'] + EXPECTED = ['a', 'c', 'd', 'f', 'e', 'g', 'b', 'h', 'i'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) + + def testGetSortedTransitiveDependencies_leavesReverse(self): + TOP = ['i', 'h', 'g', 'e', 'c'] + EXPECTED = ['a', 'd', 'f', 'i', 'b', 'h', 'g', 'e', 'c'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) + + +if __name__ == '__main__': + unittest.main() diff --git a/deps/v8/build/android/gyp/util/diff_utils.py b/deps/v8/build/android/gyp/util/diff_utils.py new file mode 100755 index 0000000000..b20dc27df2 --- /dev/null +++ b/deps/v8/build/android/gyp/util/diff_utils.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# +# Copyright 2019 The Chromium Authors. All rights reserved. 
def DiffFileContents(expected_path, actual_path):
  """Check file contents for equality and return the diff or None.

  Returns None when the two files are identical; otherwise returns a
  human-readable message containing a unified diff that can be applied
  with `patch -p1` to update the expected file.
  """
  with open(expected_path) as expected_file:
    expected_lines = expected_file.readlines()
  with open(actual_path) as actual_file:
    actual_lines = actual_file.readlines()

  if actual_lines == expected_lines:
    return None

  # Report paths relative to the source root; the patch is applied from there.
  rel_expected = os.path.relpath(expected_path, build_utils.DIR_SOURCE_ROOT)
  rel_actual = os.path.relpath(actual_path, build_utils.DIR_SOURCE_ROOT)

  # Both sides of the diff use the expected file's path so that `patch`
  # updates the checked-in (expected) copy.
  patch_body = ''.join(
      difflib.unified_diff(
          expected_lines,
          actual_lines,
          fromfile=os.path.join('before', rel_expected),
          tofile=os.path.join('after', rel_expected),
          n=0)).rstrip()

  # Space added before "patch" so that giant command is not put in bash history.
  return """\
Files Compared:
 * {}
 * {}

To update the file, run:
########### START ###########
 patch -p1 <<'END_DIFF'
{}
END_DIFF
############ END ############
""".format(rel_expected, rel_actual, patch_body)
def WriteJarInfoFile(output_obj, info_data, source_file_map=None):
  """Generate a .jar.info file from a given dictionary.

  Writes one 'fully.qualified.Name,path' line per entry, sorted by class
  name so that the output is deterministic.

  Args:
    output_obj: output file object.
    info_data: a mapping of fully qualified Java class names to filepaths.
    source_file_map: an optional mapping from java source file paths to the
      corresponding source .srcjar. This is because info_data may contain the
      path of Java source files that were extracted from an .srcjar into a
      temporary location.
  """
  # Use items() rather than the Python 2-only iteritems() so this works under
  # both Python 2 and Python 3.
  for fully_qualified_name, path in sorted(info_data.items()):
    if source_file_map and path in source_file_map:
      path = source_file_map[path]
      # Paths that still point into a temp extraction dir indicate a bug in
      # the caller's srcjar mapping.
      assert not path.startswith('/tmp'), (
          'Java file path should not be in temp dir: {}'.format(path))
    output_obj.write('{},{}\n'.format(fully_qualified_name, path))
def CallAndRecordIfStale(
    function, record_path=None, input_paths=None, input_strings=None,
    output_paths=None, force=False, pass_changes=False):
  """Calls function if outputs are stale.

  Outputs are considered stale if:
  - any output_paths are missing, or
  - the contents of any file within input_paths has changed, or
  - the contents of input_strings has changed.

  To debug which files are out-of-date, set the environment variable:
  PRINT_BUILD_EXPLANATIONS=1

  Args:
    function: The function to call.
    record_path: Path to record metadata.
      Defaults to output_paths[0] + '.md5.stamp'
    input_paths: List of paths to calculate an md5 sum on.
    input_strings: List of strings to record verbatim.
    output_paths: List of output paths.
    force: Whether to treat outputs as missing regardless of whether they
      actually are.
    pass_changes: Whether to pass a Changes instance to |function|.
  """
  assert record_path or output_paths
  input_paths = input_paths or []
  input_strings = input_strings or []
  output_paths = output_paths or []
  record_path = record_path or output_paths[0] + '.md5.stamp'

  assert record_path.endswith('.stamp'), (
      'record paths must end in \'.stamp\' so that they are easy to find '
      'and delete')

  # Per-entry tracking is only needed when someone will inspect the changes.
  new_metadata = _Metadata(track_entries=pass_changes or PRINT_EXPLANATIONS)
  new_metadata.AddStrings(input_strings)

  for path in input_paths:
    if _IsZipFile(path):
      entries = _ExtractZipEntries(path)
      new_metadata.AddZipFile(path, entries)
    else:
      new_metadata.AddFile(path, _Md5ForPath(path))

  old_metadata = None
  force = force or _FORCE_REBUILD
  missing_outputs = [x for x in output_paths if force or not os.path.exists(x)]
  # When outputs are missing, don't bother gathering change information.
  if not missing_outputs and os.path.exists(record_path):
    with open(record_path, 'r') as jsonfile:
      try:
        old_metadata = _Metadata.FromFile(jsonfile)
      except:  # pylint: disable=bare-except
        # Deliberately broad: an unreadable/old-format stamp simply means a
        # rebuild is needed.
        pass  # Not yet using new file format.

  changes = Changes(old_metadata, new_metadata, force, missing_outputs)
  if not changes.HasChanges():
    return

  if PRINT_EXPLANATIONS:
    print('=' * 80)
    print('Target is stale: %s' % record_path)
    print(changes.DescribeDifference())
    print('=' * 80)

  args = (changes,) if pass_changes else ()
  function(*args)

  # Only write the stamp after |function| succeeds, so failures stay stale.
  with open(record_path, 'w') as f:
    new_metadata.ToFile(f)
+ """ + if (self.force or + not self.old_metadata or + self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5()): + return False + if any(self.IterRemovedPaths()): + return False + for path in self.IterModifiedPaths(): + if any(self.IterRemovedSubpaths(path)): + return False + return True + + def IterAllPaths(self): + """Generator for paths.""" + return self.new_metadata.IterPaths(); + + def IterAllSubpaths(self, path): + """Generator for subpaths.""" + return self.new_metadata.IterSubpaths(path); + + def IterAddedPaths(self): + """Generator for paths that were added.""" + for path in self.new_metadata.IterPaths(): + if self._GetOldTag(path) is None: + yield path + + def IterAddedSubpaths(self, path): + """Generator for paths that were added within the given zip file.""" + for subpath in self.new_metadata.IterSubpaths(path): + if self._GetOldTag(path, subpath) is None: + yield subpath + + def IterRemovedPaths(self): + """Generator for paths that were removed.""" + if self.old_metadata: + for path in self.old_metadata.IterPaths(): + if self.new_metadata.GetTag(path) is None: + yield path + + def IterRemovedSubpaths(self, path): + """Generator for paths that were removed within the given zip file.""" + if self.old_metadata: + for subpath in self.old_metadata.IterSubpaths(path): + if self.new_metadata.GetTag(path, subpath) is None: + yield subpath + + def IterModifiedPaths(self): + """Generator for paths whose contents have changed.""" + for path in self.new_metadata.IterPaths(): + old_tag = self._GetOldTag(path) + new_tag = self.new_metadata.GetTag(path) + if old_tag is not None and old_tag != new_tag: + yield path + + def IterModifiedSubpaths(self, path): + """Generator for paths within a zip file whose contents have changed.""" + for subpath in self.new_metadata.IterSubpaths(path): + old_tag = self._GetOldTag(path, subpath) + new_tag = self.new_metadata.GetTag(path, subpath) + if old_tag is not None and old_tag != new_tag: + yield subpath + + def 
IterChangedPaths(self): + """Generator for all changed paths (added/removed/modified).""" + return itertools.chain(self.IterRemovedPaths(), + self.IterModifiedPaths(), + self.IterAddedPaths()) + + def IterChangedSubpaths(self, path): + """Generator for paths within a zip that were added/removed/modified.""" + return itertools.chain(self.IterRemovedSubpaths(path), + self.IterModifiedSubpaths(path), + self.IterAddedSubpaths(path)) + + def DescribeDifference(self): + """Returns a human-readable description of what changed.""" + if self.force: + return 'force=True' + elif self.missing_outputs: + return 'Outputs do not exist:\n ' + '\n '.join(self.missing_outputs) + elif self.old_metadata is None: + return 'Previous stamp file not found.' + + if self.old_metadata.StringsMd5() != self.new_metadata.StringsMd5(): + ndiff = difflib.ndiff(self.old_metadata.GetStrings(), + self.new_metadata.GetStrings()) + changed = [s for s in ndiff if not s.startswith(' ')] + return 'Input strings changed:\n ' + '\n '.join(changed) + + if self.old_metadata.FilesMd5() == self.new_metadata.FilesMd5(): + return "There's no difference." + + lines = [] + lines.extend('Added: ' + p for p in self.IterAddedPaths()) + lines.extend('Removed: ' + p for p in self.IterRemovedPaths()) + for path in self.IterModifiedPaths(): + lines.append('Modified: ' + path) + lines.extend(' -> Subpath added: ' + p + for p in self.IterAddedSubpaths(path)) + lines.extend(' -> Subpath removed: ' + p + for p in self.IterRemovedSubpaths(path)) + lines.extend(' -> Subpath modified: ' + p + for p in self.IterModifiedSubpaths(path)) + if lines: + return 'Input files changed:\n ' + '\n '.join(lines) + return 'I have no idea what changed (there is a bug).' + + +class _Metadata(object): + """Data model for tracking change metadata. + + Args: + track_entries: Enables per-file change tracking. Slower, but required for + Changes functionality. 
+ """ + # Schema: + # { + # "files-md5": "VALUE", + # "strings-md5": "VALUE", + # "input-files": [ + # { + # "path": "path.jar", + # "tag": "{MD5 of entries}", + # "entries": [ + # { "path": "org/chromium/base/Foo.class", "tag": "{CRC32}" }, ... + # ] + # }, { + # "path": "path.txt", + # "tag": "{MD5}", + # } + # ], + # "input-strings": ["a", "b", ...], + # } + def __init__(self, track_entries=False): + self._track_entries = track_entries + self._files_md5 = None + self._strings_md5 = None + self._files = [] + self._strings = [] + # Map of (path, subpath) -> entry. Created upon first call to _GetEntry(). + self._file_map = None + + @classmethod + def FromFile(cls, fileobj): + """Returns a _Metadata initialized from a file object.""" + ret = cls() + obj = json.load(fileobj) + ret._files_md5 = obj['files-md5'] + ret._strings_md5 = obj['strings-md5'] + ret._files = obj.get('input-files', []) + ret._strings = obj.get('input-strings', []) + return ret + + def ToFile(self, fileobj): + """Serializes metadata to the given file object.""" + obj = { + 'files-md5': self.FilesMd5(), + 'strings-md5': self.StringsMd5(), + } + if self._track_entries: + obj['input-files'] = sorted(self._files, key=lambda e: e['path']) + obj['input-strings'] = self._strings + + json.dump(obj, fileobj, indent=2) + + def _AssertNotQueried(self): + assert self._files_md5 is None + assert self._strings_md5 is None + assert self._file_map is None + + def AddStrings(self, values): + self._AssertNotQueried() + self._strings.extend(str(v) for v in values) + + def AddFile(self, path, tag): + """Adds metadata for a non-zip file. + + Args: + path: Path to the file. + tag: A short string representative of the file contents. + """ + self._AssertNotQueried() + self._files.append({ + 'path': path, + 'tag': tag, + }) + + def AddZipFile(self, path, entries): + """Adds metadata for a zip file. + + Args: + path: Path to the file. + entries: List of (subpath, tag) tuples for entries within the zip. 
+ """ + self._AssertNotQueried() + tag = _ComputeInlineMd5(itertools.chain((e[0] for e in entries), + (e[1] for e in entries))) + self._files.append({ + 'path': path, + 'tag': tag, + 'entries': [{"path": e[0], "tag": e[1]} for e in entries], + }) + + def GetStrings(self): + """Returns the list of input strings.""" + return self._strings + + def FilesMd5(self): + """Lazily computes and returns the aggregate md5 of input files.""" + if self._files_md5 is None: + # Omit paths from md5 since temporary files have random names. + self._files_md5 = _ComputeInlineMd5( + self.GetTag(p) for p in sorted(self.IterPaths())) + return self._files_md5 + + def StringsMd5(self): + """Lazily computes and returns the aggregate md5 of input strings.""" + if self._strings_md5 is None: + self._strings_md5 = _ComputeInlineMd5(self._strings) + return self._strings_md5 + + def _GetEntry(self, path, subpath=None): + """Returns the JSON entry for the given path / subpath.""" + if self._file_map is None: + self._file_map = {} + for entry in self._files: + self._file_map[(entry['path'], None)] = entry + for subentry in entry.get('entries', ()): + self._file_map[(entry['path'], subentry['path'])] = subentry + return self._file_map.get((path, subpath)) + + def GetTag(self, path, subpath=None): + """Returns the tag for the given path / subpath.""" + ret = self._GetEntry(path, subpath) + return ret and ret['tag'] + + def IterPaths(self): + """Returns a generator for all top-level paths.""" + return (e['path'] for e in self._files) + + def IterSubpaths(self, path): + """Returns a generator for all subpaths in the given zip. + + If the given path is not a zip file or doesn't exist, returns an empty + iterable. 
+ """ + outer_entry = self._GetEntry(path) + if not outer_entry: + return () + subentries = outer_entry.get('entries', []) + return (entry['path'] for entry in subentries) + + +def _UpdateMd5ForFile(md5, path, block_size=2**16): + with open(path, 'rb') as infile: + while True: + data = infile.read(block_size) + if not data: + break + md5.update(data) + + +def _UpdateMd5ForDirectory(md5, dir_path): + for root, _, files in os.walk(dir_path): + for f in files: + _UpdateMd5ForFile(md5, os.path.join(root, f)) + + +def _Md5ForPath(path): + md5 = hashlib.md5() + if os.path.isdir(path): + _UpdateMd5ForDirectory(md5, path) + else: + _UpdateMd5ForFile(md5, path) + return md5.hexdigest() + + +def _ComputeInlineMd5(iterable): + """Computes the md5 of the concatenated parameters.""" + md5 = hashlib.md5() + for item in iterable: + md5.update(str(item)) + return md5.hexdigest() + + +def _IsZipFile(path): + """Returns whether to treat the given file as a zip file.""" + # ijar doesn't set the CRC32 field. + if path.endswith('.interface.jar'): + return False + return path[-4:] in ('.zip', '.apk', '.jar') or path.endswith('.srcjar') + + +def _ExtractZipEntries(path): + """Returns a list of (path, CRC32) of all files within |path|.""" + entries = [] + with zipfile.ZipFile(path) as zip_file: + for zip_info in zip_file.infolist(): + # Skip directories and empty files. + if zip_info.CRC: + entries.append( + (zip_info.filename, zip_info.CRC + zip_info.compress_type)) + return entries diff --git a/deps/v8/build/android/gyp/util/md5_check_test.py b/deps/v8/build/android/gyp/util/md5_check_test.py new file mode 100755 index 0000000000..41e9d3c248 --- /dev/null +++ b/deps/v8/build/android/gyp/util/md5_check_test.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
def _WriteZipFile(path, entries):
  """Writes a zip at |path| containing the given (subpath, data) entries."""
  with zipfile.ZipFile(path, 'w') as zip_file:
    for subpath, data in entries:
      zip_file.writestr(subpath, data)


class TestMd5Check(unittest.TestCase):
  """End-to-end tests for md5_check.CallAndRecordIfStale."""

  def setUp(self):
    self.called = False
    self.changes = None

  def testCallAndRecordIfStale(self):
    input_strings = ['string1', 'string2']
    input_file1 = tempfile.NamedTemporaryFile(suffix='.txt')
    input_file2 = tempfile.NamedTemporaryFile(suffix='.zip')
    file1_contents = b'input file 1'
    input_file1.write(file1_contents)
    input_file1.flush()
    # Test out empty zip file to start.
    _WriteZipFile(input_file2.name, [])
    input_files = [input_file1.name, input_file2.name]

    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')

    def CheckCallAndRecord(should_call, message, force=False,
                           outputs_specified=False, outputs_missing=False,
                           expected_changes=None, added_or_modified_only=None):
      # Runs CallAndRecordIfStale once and asserts whether it fired.
      output_paths = None
      if outputs_specified:
        output_file1 = tempfile.NamedTemporaryFile()
        if outputs_missing:
          output_file1.close()  # Gets deleted on close().
        output_paths = [output_file1.name]

      self.called = False
      self.changes = None
      if expected_changes or added_or_modified_only is not None:
        def MarkCalled(changes):
          self.called = True
          self.changes = changes
      else:
        def MarkCalled():
          self.called = True

      md5_check.CallAndRecordIfStale(
          MarkCalled,
          record_path=record_path.name,
          input_paths=input_files,
          input_strings=input_strings,
          output_paths=output_paths,
          force=force,
          pass_changes=(expected_changes or added_or_modified_only) is not None)
      self.assertEqual(should_call, self.called, message)
      if expected_changes:
        description = self.changes.DescribeDifference()
        self.assertTrue(fnmatch.fnmatch(description, expected_changes),
                        'Expected %s to match %s' % (
                        repr(description), repr(expected_changes)))
      if should_call and added_or_modified_only is not None:
        self.assertEqual(added_or_modified_only,
                         self.changes.AddedOrModifiedOnly())

    CheckCallAndRecord(True, 'should call when record doesn\'t exist',
                       expected_changes='Previous stamp file not found.',
                       added_or_modified_only=False)
    CheckCallAndRecord(False, 'should not call when nothing changed')
    CheckCallAndRecord(False, 'should not call when nothing changed #2',
                       outputs_specified=True, outputs_missing=False)
    CheckCallAndRecord(True, 'should call when output missing',
                       outputs_specified=True, outputs_missing=True,
                       expected_changes='Outputs do not exist:*',
                       added_or_modified_only=False)
    CheckCallAndRecord(True, force=True, message='should call when forced',
                       expected_changes='force=True',
                       added_or_modified_only=False)

    # Must be a bytes literal: the tempfile is opened in binary mode, so a
    # str write raises TypeError under Python 3.
    input_file1.write(b'some more input')
    input_file1.flush()
    CheckCallAndRecord(True, 'changed input file should trigger call',
                       expected_changes='*Modified: %s' % input_file1.name,
                       added_or_modified_only=True)

    input_files = input_files[::-1]
    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')

    input_files = input_files[:1]
    CheckCallAndRecord(True, 'removing file should trigger call',
                       expected_changes='*Removed: %s' % input_file1.name,
                       added_or_modified_only=False)

    input_files.append(input_file1.name)
    CheckCallAndRecord(True, 'added input file should trigger call',
                       expected_changes='*Added: %s' % input_file1.name,
                       added_or_modified_only=True)

    input_strings[0] = input_strings[0] + ' a bit longer'
    CheckCallAndRecord(True, 'changed input string should trigger call',
                       expected_changes='*Input strings changed*',
                       added_or_modified_only=False)

    input_strings = input_strings[::-1]
    CheckCallAndRecord(True, 'reordering of string inputs should trigger call',
                       expected_changes='*Input strings changed*')

    input_strings = input_strings[:1]
    CheckCallAndRecord(True, 'removing a string should trigger call')

    input_strings.append('a brand new string')
    CheckCallAndRecord(
        True,
        'added input string should trigger call',
        added_or_modified_only=False)

    _WriteZipFile(input_file2.name, [('path/1.txt', '1')])
    CheckCallAndRecord(True, 'added subpath should trigger call',
                       expected_changes='*Modified: %s*Subpath added: %s' % (
                           input_file2.name, 'path/1.txt'),
                       added_or_modified_only=True)
    _WriteZipFile(input_file2.name, [('path/1.txt', '2')])
    CheckCallAndRecord(True, 'changed subpath should trigger call',
                       expected_changes='*Modified: %s*Subpath modified: %s' % (
                           input_file2.name, 'path/1.txt'),
                       added_or_modified_only=True)
    CheckCallAndRecord(False, 'should not call when nothing changed')

    _WriteZipFile(input_file2.name, [])
    CheckCallAndRecord(True, 'removed subpath should trigger call',
                       expected_changes='*Modified: %s*Subpath removed: %s' % (
                           input_file2.name, 'path/1.txt'),
                       added_or_modified_only=False)


if __name__ == '__main__':
  unittest.main()
class ProguardOutputFilter(object):
  """ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
  as well as interesting stuff (notes, warnings, etc). If stdout is entirely
  boring, this class suppresses the output.
  """

  IGNORE_RE = re.compile(
      r'Pro.*version|Note:|Reading|Preparing|Printing|ProgramClass:|Searching|'
      r'jar \[|\d+ class path entries checked')

  def __init__(self):
    # State carried across calls: whether the current message is suppressed,
    # and whether the very next line should be dropped unconditionally.
    self._last_line_ignored = False
    self._ignore_next_line = False

  def __call__(self, output):
    kept_lines = []
    for line in output.splitlines(True):
      if self._ignore_next_line:
        self._ignore_next_line = False
        continue

      if '***BINARY RUN STATS***' in line:
        # Stats banner: drop this line and the one that follows it.
        self._last_line_ignored = True
        self._ignore_next_line = True
      elif not line.startswith(' '):
        # Start of a new message: keep it unless it matches the boring regex.
        self._last_line_ignored = bool(self.IGNORE_RE.match(line))
      elif 'You should check if you need to specify' in line:
        self._last_line_ignored = True
      # Continuation lines (leading space) otherwise inherit the fate of the
      # message they belong to.

      if not self._last_line_ignored:
        kept_lines.append(line)
    return ''.join(kept_lines)
self._mapping = path + + def libraryjars(self, paths): + assert self._libraries is None + for p in paths: + assert os.path.exists(p), p + self._libraries = paths + + def injars(self, paths): + assert self._injars is None + for p in paths: + assert os.path.exists(p), p + self._injars = paths + + def configs(self, paths): + assert self._configs is None + self._configs = paths + for p in self._configs: + assert os.path.exists(p), p + + def config_exclusions(self, paths): + assert self._config_exclusions is None + self._config_exclusions = paths + + def verbose(self, verbose): + self._verbose = verbose + + def min_api(self, min_api): + assert self._min_api is None + self._min_api = min_api + + def disable_optimizations(self, optimizations): + self._disabled_optimizations += optimizations + + def build(self): + assert self._injars is not None + assert self._outjar is not None + assert self._configs is not None + cmd = [ + 'java', '-jar', self._proguard_jar_path, + '-forceprocessing', + ] + + if self._mapping: + cmd += ['-applymapping', self._mapping] + + if self._libraries: + cmd += ['-libraryjars', ':'.join(self._libraries)] + + if self._min_api: + cmd += [ + '-assumevalues class android.os.Build$VERSION {' + + ' public static final int SDK_INT return ' + self._min_api + + '..9999; }' + ] + + for optimization in self._disabled_optimizations: + cmd += [ '-optimizations', '!' + optimization ] + + # Filter to just .class files to avoid warnings about multiple inputs having + # the same files in META_INF/. + cmd += [ + '-injars', + ':'.join('{}(**.class)'.format(x) for x in self._injars) + ] + + for config_file in self.GetConfigs(): + cmd += ['-include', config_file] + + # The output jar must be specified after inputs. 
+ cmd += [ + '-outjars', self._outjar, + '-printseeds', self._outjar + '.seeds', + '-printusage', self._outjar + '.usage', + '-printmapping', self._mapping_output, + ] + + if self._verbose: + cmd.append('-verbose') + + return cmd + + def GetDepfileDeps(self): + # The list of inputs that the GN target does not directly know about. + inputs = self._configs + self._injars + if self._libraries: + inputs += self._libraries + return inputs + + def GetConfigs(self): + ret = list(self._configs) + for path in self._config_exclusions: + ret.remove(path) + return ret + + def GetInputs(self): + inputs = self.GetDepfileDeps() + inputs += [self._proguard_jar_path] + if self._mapping: + inputs.append(self._mapping) + return inputs + + def GetOutputs(self): + return [ + self._outjar, + self._outjar + '.flags', + self._mapping_output, + self._outjar + '.seeds', + self._outjar + '.usage', + ] + + def _WriteFlagsFile(self, cmd, out): + # Quite useful for auditing proguard flags. + WriteFlagsFile(self._configs, out) + out.write('#' * 80 + '\n') + out.write('# Command-line\n') + out.write('#' * 80 + '\n') + out.write('# ' + ' '.join(cmd) + '\n') + + def CheckOutput(self): + cmd = self.build() + + # There are a couple scenarios (.mapping files and switching from no + # proguard -> proguard) where GN's copy() target is used on output + # paths. These create hardlinks, so we explicitly unlink here to avoid + # updating files with multiple links. + for path in self.GetOutputs(): + if os.path.exists(path): + os.unlink(path) + + with open(self._outjar + '.flags', 'w') as out: + self._WriteFlagsFile(cmd, out) + + # Warning: and Error: are sent to stderr, but messages and Note: are sent + # to stdout. 
def WriteFlagsFile(configs, out, exclude_generated=False):
  """Concatenates proguard config files into |out| with banner headers.

  Args:
    configs: Iterable of config file paths; written in sorted order.
    out: Writable file object receiving the merged flags.
    exclude_generated: When True, skip *.resources.proguard.txt files.
  """
  banner = '#' * 80 + '\n'
  for config_path in sorted(configs):
    if exclude_generated and config_path.endswith('.resources.proguard.txt'):
      continue

    out.write(banner)
    out.write('# ' + config_path + '\n')
    out.write(banner)
    with open(config_path) as config_file:
      contents = config_file.read().rstrip()
    # Remove numbers from generated rule comments to make file more
    # diff'able.
    contents = re.sub(r' #generated:\d+', '', contents)
    out.write(contents)
    out.write('\n\n')
# A variation of these maps also exists in:
# //base/android/java/src/org/chromium/base/LocaleUtils.java
# //ui/android/java/src/org/chromium/base/LocalizationUtils.java
_CHROME_TO_ANDROID_LOCALE_MAP = {
    'es-419': 'es-rUS',
    'fil': 'tl',
    'he': 'iw',
    'id': 'in',
    'yi': 'ji',
}
_ANDROID_TO_CHROMIUM_LANGUAGE_MAP = {
    'tl': 'fil',
    'iw': 'he',
    'in': 'id',
    'ji': 'yi',
    'no': 'nb',  # 'no' is not a real language. http://crbug.com/920960
}


_xml_namespace_initialized = False


def ToAndroidLocaleName(chromium_locale):
  """Convert a Chromium locale name into a corresponding Android one."""
  # First handle the special cases, these are needed to deal with Android
  # releases *before* 5.0/Lollipop.
  android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(chromium_locale)
  if android_locale:
    return android_locale

  # Format of Chromium locale name is '<lang>' or '<lang>-<region>'
  # where <lang> is a 2 or 3 letter language code (ISO 639-1 or 639-2)
  # and region is a capitalized locale region name.
  lang, _, region = chromium_locale.partition('-')
  if not region:
    return lang

  # Translate newer language tags into obsolete ones. Only necessary if
  #  region is not None (e.g. 'he-IL' -> 'iw-rIL')
  lang = _CHROME_TO_ANDROID_LOCALE_MAP.get(lang, lang)

  # Using '<lang>-r<region>' is now acceptable as a locale name for all
  # versions of Android.
  return '%s-r%s' % (lang, region)


# ISO 639 language code + optional ("-r" + capitalized region code).
# Note that before Android 5.0/Lollipop, only 2-letter ISO 639-1 codes
# are supported.
_RE_ANDROID_LOCALE_QUALIFIER_1 = re.compile(r'^([a-z]{2,3})(\-r([A-Z]+))?$')

# Starting with Android 7.0/Nougat, BCP 47 codes are supported but must
# be prefixed with 'b+', and may include optional tags. e.g. 'b+en+US',
# 'b+ja+Latn', 'b+ja+JP+Latn'
_RE_ANDROID_LOCALE_QUALIFIER_2 = re.compile(r'^b\+([a-z]{2,3})(\+.+)?$')

# Matches an all-uppercase region name.
_RE_ALL_UPPERCASE = re.compile(r'^[A-Z]+$')


def ToChromiumLocaleName(android_locale):
  """Convert an Android locale name into a Chromium one.

  Returns None if |android_locale| matches neither Android qualifier format.
  """
  lang = None
  region = None
  m = _RE_ANDROID_LOCALE_QUALIFIER_1.match(android_locale)
  if m:
    lang = m.group(1)
    if m.group(2):
      region = m.group(3)
  else:
    m = _RE_ANDROID_LOCALE_QUALIFIER_2.match(android_locale)
    if m:
      lang = m.group(1)
      if m.group(2):
        tags = m.group(2).split('+')
        # First all-uppercase tag is a region. This deals with cases where
        # a special tag is placed before it (e.g. 'b+cmn+Hant+TW')
        for tag in tags:
          if _RE_ALL_UPPERCASE.match(tag):
            region = tag
            break

  if not lang:
    return None

  # Special case for es-rUS -> es-419
  if lang == 'es' and region == 'US':
    return 'es-419'

  lang = _ANDROID_TO_CHROMIUM_LANGUAGE_MAP.get(lang, lang)
  if not region:
    return lang

  return '%s-%s' % (lang, region)


def IsAndroidLocaleQualifier(string):
  """Returns true if |string| is a valid Android resource locale qualifier."""
  return (_RE_ANDROID_LOCALE_QUALIFIER_1.match(string)
          or _RE_ANDROID_LOCALE_QUALIFIER_2.match(string))


def FindLocaleInStringResourceFilePath(file_path):
  """Return Android locale name of a string resource file path.

  Args:
    file_path: A file path.
  Returns:
    If |file_path| is of the format '.../values-<locale>/<name>.xml', return
    the value of <locale> (and Android locale qualifier). Otherwise return None.
  """
  if not file_path.endswith('.xml'):
    return None
  prefix = 'values-'
  dir_name = os.path.basename(os.path.dirname(file_path))
  if not dir_name.startswith(prefix):
    return None
  qualifier = dir_name[len(prefix):]
  return qualifier if IsAndroidLocaleQualifier(qualifier) else None


def ToAndroidLocaleList(locale_list):
  """Convert a list of Chromium locales into the corresponding Android list."""
  return sorted(ToAndroidLocaleName(locale) for locale in locale_list)


# Represents a line from a R.txt file.
_TextSymbolEntry = collections.namedtuple('RTextEntry',
    ('java_type', 'resource_type', 'name', 'value'))


def CreateResourceInfoFile(files_to_zip, zip_path):
  """Given a mapping of archive paths to their source, write an info file.

  The info file contains lines of '{archive_path},{source_path}' for ease of
  parsing. Assumes that there is no comma in the file names.

  Args:
    files_to_zip: Dict mapping path in the zip archive to original source.
    zip_path: Path where the zip file ends up, this is where the info file goes.
  """
  info_file_path = zip_path + '.info'
  with open(info_file_path, 'w') as info_file:
    # items() rather than the Python 2-only iteritems(), so this also works
    # under Python 3.
    for archive_path, source_path in files_to_zip.items():
      info_file.write('{},{}\n'.format(archive_path, source_path))
+ """ + info_file_path = zip_path + '.info' + with open(info_file_path, 'w') as info_file: + for archive_path, source_path in files_to_zip.iteritems(): + info_file.write('{},{}\n'.format(archive_path, source_path)) + + +def _ParseTextSymbolsFile(path, fix_package_ids=False): + """Given an R.txt file, returns a list of _TextSymbolEntry. + + Args: + path: Input file path. + fix_package_ids: if True, 0x00 and 0x02 package IDs read from the file + will be fixed to 0x7f. + Returns: + A list of _TextSymbolEntry instances. + Raises: + Exception: An unexpected line was detected in the input. + """ + ret = [] + with open(path) as f: + for line in f: + m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line) + if not m: + raise Exception('Unexpected line in R.txt: %s' % line) + java_type, resource_type, name, value = m.groups() + if fix_package_ids: + value = _FixPackageIds(value) + ret.append(_TextSymbolEntry(java_type, resource_type, name, value)) + return ret + + +def _FixPackageIds(resource_value): + # Resource IDs for resources belonging to regular APKs have their first byte + # as 0x7f (package id). However with webview, since it is not a regular apk + # but used as a shared library, aapt is passed the --shared-resources flag + # which changes some of the package ids to 0x00 and 0x02. This function + # normalises these (0x00 and 0x02) package ids to 0x7f, which the generated + # code in R.java changes to the correct package id at runtime. 
+ # resource_value is a string with either, a single value '0x12345678', or an + # array of values like '{ 0xfedcba98, 0x01234567, 0x56789abc }' + return re.sub(r'0x(?:00|02)', r'0x7f', resource_value) + + +def _GetRTxtResourceNames(r_txt_path): + """Parse an R.txt file and extract the set of resource names from it.""" + return {entry.name for entry in _ParseTextSymbolsFile(r_txt_path)} + + +def GetRTxtStringResourceNames(r_txt_path): + """Parse an R.txt file and the list of its string resource names.""" + return sorted({ + entry.name + for entry in _ParseTextSymbolsFile(r_txt_path) + if entry.resource_type == 'string' + }) + + +def GenerateStringResourcesWhitelist(module_r_txt_path, whitelist_r_txt_path): + """Generate a whitelist of string resource IDs. + + Args: + module_r_txt_path: Input base module R.txt path. + whitelist_r_txt_path: Input whitelist R.txt path. + Returns: + A dictionary mapping numerical resource IDs to the corresponding + string resource names. The ID values are taken from string resources in + |module_r_txt_path| that are also listed by name in |whitelist_r_txt_path|. + """ + whitelisted_names = { + entry.name + for entry in _ParseTextSymbolsFile(whitelist_r_txt_path) + if entry.resource_type == 'string' + } + return { + int(entry.value, 0): entry.name + for entry in _ParseTextSymbolsFile(module_r_txt_path) + if entry.resource_type == 'string' and entry.name in whitelisted_names + } + + +class RJavaBuildOptions: + """A class used to model the various ways to build an R.java file. + + This is used to control which resource ID variables will be final or + non-final, and whether an onResourcesLoaded() method will be generated + to adjust the non-final ones, when the corresponding library is loaded + at runtime. + + Note that by default, all resources are final, and there is no + method generated, which corresponds to calling ExportNoResources(). 
+ """ + def __init__(self): + self.has_constant_ids = True + self.resources_whitelist = None + self.has_on_resources_loaded = False + self.export_const_styleable = False + + def ExportNoResources(self): + """Make all resource IDs final, and don't generate a method.""" + self.has_constant_ids = True + self.resources_whitelist = None + self.has_on_resources_loaded = False + self.export_const_styleable = False + + def ExportAllResources(self): + """Make all resource IDs non-final in the R.java file.""" + self.has_constant_ids = False + self.resources_whitelist = None + + def ExportSomeResources(self, r_txt_file_path): + """Only select specific resource IDs to be non-final. + + Args: + r_txt_file_path: The path to an R.txt file. All resources named + int it will be non-final in the generated R.java file, all others + will be final. + """ + self.has_constant_ids = True + self.resources_whitelist = _GetRTxtResourceNames(r_txt_file_path) + + def ExportAllStyleables(self): + """Make all styleable constants non-final, even non-resources ones. + + Resources that are styleable but not of int[] type are not actually + resource IDs but constants. By default they are always final. Call this + method to make them non-final anyway in the final R.java file. + """ + self.export_const_styleable = True + + def GenerateOnResourcesLoaded(self): + """Generate an onResourcesLoaded() method. + + This Java method will be called at runtime by the framework when + the corresponding library (which includes the R.java source file) + will be loaded at runtime. This corresponds to the --shared-resources + or --app-as-shared-lib flags of 'aapt package'. + """ + self.has_on_resources_loaded = True + + def _IsResourceFinal(self, entry): + """Determines whether a resource should be final or not. + + Args: + entry: A _TextSymbolEntry instance. + Returns: + True iff the corresponding entry should be final. 
+ """ + if entry.resource_type == 'styleable' and entry.java_type != 'int[]': + # A styleable constant may be exported as non-final after all. + return not self.export_const_styleable + elif not self.has_constant_ids: + # Every resource is non-final + return False + elif not self.resources_whitelist: + # No whitelist means all IDs are non-final. + return True + else: + # Otherwise, only those in the + return entry.name not in self.resources_whitelist + + +def CreateRJavaFiles(srcjar_dir, package, main_r_txt_file, extra_res_packages, + extra_r_txt_files, rjava_build_options): + """Create all R.java files for a set of packages and R.txt files. + + Args: + srcjar_dir: The top-level output directory for the generated files. + package: Top-level package name. + main_r_txt_file: The main R.txt file containing the valid values + of _all_ resource IDs. + extra_res_packages: A list of extra package names. + extra_r_txt_files: A list of extra R.txt files. One per item in + |extra_res_packages|. Note that all resource IDs in them will be ignored, + |and replaced by the values extracted from |main_r_txt_file|. + rjava_build_options: An RJavaBuildOptions instance that controls how + exactly the R.java file is generated. + Raises: + Exception if a package name appears several times in |extra_res_packages| + """ + assert len(extra_res_packages) == len(extra_r_txt_files), \ + 'Need one R.txt file per package' + + packages = list(extra_res_packages) + r_txt_files = list(extra_r_txt_files) + + if package and package not in packages: + # Sometimes, an apk target and a resources target share the same + # AndroidManifest.xml and thus |package| will already be in |packages|. + packages.append(package) + r_txt_files.append(main_r_txt_file) + + # Map of (resource_type, name) -> Entry. + # Contains the correct values for resources. 
+ all_resources = {} + for entry in _ParseTextSymbolsFile(main_r_txt_file, fix_package_ids=True): + all_resources[(entry.resource_type, entry.name)] = entry + + # Map of package_name->resource_type->entry + resources_by_package = ( + collections.defaultdict(lambda: collections.defaultdict(list))) + # Build the R.java files using each package's R.txt file, but replacing + # each entry's placeholder value with correct values from all_resources. + for package, r_txt_file in zip(packages, r_txt_files): + if package in resources_by_package: + raise Exception(('Package name "%s" appeared twice. All ' + 'android_resources() targets must use unique package ' + 'names, or no package name at all.') % package) + resources_by_type = resources_by_package[package] + # The sub-R.txt files have the wrong values at this point. Read them to + # figure out which entries belong to them, but use the values from the + # main R.txt file. + for entry in _ParseTextSymbolsFile(r_txt_file): + entry = all_resources.get((entry.resource_type, entry.name)) + # For most cases missing entry here is an error. It means that some + # library claims to have or depend on a resource that isn't included into + # the APK. There is one notable exception: Google Play Services (GMS). + # GMS is shipped as a bunch of AARs. One of them - basement - contains + # R.txt with ids of all resources, but most of the resources are in the + # other AARs. However, all other AARs reference their resources via + # basement's R.java so the latter must contain all ids that are in its + # R.txt. Most targets depend on only a subset of GMS AARs so some + # resources are missing, which is okay because the code that references + # them is missing too. We can't get an id for a resource that isn't here + # so the only solution is to skip the resource entry entirely. + # + # We can verify that all entries referenced in the code were generated + # correctly by running Proguard on the APK: it will report missing + # fields. 
+ if entry: + resources_by_type[entry.resource_type].append(entry) + + for package, resources_by_type in resources_by_package.iteritems(): + _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type, + rjava_build_options) + + +def _CreateRJavaSourceFile(srcjar_dir, package, resources_by_type, + rjava_build_options): + """Generates an R.java source file.""" + package_r_java_dir = os.path.join(srcjar_dir, *package.split('.')) + build_utils.MakeDirectory(package_r_java_dir) + package_r_java_path = os.path.join(package_r_java_dir, 'R.java') + java_file_contents = _RenderRJavaSource(package, resources_by_type, + rjava_build_options) + with open(package_r_java_path, 'w') as f: + f.write(java_file_contents) + + +# Resource IDs inside resource arrays are sorted. Application resource IDs start +# with 0x7f but system resource IDs start with 0x01 thus system resource ids are +# always at the start of the array. This function finds the index of the first +# non system resource id to be used for package ID rewriting (we should not +# rewrite system resource ids). +def _GetNonSystemIndex(entry): + """Get the index of the first application resource ID within a resource + array.""" + res_ids = re.findall(r'0x[0-9a-f]{8}', entry.value) + for i, res_id in enumerate(res_ids): + if res_id.startswith('0x7f'): + return i + return len(res_ids) + + +def _RenderRJavaSource(package, resources_by_type, rjava_build_options): + """Render an R.java source file. See _CreateRJaveSourceFile for args info.""" + final_resources_by_type = collections.defaultdict(list) + non_final_resources_by_type = collections.defaultdict(list) + for res_type, resources in resources_by_type.iteritems(): + for entry in resources: + # Entries in stylable that are not int[] are not actually resource ids + # but constants. 
+ if rjava_build_options._IsResourceFinal(entry): + final_resources_by_type[res_type].append(entry) + else: + non_final_resources_by_type[res_type].append(entry) + + # Keep these assignments all on one line to make diffing against regular + # aapt-generated files easier. + create_id = ('{{ e.resource_type }}.{{ e.name }} ^= packageIdTransform;') + create_id_arr = ('{{ e.resource_type }}.{{ e.name }}[i] ^=' + ' packageIdTransform;') + for_loop_condition = ('int i = {{ startIndex(e) }}; i < ' + '{{ e.resource_type }}.{{ e.name }}.length; ++i') + + # Here we diverge from what aapt does. Because we have so many + # resources, the onResourcesLoaded method was exceeding the 64KB limit that + # Java imposes. For this reason we split onResourcesLoaded into different + # methods for each resource type. + template = Template("""/* AUTO-GENERATED FILE. DO NOT MODIFY. */ + +package {{ package }}; + +public final class R { + private static boolean sResourcesDidLoad; + {% for resource_type in resource_types %} + public static final class {{ resource_type }} { + {% for e in final_resources[resource_type] %} + public static final {{ e.java_type }} {{ e.name }} = {{ e.value }}; + {% endfor %} + {% for e in non_final_resources[resource_type] %} + {% if e.value != '0' %} + public static {{ e.java_type }} {{ e.name }} = {{ e.value }}; + {% else %} + public static {{ e.java_type }} {{ e.name }}; + {% endif %} + {% endfor %} + } + {% endfor %} + {% if has_on_resources_loaded %} + public static void onResourcesLoaded(int packageId) { + assert !sResourcesDidLoad; + sResourcesDidLoad = true; + int packageIdTransform = (packageId ^ 0x7f) << 24; + {% for resource_type in resource_types %} + onResourcesLoaded{{ resource_type|title }}(packageIdTransform); + {% for e in non_final_resources[resource_type] %} + {% if e.java_type == 'int[]' %} + for(""" + for_loop_condition + """) { + """ + create_id_arr + """ + } + {% endif %} + {% endfor %} + {% endfor %} + } + {% for res_type in resource_types 
%} + private static void onResourcesLoaded{{ res_type|title }} ( + int packageIdTransform) { + {% for e in non_final_resources[res_type] %} + {% if res_type != 'styleable' and e.java_type != 'int[]' %} + """ + create_id + """ + {% endif %} + {% endfor %} + } + {% endfor %} + {% endif %} +} +""", trim_blocks=True, lstrip_blocks=True) + + return template.render( + package=package, + resource_types=sorted(resources_by_type), + has_on_resources_loaded=rjava_build_options.has_on_resources_loaded, + final_resources=final_resources_by_type, + non_final_resources=non_final_resources_by_type, + startIndex=_GetNonSystemIndex) + + +def ExtractPackageFromManifest(manifest_path): + """Extract package name from Android manifest file.""" + return ParseAndroidManifest(manifest_path)[1].get('package') + + +def ExtractBinaryManifestValues(aapt2_path, apk_path): + """Returns (version_code, version_name, package_name) for the given apk.""" + output = subprocess.check_output([ + aapt2_path, 'dump', 'xmltree', apk_path, '--file', 'AndroidManifest.xml' + ]) + version_code = re.search(r'versionCode.*?=(\d*)', output).group(1) + version_name = re.search(r'versionName.*?="(.*?)"', output).group(1) + package_name = re.search(r'package.*?="(.*?)"', output).group(1) + return version_code, version_name, package_name + + +def ExtractArscPackage(aapt2_path, apk_path): + """Returns (package_name, package_id) of resources.arsc from apk_path.""" + proc = subprocess.Popen([aapt2_path, 'dump', 'resources', apk_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + for line in proc.stdout: + # Package name=org.chromium.webview_shell id=7f + if line.startswith('Package'): + proc.kill() + parts = line.split() + package_name = parts[1].split('=')[1] + package_id = parts[2][3:] + return package_name, int(package_id, 16) + + # aapt2 currently crashes when dumping webview resources, but not until after + # it prints the "Package" line (b/130553900). 
+ sys.stderr.write(proc.stderr.read()) + raise Exception('Failed to find arsc package name') + + +def ExtractDeps(dep_zips, deps_dir): + """Extract a list of resource dependency zip files. + + Args: + dep_zips: A list of zip file paths, each one will be extracted to + a subdirectory of |deps_dir|, named after the zip file's path (e.g. + '/some/path/foo.zip' -> '{deps_dir}/some_path_foo/'). + deps_dir: Top-level extraction directory. + Returns: + The list of all sub-directory paths, relative to |deps_dir|. + Raises: + Exception: If a sub-directory already exists with the same name before + extraction. + """ + dep_subdirs = [] + for z in dep_zips: + subdirname = z.replace(os.path.sep, '_') + subdir = os.path.join(deps_dir, subdirname) + if os.path.exists(subdir): + raise Exception('Resource zip name conflict: ' + subdirname) + build_utils.ExtractAll(z, path=subdir) + dep_subdirs.append(subdir) + return dep_subdirs + + +class _ResourceBuildContext(object): + """A temporary directory for packaging and compiling Android resources. + + Args: + temp_dir: Optional root build directory path. If None, a temporary + directory will be created, and removed in Close(). + """ + def __init__(self, temp_dir=None): + """Initialized the context.""" + # The top-level temporary directory. + if temp_dir: + self.temp_dir = temp_dir + self.remove_on_exit = False + else: + self.temp_dir = tempfile.mkdtemp() + self.remove_on_exit = True + + # A location to store resources extracted form dependency zip files. + self.deps_dir = os.path.join(self.temp_dir, 'deps') + os.mkdir(self.deps_dir) + # A location to place aapt-generated files. + self.gen_dir = os.path.join(self.temp_dir, 'gen') + os.mkdir(self.gen_dir) + # Location of the generated R.txt file. + self.r_txt_path = os.path.join(self.gen_dir, 'R.txt') + # A location to place generated R.java files. 
+ self.srcjar_dir = os.path.join(self.temp_dir, 'java') + os.mkdir(self.srcjar_dir) + + def Close(self): + """Close the context and destroy all temporary files.""" + if self.remove_on_exit: + shutil.rmtree(self.temp_dir) + + +@contextlib.contextmanager +def BuildContext(temp_dir=None): + """Generator for a _ResourceBuildContext instance.""" + try: + context = _ResourceBuildContext(temp_dir) + yield context + finally: + context.Close() + + +def ResourceArgsParser(): + """Create an argparse.ArgumentParser instance with common argument groups. + + Returns: + A tuple of (parser, in_group, out_group) corresponding to the parser + instance, and the input and output argument groups for it, respectively. + """ + parser = argparse.ArgumentParser(description=__doc__) + + input_opts = parser.add_argument_group('Input options') + output_opts = parser.add_argument_group('Output options') + + build_utils.AddDepfileOption(output_opts) + + input_opts.add_argument('--include-resources', required=True, action="append", + help='Paths to arsc resource files used to link ' + 'against. Can be specified multiple times.') + + input_opts.add_argument('--dependencies-res-zips', required=True, + help='Resources zip archives from dependents. Required to ' + 'resolve @type/foo references into dependent ' + 'libraries.') + + input_opts.add_argument( + '--r-text-in', + help='Path to pre-existing R.txt. Its resource IDs override those found ' + 'in the aapt-generated R.txt when generating R.java.') + + input_opts.add_argument( + '--extra-res-packages', + help='Additional package names to generate R.java files for.') + + input_opts.add_argument( + '--extra-r-text-files', + help='For each additional package, the R.txt file should contain a ' + 'list of resources to be included in the R.java file in the format ' + 'generated by aapt.') + + return (parser, input_opts, output_opts) + + +def HandleCommonOptions(options): + """Handle common command-line options after parsing. 
+ + Args: + options: the result of parse_args() on the parser returned by + ResourceArgsParser(). This function updates a few common fields. + """ + options.include_resources = [build_utils.ParseGnList(r) for r in + options.include_resources] + # Flatten list of include resources list to make it easier to use. + options.include_resources = [r for resources in options.include_resources + for r in resources] + + options.dependencies_res_zips = ( + build_utils.ParseGnList(options.dependencies_res_zips)) + + # Don't use [] as default value since some script explicitly pass "". + if options.extra_res_packages: + options.extra_res_packages = ( + build_utils.ParseGnList(options.extra_res_packages)) + else: + options.extra_res_packages = [] + + if options.extra_r_text_files: + options.extra_r_text_files = ( + build_utils.ParseGnList(options.extra_r_text_files)) + else: + options.extra_r_text_files = [] + + +def ParseAndroidResourceStringsFromXml(xml_data): + """Parse and Android xml resource file and extract strings from it. + + Args: + xml_data: XML file data. + Returns: + A (dict, namespaces) tuple, where |dict| maps string names to their UTF-8 + encoded value, and |namespaces| is a dictionary mapping prefixes to URLs + corresponding to namespaces declared in the <resources> element. + """ + # NOTE: This uses regular expression matching because parsing with something + # like ElementTree makes it tedious to properly parse some of the structured + # text found in string resources, e.g.: + # <string msgid="3300176832234831527" \ + # name="abc_shareactionprovider_share_with_application">\ + # "Condividi tramite <ns1:g id="APPLICATION_NAME">%s</ns1:g>"\ + # </string> + result = {} + + # Find <resources> start tag and extract namespaces from it. 
+ m = re.search('<resources([^>]*)>', xml_data, re.MULTILINE) + if not m: + raise Exception('<resources> start tag expected: ' + xml_data) + input_data = xml_data[m.end():] + resource_attrs = m.group(1) + re_namespace = re.compile('\s*(xmlns:(\w+)="([^"]+)")') + namespaces = {} + while resource_attrs: + m = re_namespace.match(resource_attrs) + if not m: + break + namespaces[m.group(2)] = m.group(3) + resource_attrs = resource_attrs[m.end(1):] + + # Find each string element now. + re_string_element_start = re.compile('<string ([^>]* )?name="([^">]+)"[^>]*>') + re_string_element_end = re.compile('</string>') + while input_data: + m = re_string_element_start.search(input_data) + if not m: + break + name = m.group(2) + input_data = input_data[m.end():] + m2 = re_string_element_end.search(input_data) + if not m2: + raise Exception('Expected closing string tag: ' + input_data) + text = input_data[:m2.start()] + input_data = input_data[m2.end():] + if len(text) and text[0] == '"' and text[-1] == '"': + text = text[1:-1] + result[name] = text + + return result, namespaces + + +def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None): + """Generate an XML text corresponding to an Android resource strings map. + + Args: + names_to_text: A dictionary mapping resource names to localized + text (encoded as UTF-8). + namespaces: A map of namespace prefix to URL. + Returns: + New non-Unicode string containing an XML data structure describing the + input as an Android resource .xml file. 
+ """ + result = '<?xml version="1.0" encoding="utf-8"?>\n' + result += '<resources' + if namespaces: + for prefix, url in sorted(namespaces.iteritems()): + result += ' xmlns:%s="%s"' % (prefix, url) + result += '>\n' + if not names_to_utf8_text: + result += '<!-- this file intentionally empty -->\n' + else: + for name, utf8_text in sorted(names_to_utf8_text.iteritems()): + result += '<string name="%s">"%s"</string>\n' % (name, utf8_text) + result += '</resources>\n' + return result + + +def FilterAndroidResourceStringsXml(xml_file_path, string_predicate): + """Remove unwanted localized strings from an Android resource .xml file. + + This function takes a |string_predicate| callable object that will + receive a resource string name, and should return True iff the + corresponding <string> element should be kept in the file. + + Args: + xml_file_path: Android resource strings xml file path. + string_predicate: A predicate function which will receive the string name + and shal + """ + with open(xml_file_path) as f: + xml_data = f.read() + strings_map, namespaces = ParseAndroidResourceStringsFromXml(xml_data) + + string_deletion = False + for name in strings_map.keys(): + if not string_predicate(name): + del strings_map[name] + string_deletion = True + + if string_deletion: + new_xml_data = GenerateAndroidResourceStringsXml(strings_map, namespaces) + with open(xml_file_path, 'wb') as f: + f.write(new_xml_data) + + +def _RegisterElementTreeNamespaces(): + global _xml_namespace_initialized + if not _xml_namespace_initialized: + _xml_namespace_initialized = True + ElementTree.register_namespace('android', ANDROID_NAMESPACE) + ElementTree.register_namespace('tools', TOOLS_NAMESPACE) + + +def ParseAndroidManifest(path): + """Parses an AndroidManifest.xml using ElementTree. + + Registers required namespaces & creates application node if missing. + + Returns tuple of: + doc: Root xml document. + manifest_node: the <manifest> node. + app_node: the <application> node. 
+ """ + _RegisterElementTreeNamespaces() + doc = ElementTree.parse(path) + # ElementTree.find does not work if the required tag is the root. + if doc.getroot().tag == 'manifest': + manifest_node = doc.getroot() + else: + manifest_node = doc.find('manifest') + + app_node = doc.find('application') + if app_node is None: + app_node = ElementTree.SubElement(manifest_node, 'application') + + return doc, manifest_node, app_node diff --git a/deps/v8/build/android/gyp/util/resource_utils_test.py b/deps/v8/build/android/gyp/util/resource_utils_test.py new file mode 100755 index 0000000000..dc1094aca0 --- /dev/null +++ b/deps/v8/build/android/gyp/util/resource_utils_test.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python +# coding: utf-8 +# Copyright 2018 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import collections +import os +import sys +import unittest + +sys.path.insert( + 0, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))) +from util import build_utils + +# Required because the following import needs build/android/gyp in the +# Python path to import util.build_utils. +_BUILD_ANDROID_GYP_ROOT = os.path.abspath( + os.path.join(os.path.dirname(__file__), os.pardir)) +sys.path.insert(1, _BUILD_ANDROID_GYP_ROOT) + +import resource_utils # pylint: disable=relative-import + +# pylint: disable=line-too-long + +_TEST_XML_INPUT_1 = '''<?xml version="1.0" encoding="utf-8"?> +<resources xmlns:android="http://schemas.android.com/apk/res/android"> +<string name="copy_to_clipboard_failure_message">"Lõikelauale kopeerimine ebaõnnestus"</string> +<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string> +<string name="opening_file_error">"Valit. 
faili avamine ebaõnnestus"</string> +<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string> +</resources> +''' + +_TEST_XML_OUTPUT_2 = '''<?xml version="1.0" encoding="utf-8"?> +<resources xmlns:android="http://schemas.android.com/apk/res/android"> +<string name="low_memory_error">"Eelmist toimingut ei saa vähese mälu tõttu lõpetada"</string> +<string name="structured_text">"This is <android:g id="STRUCTURED_TEXT">%s</android:g>"</string> +</resources> +''' + +# pylint: enable=line-too-long + +_TEST_XML_OUTPUT_EMPTY = '''<?xml version="1.0" encoding="utf-8"?> +<resources> +<!-- this file intentionally empty --> +</resources> +''' + +_TEST_RESOURCES_MAP_1 = { + 'low_memory_error': 'Eelmist toimingut ei saa vähese mälu tõttu lõpetada', + 'opening_file_error': 'Valit. faili avamine ebaõnnestus', + 'copy_to_clipboard_failure_message': 'Lõikelauale kopeerimine ebaõnnestus', + 'structured_text': 'This is <android:g id="STRUCTURED_TEXT">%s</android:g>', +} + +_TEST_NAMESPACES_1 = {'android': 'http://schemas.android.com/apk/res/android'} + +_TEST_RESOURCES_WHITELIST_1 = ['low_memory_error', 'structured_text'] + +# Extracted from one generated Chromium R.txt file, with string resource +# names shuffled randomly. 
+_TEST_R_TXT = r'''int anim abc_fade_in 0x7f050000 +int anim abc_fade_out 0x7f050001 +int anim abc_grow_fade_in_from_bottom 0x7f050002 +int array DefaultCookiesSettingEntries 0x7f120002 +int array DefaultCookiesSettingValues 0x7f120003 +int array DefaultGeolocationSettingEntries 0x7f120004 +int attr actionBarDivider 0x7f0100e7 +int attr actionBarStyle 0x7f0100e2 +int string AllowedDomainsForAppsDesc 0x7f0c0105 +int string AlternateErrorPagesEnabledDesc 0x7f0c0107 +int string AuthAndroidNegotiateAccountTypeDesc 0x7f0c0109 +int string AllowedDomainsForAppsTitle 0x7f0c0104 +int string AlternateErrorPagesEnabledTitle 0x7f0c0106 +int[] styleable SnackbarLayout { 0x0101011f, 0x7f010076, 0x7f0100ba } +int styleable SnackbarLayout_android_maxWidth 0 +int styleable SnackbarLayout_elevation 2 +''' + +# Test whitelist R.txt file. Note that AlternateErrorPagesEnabledTitle is +# listed as an 'anim' and should thus be skipped. Similarly the string +# 'ThisStringDoesNotAppear' should not be in the final result. +_TEST_WHITELIST_R_TXT = r'''int anim AlternateErrorPagesEnabledTitle 0x7f0eeeee +int string AllowedDomainsForAppsDesc 0x7f0c0105 +int string AlternateErrorPagesEnabledDesc 0x7f0c0107 +int string ThisStringDoesNotAppear 0x7f0fffff +''' + +_TEST_R_TEXT_RESOURCES_IDS = { + 0x7f0c0105: 'AllowedDomainsForAppsDesc', + 0x7f0c0107: 'AlternateErrorPagesEnabledDesc', +} + +# Names of string resources in _TEST_R_TXT, should be sorted! 
+_TEST_R_TXT_STRING_RESOURCE_NAMES = sorted([ + 'AllowedDomainsForAppsDesc', + 'AllowedDomainsForAppsTitle', + 'AlternateErrorPagesEnabledDesc', + 'AlternateErrorPagesEnabledTitle', + 'AuthAndroidNegotiateAccountTypeDesc', +]) + + +def _CreateTestFile(tmp_dir, file_name, file_data): + file_path = os.path.join(tmp_dir, file_name) + with open(file_path, 'wt') as f: + f.write(file_data) + return file_path + + + +class ResourceUtilsTest(unittest.TestCase): + + def test_GetRTxtStringResourceNames(self): + with build_utils.TempDir() as tmp_dir: + tmp_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT) + self.assertListEqual( + resource_utils.GetRTxtStringResourceNames(tmp_file), + _TEST_R_TXT_STRING_RESOURCE_NAMES) + + def test_GenerateStringResourcesWhitelist(self): + with build_utils.TempDir() as tmp_dir: + tmp_module_rtxt_file = _CreateTestFile(tmp_dir, "test_R.txt", _TEST_R_TXT) + tmp_whitelist_rtxt_file = _CreateTestFile(tmp_dir, "test_whitelist_R.txt", + _TEST_WHITELIST_R_TXT) + self.assertDictEqual( + resource_utils.GenerateStringResourcesWhitelist( + tmp_module_rtxt_file, tmp_whitelist_rtxt_file), + _TEST_R_TEXT_RESOURCES_IDS) + + def test_IsAndroidLocaleQualifier(self): + good_locales = [ + 'en', + 'en-rUS', + 'fil', + 'fil-rPH', + 'iw', + 'iw-rIL', + 'b+en', + 'b+en+US', + 'b+ja+Latn', + 'b+ja+JP+Latn', + 'b+cmn+Hant-TW', + ] + bad_locales = [ + 'e', 'english', 'en-US', 'en_US', 'en-rus', 'b+e', 'b+english', 'b+ja+' + ] + for locale in good_locales: + self.assertTrue( + resource_utils.IsAndroidLocaleQualifier(locale), + msg="'%s' should be a good locale!" % locale) + + for locale in bad_locales: + self.assertFalse( + resource_utils.IsAndroidLocaleQualifier(locale), + msg="'%s' should be a bad locale!" 
          % locale)

  def test_ToAndroidLocaleName(self):
    _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP = {
        'en': 'en',
        'en-US': 'en-rUS',
        'en-FOO': 'en-rFOO',
        'fil': 'tl',
        'tl': 'tl',
        'he': 'iw',
        'he-IL': 'iw-rIL',
        'id': 'in',
        'id-BAR': 'in-rBAR',
        'nb': 'nb',
        'yi': 'ji'
    }
    # NOTE: dict.iteritems() is a Python 2 API; this script targets the
    # Python 2 build toolchain.
    for chromium_locale, android_locale in \
        _TEST_CHROMIUM_TO_ANDROID_LOCALE_MAP.iteritems():
      result = resource_utils.ToAndroidLocaleName(chromium_locale)
      self.assertEqual(result, android_locale)

  def test_ToChromiumLocaleName(self):
    # None values mean the input is not a valid Android locale qualifier.
    _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP = {
        'foo': 'foo',
        'foo-rBAR': 'foo-BAR',
        'b+foo': 'foo',
        'b+foo+BAR': 'foo-BAR',
        'b+foo+BAR+Whatever': 'foo-BAR',
        'b+foo+Whatever+BAR': 'foo-BAR',
        'b+foo+Whatever': 'foo',
        'en': 'en',
        'en-rUS': 'en-US',
        'en-US': None,
        'en-FOO': None,
        'en-rFOO': 'en-FOO',
        'es-rES': 'es-ES',
        'es-rUS': 'es-419',
        'tl': 'fil',
        'fil': 'fil',
        'iw': 'he',
        'iw-rIL': 'he-IL',
        'in': 'id',
        'in-rBAR': 'id-BAR',
        'id-rBAR': 'id-BAR',
        'nb': 'nb',
        'no': 'nb',  # http://crbug.com/920960
    }
    for android_locale, chromium_locale in \
        _TEST_ANDROID_TO_CHROMIUM_LOCALE_MAP.iteritems():
      result = resource_utils.ToChromiumLocaleName(android_locale)
      self.assertEqual(result, chromium_locale)

  def test_FindLocaleInStringResourceFilePath(self):
    # No locale qualifier at all.
    self.assertEqual(
        None,
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values/whatever.xml'))
    self.assertEqual(
        'foo',
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo/whatever.xml'))
    self.assertEqual(
        'foo-rBAR',
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo-rBAR/whatever.xml'))
    # Only the top-level values-* directory is considered.
    self.assertEqual(
        None,
        resource_utils.FindLocaleInStringResourceFilePath(
            'res/values-foo/ignore-subdirs/whatever.xml'))

  def test_ParseAndroidResourceStringsFromXml(self):
    ret, namespaces = resource_utils.ParseAndroidResourceStringsFromXml(
        _TEST_XML_INPUT_1)
    self.assertDictEqual(ret,
                         _TEST_RESOURCES_MAP_1)
    self.assertDictEqual(namespaces, _TEST_NAMESPACES_1)

  def test_GenerateAndroidResourceStringsXml(self):
    # First, an empty strings map, with no namespaces.
    result = resource_utils.GenerateAndroidResourceStringsXml({})
    self.assertEqual(result, _TEST_XML_OUTPUT_EMPTY)

    # Round-trip: regenerating from the parsed map must reproduce the input.
    result = resource_utils.GenerateAndroidResourceStringsXml(
        _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
    self.assertEqual(result, _TEST_XML_INPUT_1)

  @staticmethod
  def _CreateTestResourceFile(output_dir, locale, string_map, namespaces):
    """Writes a values-<locale>/strings.xml under |output_dir|; returns path."""
    values_dir = os.path.join(output_dir, 'values-' + locale)
    build_utils.MakeDirectory(values_dir)
    file_path = os.path.join(values_dir, 'strings.xml')
    with open(file_path, 'w') as f:
      file_data = resource_utils.GenerateAndroidResourceStringsXml(
          string_map, namespaces)
      f.write(file_data)
    return file_path

  def _CheckTestResourceFile(self, file_path, expected_data):
    """Asserts that |file_path|'s content equals |expected_data|."""
    with open(file_path) as f:
      file_data = f.read()
    self.assertEqual(file_data, expected_data)

  def test_FilterAndroidResourceStringsXml(self):
    with build_utils.TempDir() as tmp_path:
      test_file = self._CreateTestResourceFile(
          tmp_path, 'foo', _TEST_RESOURCES_MAP_1, _TEST_NAMESPACES_1)
      resource_utils.FilterAndroidResourceStringsXml(
          test_file, lambda x: x in _TEST_RESOURCES_WHITELIST_1)
      self._CheckTestResourceFile(test_file, _TEST_XML_OUTPUT_2)


if __name__ == '__main__':
  unittest.main()
diff --git a/deps/v8/build/android/gyp/write_build_config.py b/deps/v8/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000000..68dfac4bb8
--- /dev/null
+++ b/deps/v8/build/android/gyp/write_build_config.py
@@ -0,0 +1,1643 @@
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Writes a build_config file.
+ +The build_config file for a target is a json file containing information about +how to build that target based on the target's dependencies. This includes +things like: the javac classpath, the list of android resources dependencies, +etc. It also includes the information needed to create the build_config for +other targets that depend on that one. + +Android build scripts should not refer to the build_config directly, and the +build specification should instead pass information in using the special +file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing +of values in a json dict in a file and looks like this: + --python-arg=@FileArg(build_config_path:javac:classpath) + +Note: If paths to input files are passed in this way, it is important that: + 1. inputs/deps of the action ensure that the files are available the first + time the action runs. + 2. Either (a) or (b) + a. inputs/deps ensure that the action runs whenever one of the files changes + b. the files are added to the action's depfile + +NOTE: All paths within .build_config files are relative to $OUTPUT_CHROMIUM_DIR. + +This is a technical note describing the format of .build_config files. +Please keep it updated when changing this script. For extraction and +visualization instructions, see build/android/docs/build_config.md + +------------- BEGIN_MARKDOWN --------------------------------------------------- +The .build_config file format +=== + +# Introduction + +This document tries to explain the format of `.build_config` generated during +the Android build of Chromium. For a higher-level explanation of these files, +please read +[build/android/docs/build_config.md](build/android/docs/build_config.md). + +# The `deps_info` top-level dictionary: + +All `.build_config` files have a required `'deps_info'` key, whose value is a +dictionary describing the target and its dependencies. 
The latter has the +following required keys: + +## Required keys in `deps_info`: + +* `deps_info['type']`: The target type as a string. + + The following types are known by the internal GN build rules and the + build scripts altogether: + + * [java_binary](#target_java_binary) + * [java_annotation_processor](#target_java_annotation_processor) + * [junit_binary](#target_junit_binary) + * [java_library](#target_java_library) + * [android_assets](#target_android_assets) + * [android_resources](#target_android_resources) + * [android_apk](#target_android_apk) + * [android_app_bundle_module](#target_android_app_bundle_module) + * [android_app_bundle](#target_android_app_bundle) + * [dist_jar](#target_dist_jar) + * [dist_aar](#target_dist_aar) + * [resource_rewriter](#target_resource_rewriter) + * [group](#target_group) + + See later sections for more details of some of these. + +* `deps_info['path']`: Path to the target's `.build_config` file. + +* `deps_info['name']`: Nothing more than the basename of `deps_info['path']` +at the moment. + +* `deps_info['deps_configs']`: List of paths to the `.build_config` files of +all *direct* dependencies of the current target. + + NOTE: Because the `.build_config` of a given target is always generated + after the `.build_config` of its dependencies, the `write_build_config.py` + script can use chains of `deps_configs` to compute transitive dependencies + for each target when needed. + +## Optional keys in `deps_info`: + +The following keys will only appear in the `.build_config` files of certain +target types: + +* `deps_info['requires_android']`: True to indicate that the corresponding +code uses Android-specific APIs, and thus cannot run on the host within a +regular JVM. May only appear in Java-related targets. + +* `deps_info['supports_android']`: +May appear in Java-related targets, and indicates that +the corresponding code doesn't use Java APIs that are not available on +Android. 
As such it may run either on the host or on an Android device.

* `deps_info['assets']`:
Only seen for the [`android_assets`](#target_android_assets) type. See below.

* `deps_info['package_name']`: Java package name associated with this target.

  NOTE: For `android_resources` targets,
  this is the package name for the corresponding R class. For `android_apk`
  targets, this is the corresponding package name. This does *not* appear for
  other target types.

* `deps_info['android_manifest']`:
Path to an AndroidManifest.xml file related to the current target.

# Top-level `resources` dictionary:

This dictionary only appears for a few target types that can contain or
relate to Android resources (e.g. `android_resources` or `android_apk`):

* `resources['dependency_zips']`:
List of `deps_info['resources_zip']` entries for all `android_resources`
dependencies for the current target.

* `resources['extra_package_names']`:
Always empty for `android_resources` types. Otherwise,
the list of `deps_info['package_name']` entries for all `android_resources`
dependencies for the current target. Computed automatically by
`write_build_config.py`.

* `resources['extra_r_text_files']`:
Always empty for `android_resources` types. Otherwise, the list of
`deps_info['r_text']` entries for all `android_resources` dependencies for
the current target. Computed automatically.


# `.build_config` target types description:

## <a name="target_group">Target type `group`</a>:

This type corresponds to a simple target that is only used to group
dependencies. It matches the `java_group()` GN template. Its only top-level
`deps_info` keys are `supports_android` (always True), and `deps_configs`.


## <a name="target_android_resources">Target type `android_resources`</a>:

This type corresponds to targets that are used to group Android resource files.
+For example, all `android_resources` dependencies of an `android_apk` will +end up packaged into the final APK by the build system. + +It uses the following keys: + +* `deps_info['resource_dirs']`: +List of paths to the source directories containing the resources for this +target. This key is optional, because some targets can refer to prebuilt +`.aar` archives. + + +* `deps_info['resources_zip']`: +*Required*. Path to the `.resources.zip` file that contains all raw/uncompiled +resource files for this target (and also no `R.txt`, `R.java` or `R.class`). + + If `deps_info['resource_dirs']` is missing, this must point to a prebuilt + `.aar` archive containing resources. Otherwise, this will point to a + zip archive generated at build time, wrapping the content of + `deps_info['resource_dirs']` into a single zip file. + +* `deps_info['package_name']`: +Java package name that the R class for this target belongs to. + +* `deps_info['android_manifest']`: +Optional. Path to the top-level Android manifest file associated with these +resources (if not provided, an empty manifest will be used to generate R.txt). + +* `deps_info['r_text']`: +Provide the path to the `R.txt` file that describes the resources wrapped by +this target. Normally this file is generated from the content of the resource +directories or zip file, but some targets can provide their own `R.txt` file +if they want. + +* `deps_info['srcjar_path']`: +Path to the `.srcjar` file that contains the auto-generated `R.java` source +file corresponding to the content of `deps_info['r_text']`. This is *always* +generated from the content of `deps_info['r_text']` by the +`build/android/gyp/process_resources.py` script. + + +## <a name="target_android_assets">Target type `android_assets`</a>: + +This type corresponds to targets used to group Android assets, i.e. liberal +files that will be placed under `//assets/` within the final APK. 
+ +These use an `deps_info['assets']` key to hold a dictionary of values related +to assets covered by this target. + +* `assets['sources']`: +The list of all asset source paths for this target. Each source path can +use an optional `:<zipPath>` suffix, where `<zipPath>` is the final location +of the assets (relative to `//assets/`) within the APK. + +* `assets['outputs']`: +Optional. Some of the sources might be renamed before being stored in the +final //assets/ sub-directory. When this happens, this contains a list of +all renamed output file paths + + NOTE: When not empty, the first items of `assets['sources']` must match + every item in this list. Extra sources correspond to non-renamed sources. + + NOTE: This comes from the `asset_renaming_destinations` parameter for the + `android_assets()` GN template. + +* `assets['disable_compression']`: +Optional. Will be True to indicate that these assets should be stored +uncompressed in the final APK. For example, this is necessary for locale +.pak files used by the System WebView feature. + +* `assets['treat_as_locale_paks']`: +Optional. Will be True to indicate that these assets are locale `.pak` files +(containing localized strings for C++). These are later processed to generate +a special ``.build_config`.java` source file, listing all supported Locales in +the current build. + + +## <a name="target_java_library">Target type `java_library`</a>: + +This type is used to describe target that wrap Java bytecode, either created +by compiling sources, or providing them with a prebuilt jar. + +* `deps_info['unprocessed_jar_path']`: +Path to the original .jar file for this target, before any kind of processing +through Proguard or other tools. For most targets this is generated +from sources, with a name like `$target_name.javac.jar`. However, when using +a prebuilt jar, this will point to the source archive directly. 
+ +* `deps_info['jar_path']`: +Path to a file that is the result of processing +`deps_info['unprocessed_jar_path']` with various tools. + +* `deps_info['interface_jar_path']: +Path to the interface jar generated for this library. This corresponds to +a jar file that only contains declarations. Generated by running the `ijar` +tool on `deps_info['jar_path']` + +* `deps_info['dex_path']`: +Path to the `.dex` file generated for this target, from `deps_info['jar_path']` +unless this comes from a prebuilt `.aar` archive. + +* `deps_info['is_prebuilt']`: +True to indicate that this target corresponds to a prebuilt `.jar` file. +In this case, `deps_info['unprocessed_jar_path']` will point to the source +`.jar` file. Otherwise, it will be point to a build-generated file. + +* `deps_info['java_sources_file']`: +Path to a single `.sources` file listing all the Java sources that were used +to generate the library (simple text format, one `.jar` path per line). + +* `deps_info['owned_resource_dirs']`: +List of all resource directories belonging to all resource dependencies for +this target. + +* `deps_info['owned_resource_zips']`: +List of all resource zip files belonging to all resource dependencies for this +target. + +* `deps_info['owned_resource_srcjars']`: +List of all .srcjar files belonging to all resource dependencies for this +target. + +* `deps_info['javac']`: +A dictionary containing information about the way the sources in this library +are compiled. Appears also on other Java-related targets. See the [dedicated +section about this](#dict_javac) below for details. + +* `deps_info['javac_full_classpath']`: +The classpath used when performing bytecode processing. Essentially the +collection of all `deps_info['unprocessed_jar_path']` entries for the target +and all its dependencies. + +* `deps_info['javac_full_interface_classpath']`: +The classpath used when using the errorprone compiler. 

* `deps_info['proguard_enabled']`:
True to indicate that ProGuard processing is enabled for this target.

* `deps_info['proguard_configs']`:
A list of paths to ProGuard configuration files related to this library.

* `deps_info['extra_classpath_jars']`:
For some Java related types, a list of extra `.jar` files to use at build time
but not at runtime.

* `deps_info['extra_classpath_interface_jars']`:
The interface jars corresponding to extra_classpath_jars.

## <a name="target_java_binary">Target type `java_binary`</a>:

This type corresponds to a Java binary, which is nothing more than a
`java_library` target that also provides a main class name. It thus inherits
all entries from the `java_library` type, and adds:

* `deps_info['main_class']`:
Name of the main Java class that serves as an entry point for the binary.

* `deps_info['java_runtime_classpath']`:
The classpath used when running a Java or Android binary. Essentially the
collection of all `deps_info['jar_path']` entries for the target and all its
dependencies.


## <a name="target_junit_binary">Target type `junit_binary`</a>:

A target type for JUnit-specific binaries. Identical to
[`java_binary`](#target_java_binary) in the context of `.build_config` files,
except the name.


## <a name="target_java_annotation_processor">Target type \
`java_annotation_processor`</a>:

A target type for Java annotation processors. Identical to
[`java_binary`](#target_java_binary) in the context of `.build_config` files,
except for the name, and the fact that it requires a
`deps_info['main_class']` entry.


## <a name="target_android_apk">Target type `android_apk`</a>:

Corresponds to an Android APK. Inherits from the
[`java_binary`](#target_java_binary) type and adds:

* `deps_info['apk_path']`:
Path to the raw, unsigned, APK generated by this target.

* `deps_info['incremental_apk_path']`:
Path to the raw, unsigned, incremental APK generated by this target.
+ +* `deps_info['incremental_install_json_path']`: +Path to the JSON file with per-apk details for incremental install. +See `build/android/gyp/incremental/write_installer_json.py` for more +details about its content. + +* `deps_info['dist_jar']['all_interface_jars']`: +For `android_apk` and `dist_jar` targets, a list of all interface jar files +that will be merged into the final `.jar` file for distribution. + +* `deps_info['final_dex']['path']`: +Path to the final classes.dex file (or classes.zip in case of multi-dex) +for this APK. + +* `deps_info['final_dex']['dependency_dex_files']`: +The list of paths to all `deps_info['dex_path']` entries for all library +dependencies for this APK. + +* `native['libraries']` +List of native libraries for the primary ABI to be embedded in this APK. +E.g. [ "libchrome.so" ] (i.e. this doesn't include any ABI sub-directory +prefix). + +* `native['java_libraries_list']` +The same list as `native['libraries']` as a string holding a Java source +fragment, e.g. `"{\"chrome\"}"`, without any `lib` prefix, and `.so` +suffix (as expected by `System.loadLibrary()`). + +* `native['second_abi_libraries']` +List of native libraries for the secondary ABI to be embedded in this APK. +Empty if only a single ABI is supported. + +* `native['uncompress_shared_libraries']` +A boolean indicating whether native libraries are stored uncompressed in the +APK. + +* `native['extra_shared_libraries']` +A list of native libraries to store within the APK, in addition to those from +`native['libraries']`. These correspond to things like the Chromium linker +or instrumentation libraries. + +* `assets` +A list of assets stored compressed in the APK. Each entry has the format +`<source-path>:<destination-path>`, where `<source-path>` is relative to +`$CHROMIUM_OUTPUT_DIR`, and `<destination-path>` is relative to `//assets/` +within the APK. + +NOTE: Not to be confused with the `deps_info['assets']` dictionary that +belongs to `android_assets` targets only. 
+ +* `uncompressed_assets` +A list of uncompressed assets stored in the APK. Each entry has the format +`<source-path>:<destination-path>` too. + +* `compressed_locales_java_list` +A string holding a Java source fragment that gives the list of locales stored +compressed in the `//assets/` directory. E.g. `"{\"am\","\ar\",\"en-US\"}"`. +Note that the files will be stored with the `.pak` extension (e.g. +`//assets/en-US.pak`). + +* `uncompressed_locales_java_list` +A string holding a Java source fragment that gives the list of locales stored +uncompressed in the `//assets/stored-locales/` directory. These are used for +the System WebView feature only. Note that the files will be stored with the +`.pak` extension (e.g. `//assets/stored-locales/en-US.apk`). + +* `extra_android_manifests` +A list of `deps_configs['android_manifest]` entries, for all resource +dependencies for this target. I.e. a list of paths to manifest files for +all the resources in this APK. These will be merged with the root manifest +file to generate the final one used to build the APK. + +* `java_resources_jars` +This is a list of `.jar` files whose *Java* resources should be included in +the final APK. For example, this is used to copy the `.res` files from the +EMMA Coverage tool. The copy will omit any `.class` file and the top-level +`//meta-inf/` directory from the input jars. Everything else will be copied +into the final APK as-is. + +NOTE: This has nothing to do with *Android* resources. + +* `jni['all_source']` +The list of all `deps_info['java_sources_file']` entries for all library +dependencies for this APK. Note: this is a list of files, where each file +contains a list of Java source files. This is used for JNI registration. + +* `deps_info['proguard_all_configs']`: +The collection of all 'deps_info['proguard_configs']` values from this target +and all its dependencies. 
+ +* `deps_info['proguard_classpath_jars']`: +The collection of all 'deps_info['extra_classpath_jars']` values from all +dependencies. + +* `deps_info['proguard_under_test_mapping']`: +Applicable to apks with proguard enabled that have an apk_under_test. This is +the path to the apk_under_test's output proguard .mapping file. + +## <a name="target_android_app_bundle_module">Target type \ +`android_app_bundle_module`</a>: + +Corresponds to an Android app bundle module. Very similar to an APK and +inherits the same fields, except that this does not generate an installable +file (see `android_app_bundle`), and for the following omitted fields: + +* `deps_info['apk_path']`, `deps_info['incremental_apk_path']` and + `deps_info['incremental_install_json_path']` are omitted. + +* top-level `dist_jar` is omitted as well. + +In addition to `android_apk` targets though come these new fields: + +* `deps_info['proto_resources_path']`: +The path of an zip archive containing the APK's resources compiled to the +protocol buffer format (instead of regular binary xml + resources.arsc). + +* `deps_info['module_rtxt_path']`: +The path of the R.txt file generated when compiling the resources for the bundle +module. + +* `deps_info['base_whitelist_rtxt_path']`: +Optional path to an R.txt file used as a whitelist for base string resources. +This means that any string resource listed in this file *and* in +`deps_info['module_rtxt_path']` will end up in the base split APK of any +`android_app_bundle` target that uses this target as its base module. + +This ensures that such localized strings are available to all bundle installs, +even when language based splits are enabled (e.g. required for WebView strings +inside the Monochrome bundle). + + +## <a name="target_android_app_bundle">Target type `android_app_bundle`</a> + +This target type corresponds to an Android app bundle, and is built from one +or more `android_app_bundle_module` targets listed as dependencies. 
+ + +## <a name="target_dist_aar">Target type `dist_aar`</a>: + +This type corresponds to a target used to generate an `.aar` archive for +distribution. The archive's content is determined by the target's dependencies. + +This always has the following entries: + + * `deps_info['supports_android']` (always True). + * `deps_info['requires_android']` (always True). + * `deps_info['proguard_configs']` (optional). + + +## <a name="target_dist_jar">Target type `dist_jar`</a>: + +This type is similar to [`dist_aar`](#target_dist_aar) but is not +Android-specific, and used to create a `.jar` file that can be later +redistributed. + +This always has the following entries: + + * `deps_info['proguard_enabled']` (False by default). + * `deps_info['proguard_configs']` (optional). + * `deps_info['supports_android']` (True by default). + * `deps_info['requires_android']` (False by default). + + + +## <a name="target_resource_rewriter">Target type `resource_rewriter`</a>: + +The ResourceRewriter Java class is in charge of rewriting resource IDs at +runtime, for the benefit of the System WebView feature. This is a special +target type for it. + +Its `.build_config` only keeps a list of dependencies in its +`deps_info['deps_configs']` key. + +## <a name="dict_javac">The `deps_info['javac']` dictionary</a>: + +This dictionary appears in Java-related targets (e.g. `java_library`, +`android_apk` and others), and contains information related to the compilation +of Java sources, class files, and jars. + +* `javac['resource_packages']` +For `java_library` targets, this is the list of package names for all resource +dependencies for the current target. Order must match the one from +`javac['srcjars']`. For other target types, this key does not exist. + +* `javac['classpath']` +The classpath used to compile this target when annotation processors are +present. + +* `javac['interface_classpath']` +The classpath used to compile this target when annotation processors are +not present. 
These are also always used to know when a target needs to be
rebuilt.

* `javac['processor_classpath']`
The classpath listing the jars used for annotation processors. I.e. sent as
`-processorpath` when invoking `javac`.

* `javac['processor_classes']`
The list of annotation processor main classes. I.e. sent as `-processor` when
invoking `javac`.

## <a name="android_app_bundle">Target type `android_app_bundle`</a>:

This type corresponds to an Android app bundle (`.aab` file).

--------------- END_MARKDOWN ---------------------------------------------------
TODO(estevenson): Add docs for static library synchronized proguarding.
"""

import collections
import itertools
import json
import optparse
import os
import sys
import xml.dom.minidom

from util import build_utils

# Types that should never be used as a dependency of another build config.
_ROOT_TYPES = ('android_apk', 'java_binary', 'java_annotation_processor',
               'junit_binary', 'resource_rewriter', 'android_app_bundle')
# Types that should not allow code deps to pass through.
_RESOURCE_TYPES = ('android_assets', 'android_resources', 'system_java_library')


def _ExtractMarkdownDocumentation(input_text):
  """Extract Markdown documentation from a list of input strings lines.

  This generates a list of strings extracted from |input_text|, by looking
  for '-- BEGIN_MARKDOWN --' and '-- END_MARKDOWN --' line markers."""
  in_markdown = False
  result = []
  for line in input_text.splitlines():
    if in_markdown:
      if '-- END_MARKDOWN --' in line:
        in_markdown = False
      else:
        result.append(line)
    else:
      if '-- BEGIN_MARKDOWN --' in line:
        in_markdown = True

  return result

class AndroidManifest(object):
  """Minimal wrapper around an AndroidManifest.xml file (via minidom)."""

  def __init__(self, path):
    self.path = path
    dom = xml.dom.minidom.parse(path)
    manifests = dom.getElementsByTagName('manifest')
    # A valid manifest has exactly one root <manifest> element.
    assert len(manifests) == 1
    self.manifest = manifests[0]

  def GetInstrumentationElements(self):
    """Returns the <instrumentation> elements, or None if there are none."""
    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
    if len(instrumentation_els) == 0:
      return None
    return instrumentation_els

  def CheckInstrumentationElements(self, expected_package):
    """Raises unless every <instrumentation> targets |expected_package|."""
    instrs = self.GetInstrumentationElements()
    if not instrs:
      raise Exception('No <instrumentation> elements found in %s' % self.path)
    for instr in instrs:
      instrumented_package = instr.getAttributeNS(
          'http://schemas.android.com/apk/res/android', 'targetPackage')
      if instrumented_package != expected_package:
        raise Exception(
            'Wrong instrumented package. 
Expected %s, got %s' + % (expected_package, instrumented_package)) + + def GetPackageName(self): + return self.manifest.getAttribute('package') + + +dep_config_cache = {} +def GetDepConfig(path): + if not path in dep_config_cache: + with open(path) as jsonfile: + dep_config_cache[path] = json.load(jsonfile)['deps_info'] + return dep_config_cache[path] + + +def DepsOfType(wanted_type, configs): + return [c for c in configs if c['type'] == wanted_type] + + +def GetAllDepsConfigsInOrder(deps_config_paths): + def GetDeps(path): + return GetDepConfig(path)['deps_configs'] + return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps) + + +class Deps(object): + def __init__(self, direct_deps_config_paths): + self.all_deps_config_paths = GetAllDepsConfigsInOrder( + direct_deps_config_paths) + self.direct_deps_configs = [ + GetDepConfig(p) for p in direct_deps_config_paths] + self.all_deps_configs = [ + GetDepConfig(p) for p in self.all_deps_config_paths] + self.direct_deps_config_paths = direct_deps_config_paths + + def All(self, wanted_type=None): + if type is None: + return self.all_deps_configs + return DepsOfType(wanted_type, self.all_deps_configs) + + def Direct(self, wanted_type=None): + if wanted_type is None: + return self.direct_deps_configs + return DepsOfType(wanted_type, self.direct_deps_configs) + + def AllConfigPaths(self): + return self.all_deps_config_paths + + def RemoveNonDirectDep(self, path): + if path in self.direct_deps_config_paths: + raise Exception('Cannot remove direct dep.') + self.all_deps_config_paths.remove(path) + self.all_deps_configs.remove(GetDepConfig(path)) + + def GradlePrebuiltJarPaths(self): + ret = [] + + def helper(cur): + for config in cur.Direct('java_library'): + if config['is_prebuilt'] or config['gradle_treat_as_prebuilt']: + if config['jar_path'] not in ret: + ret.append(config['jar_path']) + + helper(self) + return ret + + def GradleLibraryProjectDeps(self): + ret = [] + + def helper(cur): + for config in 
cur.Direct('java_library'): + if config['is_prebuilt']: + pass + elif config['gradle_treat_as_prebuilt']: + helper(Deps(config['deps_configs'])) + elif config not in ret: + ret.append(config) + + helper(self) + return ret + + +def _MergeAssets(all_assets): + """Merges all assets from the given deps. + + Returns: + A tuple of: (compressed, uncompressed, locale_paks) + |compressed| and |uncompressed| are lists of "srcPath:zipPath". srcPath is + the path of the asset to add, and zipPath is the location within the zip + (excluding assets/ prefix). + |locale_paks| is a set of all zipPaths that have been marked as + treat_as_locale_paks=true. + """ + compressed = {} + uncompressed = {} + locale_paks = set() + for asset_dep in all_assets: + entry = asset_dep['assets'] + disable_compression = entry.get('disable_compression') + treat_as_locale_paks = entry.get('treat_as_locale_paks') + dest_map = uncompressed if disable_compression else compressed + other_map = compressed if disable_compression else uncompressed + outputs = entry.get('outputs', []) + for src, dest in itertools.izip_longest(entry['sources'], outputs): + if not dest: + dest = os.path.basename(src) + # Merge so that each path shows up in only one of the lists, and that + # deps of the same target override previous ones. + other_map.pop(dest, 0) + dest_map[dest] = src + if treat_as_locale_paks: + locale_paks.add(dest) + + def create_list(asset_map): + ret = ['%s:%s' % (src, dest) for dest, src in asset_map.iteritems()] + # Sort to ensure deterministic ordering. 
    ret.sort()
    return ret

  return create_list(compressed), create_list(uncompressed), locale_paks


def _ResolveGroups(configs):
  """Returns a list of configs with all groups inlined."""
  ret = list(configs)
  while True:
    groups = DepsOfType('group', ret)
    if not groups:
      return ret
    for config in groups:
      # Replace each group entry in-place with its own deps' configs.
      index = ret.index(config)
      expanded_configs = [GetDepConfig(p) for p in config['deps_configs']]
      ret[index:index + 1] = expanded_configs


def _DepsFromPaths(dep_paths, target_type, filter_root_targets=True):
  """Resolves all groups and trims dependency branches that we never want.

  E.g. When a resource or asset depends on an apk target, the intent is to
  include the .apk as a resource/asset, not to have the apk's classpath added.
  """
  configs = [GetDepConfig(p) for p in dep_paths]
  groups = DepsOfType('group', configs)
  configs = _ResolveGroups(configs)
  configs += groups
  # Don't allow root targets to be considered as a dep.
  if filter_root_targets:
    configs = [c for c in configs if c['type'] not in _ROOT_TYPES]

  # Don't allow java libraries to cross through assets/resources.
  if target_type in _RESOURCE_TYPES:
    configs = [c for c in configs if c['type'] in _RESOURCE_TYPES]

  return Deps([c['path'] for c in configs])


def _ExtractSharedLibsFromRuntimeDeps(runtime_deps_file):
  """Returns the stripped .so paths listed in a GN runtime_deps file.

  NOTE(review): the result is reversed relative to file order - presumably
  so dependencies precede dependents; confirm with callers.
  """
  ret = []
  with open(runtime_deps_file) as f:
    for line in f:
      line = line.rstrip()
      if not line.endswith('.so'):
        continue
      # Only unstripped .so files are listed in runtime deps.
      # Convert to the stripped .so by going up one directory.
      ret.append(os.path.normpath(line.replace('lib.unstripped/', '')))
  ret.reverse()
  return ret


def _CreateJavaLibrariesList(library_paths):
  """Returns a java literal array with the "base" library names:
  e.g.
libfoo.so -> foo + """ + return ('{%s}' % ','.join(['"%s"' % s[3:-3] for s in library_paths])) + + +def _CreateJavaLocaleListFromAssets(assets, locale_paks): + """Returns a java literal array from a list of locale assets. + + Args: + assets: A list of all APK asset paths in the form 'src:dst' + locale_paks: A list of asset paths that correponds to the locale pak + files of interest. Each |assets| entry will have its 'dst' part matched + against it to determine if they are part of the result. + Returns: + A string that is a Java source literal array listing the locale names + of the corresponding asset files, without directory or .pak suffix. + E.g. '{"en-GB", "en-US", "es-ES", "fr", ... }' + """ + assets_paths = [a.split(':')[1] for a in assets] + locales = [os.path.basename(a)[:-4] for a in assets_paths if a in locale_paks] + return '{%s}' % ','.join(['"%s"' % l for l in sorted(locales)]) + + +def main(argv): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + parser.add_option('--build-config', help='Path to build_config output.') + parser.add_option( + '--type', + help='Type of this target (e.g. 
android_library).') + parser.add_option( + '--deps-configs', + help='GN-list of dependent build_config files.') + parser.add_option( + '--annotation-processor-configs', + help='GN-list of build_config files for annotation processors.') + parser.add_option( + '--classpath-deps-configs', + help='GN-list of build_config files for libraries to include as ' + 'build-time-only classpath.') + + # android_resources options + parser.add_option('--srcjar', help='Path to target\'s resources srcjar.') + parser.add_option('--resources-zip', help='Path to target\'s resources zip.') + parser.add_option('--r-text', help='Path to target\'s R.txt file.') + parser.add_option('--package-name', + help='Java package name for these resources.') + parser.add_option('--android-manifest', help='Path to android manifest.') + parser.add_option('--resource-dirs', action='append', default=[], + help='GYP-list of resource dirs') + + # android_assets options + parser.add_option('--asset-sources', help='List of asset sources.') + parser.add_option('--asset-renaming-sources', + help='List of asset sources with custom destinations.') + parser.add_option('--asset-renaming-destinations', + help='List of asset custom destinations.') + parser.add_option('--disable-asset-compression', action='store_true', + help='Whether to disable asset compression.') + parser.add_option('--treat-as-locale-paks', action='store_true', + help='Consider the assets as locale paks in BuildConfig.java') + + # java library options + parser.add_option('--jar-path', help='Path to target\'s jar output.') + parser.add_option('--unprocessed-jar-path', + help='Path to the .jar to use for javac classpath purposes.') + parser.add_option('--interface-jar-path', + help='Path to the .interface.jar to use for javac classpath purposes.') + parser.add_option('--is-prebuilt', action='store_true', + help='Whether the jar was compiled or pre-compiled.') + parser.add_option('--java-sources-file', help='Path to .sources file') + 
parser.add_option('--bundled-srcjars', + help='GYP-list of .srcjars that have been included in this java_library.') + parser.add_option('--supports-android', action='store_true', + help='Whether this library supports running on the Android platform.') + parser.add_option('--requires-android', action='store_true', + help='Whether this library requires running on the Android platform.') + parser.add_option('--bypass-platform-checks', action='store_true', + help='Bypass checks for support/require Android platform.') + parser.add_option('--extra-classpath-jars', + help='GYP-list of .jar files to include on the classpath when compiling, ' + 'but not to include in the final binary.') + parser.add_option('--gradle-treat-as-prebuilt', action='store_true', + help='Whether this library should be treated as a prebuilt library by ' + 'generate_gradle.py.') + parser.add_option('--main-class', + help='Main class for java_binary or java_annotation_processor targets.') + parser.add_option('--java-resources-jar-path', + help='Path to JAR that contains java resources. Everything ' + 'from this JAR except meta-inf/ content and .class files ' + 'will be added to the final APK.') + + # android library options + parser.add_option('--dex-path', help='Path to target\'s dex output.') + + # native library options + parser.add_option('--shared-libraries-runtime-deps', + help='Path to file containing runtime deps for shared ' + 'libraries.') + parser.add_option('--native-libs', + action='append', + help='GN-list of native libraries for primary ' + 'android-abi. Can be specified multiple times.', + default=[]) + parser.add_option('--secondary-abi-shared-libraries-runtime-deps', + help='Path to file containing runtime deps for secondary ' + 'abi shared libraries.') + parser.add_option('--secondary-native-libs', + action='append', + help='GN-list of native libraries for secondary ' + 'android-abi. 
Can be specified multiple times.', + default=[]) + parser.add_option( + '--native-lib-placeholders', + action='append', + help='GN-list of native library placeholders to add.', + default=[]) + parser.add_option( + '--secondary-native-lib-placeholders', + action='append', + help='GN-list of native library placeholders to add ' + 'for the secondary android-abi.', + default=[]) + parser.add_option('--uncompress-shared-libraries', default=False, + action='store_true', + help='Whether to store native libraries uncompressed') + # apk options + parser.add_option('--apk-path', help='Path to the target\'s apk output.') + parser.add_option('--incremental-apk-path', + help="Path to the target's incremental apk output.") + parser.add_option('--incremental-install-json-path', + help="Path to the target's generated incremental install " + "json.") + parser.add_option( + '--static-library-dependent-configs', + help='GN list of .build_configs of targets that use this target as a ' + 'static library.') + + parser.add_option('--tested-apk-config', + help='Path to the build config of the tested apk (for an instrumentation ' + 'test apk).') + parser.add_option('--proguard-enabled', action='store_true', + help='Whether proguard is enabled for this apk or bundle module.') + parser.add_option('--proguard-configs', + help='GN-list of proguard flag files to use in final apk.') + parser.add_option('--proguard-mapping-path', + help='Path to jar created by ProGuard step') + parser.add_option('--fail', + help='GN-list of error message lines to fail with.') + + parser.add_option('--final-dex-path', + help='Path to final input classes.dex (or classes.zip) to ' + 'use in final apk.') + parser.add_option('--apk-proto-resources', + help='Path to resources compiled in protocol buffer format ' + ' for this apk.') + parser.add_option( + '--module-rtxt-path', + help='Path to R.txt file for resources in a bundle module.') + parser.add_option( + '--base-whitelist-rtxt-path', + help='Path to R.txt file 
for the base resources whitelist.') + + parser.add_option('--generate-markdown-format-doc', action='store_true', + help='Dump the Markdown .build_config format documentation ' + 'then exit immediately.') + + parser.add_option( + '--base-module-build-config', + help='Path to the base module\'s build config ' + 'if this is a feature module.') + + options, args = parser.parse_args(argv) + + if args: + parser.error('No positional arguments should be given.') + + if options.generate_markdown_format_doc: + doc_lines = _ExtractMarkdownDocumentation(__doc__) + for line in doc_lines: + print(line) + return 0 + + if options.fail: + parser.error('\n'.join(build_utils.ParseGnList(options.fail))) + + jar_path_options = ['jar_path', 'unprocessed_jar_path', 'interface_jar_path'] + required_options_map = { + 'android_apk': ['build_config', 'dex_path', 'final_dex_path'] + \ + jar_path_options, + 'android_app_bundle_module': ['build_config', 'dex_path', + 'final_dex_path'] + jar_path_options, + 'android_assets': ['build_config'], + 'android_resources': ['build_config', 'resources_zip'], + 'dist_aar': ['build_config'], + 'dist_jar': ['build_config'], + 'group': ['build_config'], + 'java_annotation_processor': ['build_config', 'main_class'], + 'java_binary': ['build_config'], + 'java_library': ['build_config'] + jar_path_options, + 'junit_binary': ['build_config'], + 'resource_rewriter': ['build_config'], + 'system_java_library': ['build_config'], + 'android_app_bundle': ['build_config'], + } + required_options = required_options_map.get(options.type) + if not required_options: + raise Exception('Unknown type: <%s>' % options.type) + + build_utils.CheckOptions(options, parser, required_options) + + if options.type != 'android_app_bundle_module': + if options.apk_proto_resources: + raise Exception('--apk-proto-resources can only be used with ' + '--type=android_app_bundle_module') + if options.module_rtxt_path: + raise Exception('--module-rxt-path can only be used with ' + 
'--type=android_app_bundle_module') + if options.base_whitelist_rtxt_path: + raise Exception('--base-whitelist-rtxt-path can only be used with ' + '--type=android_app_bundle_module') + + is_apk_or_module_target = options.type in ('android_apk', + 'android_app_bundle_module') + + if options.uncompress_shared_libraries: + if not is_apk_or_module_target: + raise Exception('--uncompressed-shared-libraries can only be used ' + 'with --type=android_apk or ' + '--type=android_app_bundle_module') + + if options.jar_path and options.supports_android and not options.dex_path: + raise Exception('java_library that supports Android requires a dex path.') + if any(getattr(options, x) for x in jar_path_options): + for attr in jar_path_options: + if not getattr(options, attr): + raise('Expected %s to be set.' % attr) + + if options.requires_android and not options.supports_android: + raise Exception( + '--supports-android is required when using --requires-android') + + is_java_target = options.type in ( + 'java_binary', 'junit_binary', 'java_annotation_processor', + 'java_library', 'android_apk', 'dist_aar', 'dist_jar', + 'system_java_library', 'android_app_bundle_module') + + is_static_library_dex_provider_target = ( + options.static_library_dependent_configs and options.proguard_enabled) + if is_static_library_dex_provider_target and options.type != 'android_apk': + raise Exception( + '--static-library-dependent-configs only supports --type=android_apk') + + options.static_library_dependent_configs = build_utils.ParseGnList( + options.static_library_dependent_configs) + static_library_dependent_configs_by_path = { + p: GetDepConfig(p) + for p in options.static_library_dependent_configs + } + + deps = _DepsFromPaths( + build_utils.ParseGnList(options.deps_configs), options.type) + processor_deps = _DepsFromPaths( + build_utils.ParseGnList(options.annotation_processor_configs or ''), + options.type, filter_root_targets=False) + classpath_deps = _DepsFromPaths( + 
build_utils.ParseGnList(options.classpath_deps_configs or ''), + options.type) + + all_inputs = sorted( + set(deps.AllConfigPaths() + processor_deps.AllConfigPaths() + + classpath_deps.AllConfigPaths() + + list(static_library_dependent_configs_by_path))) + + system_library_deps = deps.Direct('system_java_library') + direct_library_deps = deps.Direct('java_library') + direct_group_deps = deps.Direct('group') + all_group_deps = deps.All('group') + all_library_deps = deps.All('java_library') + all_resources_deps = deps.All('android_resources') + all_classpath_library_deps = classpath_deps.All('java_library') + + base_module_build_config = None + if options.base_module_build_config: + with open(options.base_module_build_config, 'r') as f: + base_module_build_config = json.load(f) + + # Initialize some common config. + # Any value that needs to be queryable by dependents must go within deps_info. + config = { + 'deps_info': { + 'name': os.path.basename(options.build_config), + 'path': options.build_config, + 'type': options.type, + 'deps_configs': deps.direct_deps_config_paths + }, + # Info needed only by generate_gradle.py. + 'gradle': {} + } + deps_info = config['deps_info'] + gradle = config['gradle'] + + if options.type == 'android_apk' and options.tested_apk_config: + tested_apk_deps = Deps([options.tested_apk_config]) + tested_apk_config = tested_apk_deps.Direct()[0] + tested_apk_resources_deps = tested_apk_deps.All('android_resources') + gradle['apk_under_test'] = tested_apk_config['name'] + all_resources_deps = [ + d for d in all_resources_deps if not d in tested_apk_resources_deps] + + # Required for generating gradle files. 
+ if options.type == 'java_library': + deps_info['is_prebuilt'] = bool(options.is_prebuilt) + deps_info['gradle_treat_as_prebuilt'] = options.gradle_treat_as_prebuilt + + if options.android_manifest: + deps_info['android_manifest'] = options.android_manifest + + if is_java_target: + if options.java_sources_file: + deps_info['java_sources_file'] = options.java_sources_file + if options.bundled_srcjars: + gradle['bundled_srcjars'] = ( + build_utils.ParseGnList(options.bundled_srcjars)) + + gradle['dependent_android_projects'] = [] + gradle['dependent_java_projects'] = [] + gradle['dependent_prebuilt_jars'] = deps.GradlePrebuiltJarPaths() + + if options.main_class: + deps_info['main_class'] = options.main_class + + for c in deps.GradleLibraryProjectDeps(): + if c['requires_android']: + gradle['dependent_android_projects'].append(c['path']) + else: + gradle['dependent_java_projects'].append(c['path']) + + # TODO(tiborg): Remove creation of JNI info for type group and java_library + # once we can generate the JNI registration based on APK / module targets as + # opposed to groups and libraries. + if is_apk_or_module_target or options.type in ( + 'group', 'java_library', 'junit_binary'): + deps_info['jni'] = {} + all_java_sources = [c['java_sources_file'] for c in all_library_deps + if 'java_sources_file' in c] + if options.java_sources_file: + all_java_sources.append(options.java_sources_file) + + if options.apk_proto_resources: + deps_info['proto_resources_path'] = options.apk_proto_resources + + if options.module_rtxt_path: + deps_info['module_rtxt_path'] = options.module_rtxt_path + if options.base_whitelist_rtxt_path: + deps_info['base_whitelist_rtxt_path'] = options.base_whitelist_rtxt_path + else: + # Ensure there is an entry, even if it is empty, for modules + # that don't need such a whitelist. 
+ deps_info['base_whitelist_rtxt_path'] = '' + + if is_java_target: + deps_info['requires_android'] = bool(options.requires_android) + deps_info['supports_android'] = bool(options.supports_android) + + if not options.bypass_platform_checks: + deps_require_android = (all_resources_deps + + [d['name'] for d in all_library_deps if d['requires_android']]) + deps_not_support_android = ( + [d['name'] for d in all_library_deps if not d['supports_android']]) + + if deps_require_android and not options.requires_android: + raise Exception('Some deps require building for the Android platform: ' + + str(deps_require_android)) + + if deps_not_support_android and options.supports_android: + raise Exception('Not all deps support the Android platform: ' + + str(deps_not_support_android)) + + if is_java_target: + # Classpath values filled in below (after applying tested_apk_config). + config['javac'] = {} + if options.jar_path: + deps_info['jar_path'] = options.jar_path + deps_info['unprocessed_jar_path'] = options.unprocessed_jar_path + deps_info['interface_jar_path'] = options.interface_jar_path + if options.dex_path: + deps_info['dex_path'] = options.dex_path + if options.type == 'android_apk': + deps_info['apk_path'] = options.apk_path + deps_info['incremental_apk_path'] = options.incremental_apk_path + deps_info['incremental_install_json_path'] = ( + options.incremental_install_json_path) + + if options.type == 'android_assets': + all_asset_sources = [] + if options.asset_renaming_sources: + all_asset_sources.extend( + build_utils.ParseGnList(options.asset_renaming_sources)) + if options.asset_sources: + all_asset_sources.extend(build_utils.ParseGnList(options.asset_sources)) + + deps_info['assets'] = { + 'sources': all_asset_sources + } + if options.asset_renaming_destinations: + deps_info['assets']['outputs'] = ( + build_utils.ParseGnList(options.asset_renaming_destinations)) + if options.disable_asset_compression: + deps_info['assets']['disable_compression'] = True + if 
options.treat_as_locale_paks: + deps_info['assets']['treat_as_locale_paks'] = True + + if options.type == 'android_resources': + deps_info['resources_zip'] = options.resources_zip + if options.srcjar: + deps_info['srcjar'] = options.srcjar + if options.android_manifest: + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if options.package_name: + deps_info['package_name'] = options.package_name + if options.r_text: + deps_info['r_text'] = options.r_text + + deps_info['resources_dirs'] = [] + if options.resource_dirs: + for gyp_list in options.resource_dirs: + deps_info['resources_dirs'].extend(build_utils.ParseGnList(gyp_list)) + + if options.requires_android and is_java_target: + # Lint all resources that are not already linted by a dependent library. + owned_resource_dirs = set() + owned_resource_zips = set() + owned_resource_srcjars = set() + for c in all_resources_deps: + # Always use resources_dirs in favour of resources_zips so that lint error + # messages have paths that are closer to reality (and to avoid needing to + # extract during lint). + if c['resources_dirs']: + owned_resource_dirs.update(c['resources_dirs']) + else: + owned_resource_zips.add(c['resources_zip']) + srcjar = c.get('srcjar') + if srcjar: + owned_resource_srcjars.add(srcjar) + + for c in itertools.chain(all_library_deps, all_classpath_library_deps): + if c['requires_android']: + owned_resource_dirs.difference_update(c['owned_resources_dirs']) + owned_resource_zips.difference_update(c['owned_resources_zips']) + # Many .aar files include R.class files in them, as it makes it easier + # for IDEs to resolve symbols. However, including them is not required + # and not all prebuilts do. Rather than try to detect their presense, + # just assume they are not there. The only consequence is redundant + # compilation of the R.class. 
+ if not c['is_prebuilt']: + owned_resource_srcjars.difference_update(c['owned_resource_srcjars']) + deps_info['owned_resources_dirs'] = sorted(owned_resource_dirs) + deps_info['owned_resources_zips'] = sorted(owned_resource_zips) + deps_info['owned_resource_srcjars'] = sorted(owned_resource_srcjars) + + if options.type == 'java_library': + # Used to strip out R.class for android_prebuilt()s. + config['javac']['resource_packages'] = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c] + + if options.type in ( + 'android_resources', 'android_apk', 'junit_binary', 'resource_rewriter', + 'dist_aar', 'android_app_bundle_module'): + config['resources'] = {} + + dependency_zips = [ + c['resources_zip'] for c in all_resources_deps if c['resources_zip'] + ] + extra_package_names = [] + extra_r_text_files = [] + + if options.type != 'android_resources': + extra_package_names = [ + c['package_name'] for c in all_resources_deps if 'package_name' in c] + extra_r_text_files = [ + c['r_text'] for c in all_resources_deps if 'r_text' in c] + + # For feature modules, remove any resources that already exist in the base + # module. 
+ if base_module_build_config: + dependency_zips = [ + c for c in dependency_zips + if c not in base_module_build_config['resources']['dependency_zips'] + ] + extra_package_names = [ + c for c in extra_package_names if c not in + base_module_build_config['resources']['extra_package_names'] + ] + extra_r_text_files = [ + c for c in extra_r_text_files if c not in + base_module_build_config['resources']['extra_r_text_files'] + ] + + config['resources']['dependency_zips'] = dependency_zips + config['resources']['extra_package_names'] = extra_package_names + config['resources']['extra_r_text_files'] = extra_r_text_files + if options.type == 'android_apk' and options.tested_apk_config: + config['resources']['arsc_package_name'] = ( + tested_apk_config['package_name']) + + if is_apk_or_module_target: + deps_dex_files = [c['dex_path'] for c in all_library_deps] + + if options.type == 'group': + if options.extra_classpath_jars: + # These are .jars to add to javac classpath but not to runtime classpath. + extra_jars = build_utils.ParseGnList(options.extra_classpath_jars) + deps_info['extra_classpath_jars'] = extra_jars + deps_info['extra_classpath_interface_jars'] = extra_jars + + if is_java_target: + # The classpath used to compile this target when annotation processors are + # present. + javac_classpath = [ + c['unprocessed_jar_path'] for c in direct_library_deps] + # The classpath used to compile this target when annotation processors are + # not present. These are also always used to know when a target needs to be + # rebuilt. + javac_interface_classpath = [ + c['interface_jar_path'] for c in direct_library_deps] + # The classpath used for error prone. + javac_full_interface_classpath = [ + c['interface_jar_path'] for c in all_library_deps] + # The classpath used for bytecode-rewritting. 
+ javac_full_classpath = [ + c['unprocessed_jar_path'] for c in all_library_deps] + + for dep in direct_group_deps: + javac_classpath.extend(dep.get('extra_classpath_jars', [])) + javac_interface_classpath.extend( + dep.get('extra_classpath_interface_jars', [])) + for dep in all_group_deps: + javac_full_classpath.extend(dep.get('extra_classpath_jars', [])) + javac_full_interface_classpath.extend( + dep.get('extra_classpath_interface_jars', [])) + + # Deps to add to the compile-time classpath (but not the runtime classpath). + # TODO(agrieve): Might be less confusing to fold these into bootclasspath. + javac_extra_jars = [ + c['unprocessed_jar_path'] for c in classpath_deps.All('java_library') + ] + extra_jars = [c['jar_path'] for c in classpath_deps.All('java_library')] + interface_extra_jars = [ + c['interface_jar_path'] for c in classpath_deps.All('java_library') + ] + + # These are jars specified by input_jars_paths that almost never change. + # Just add them directly to all the *extra_jars. + if options.extra_classpath_jars: + # These are .jars to add to javac classpath but not to runtime classpath. 
+ javac_extra_jars.extend( + build_utils.ParseGnList(options.extra_classpath_jars)) + extra_jars.extend(build_utils.ParseGnList(options.extra_classpath_jars)) + interface_extra_jars.extend( + build_utils.ParseGnList(options.extra_classpath_jars)) + + if extra_jars: + deps_info['extra_classpath_jars'] = extra_jars + + if interface_extra_jars: + deps_info['extra_classpath_interface_jars'] = interface_extra_jars + + javac_extra_jars = [p for p in javac_extra_jars if p not in javac_classpath] + javac_classpath.extend(javac_extra_jars) + javac_full_classpath.extend( + p for p in javac_extra_jars if p not in javac_full_classpath) + + interface_extra_jars = [ + p for p in interface_extra_jars if p not in javac_interface_classpath + ] + javac_interface_classpath.extend(interface_extra_jars) + javac_full_interface_classpath.extend( + p for p in interface_extra_jars + if p not in javac_full_interface_classpath) + + if is_java_target or options.type == 'android_app_bundle': + # The classpath to use to run this target (or as an input to ProGuard). + java_full_classpath = [] + if is_java_target and options.jar_path: + java_full_classpath.append(options.jar_path) + java_full_classpath.extend(c['jar_path'] for c in all_library_deps) + if options.type == 'android_app_bundle': + for d in deps.Direct('android_app_bundle_module'): + java_full_classpath.extend( + c for c in d.get('java_runtime_classpath', []) + if c not in java_full_classpath) + + all_configs = build_utils.ParseGnList(options.proguard_configs) + deps_info['proguard_configs'] = list(all_configs) + extra_jars = [] + + if is_static_library_dex_provider_target: + # Map classpath entries to configs that include them in their classpath. 
+ configs_by_classpath_entry = collections.defaultdict(list) + for config_path, dep_config in ( + static_library_dependent_configs_by_path.iteritems()): + all_configs.extend(dep_config['proguard_all_configs']) + extra_jars.extend(dep_config['proguard_classpath_jars']) + all_java_sources.extend(dep_config['jni']['all_source']) + for cp_entry in dep_config['java_runtime_classpath']: + # The APK Java targets for the static library dependent targets will + # have some of the same classes (R.java) due to shared resource + # dependencies. To avoid Proguard failures due to duplicate classes, we + # merge the APK jars into the static library's jar_path as a + # preprocessing build step. + if cp_entry != dep_config['jar_path']: + configs_by_classpath_entry[cp_entry].append(config_path) + + for cp_entry in java_full_classpath: + configs_by_classpath_entry[cp_entry].append(options.build_config) + + # Map configs to classpath entries that should be included in their final + # dex. + classpath_entries_by_owning_config = collections.defaultdict(list) + for cp_entry, candidate_configs in configs_by_classpath_entry.iteritems(): + config_path = (candidate_configs[0] + if len(candidate_configs) == 1 else options.build_config) + classpath_entries_by_owning_config[config_path].append(cp_entry) + java_full_classpath.append(cp_entry) + + classpath_entries_by_owning_config[options.build_config].append( + deps_info['jar_path']) + + java_full_classpath = sorted(set(java_full_classpath)) + deps_info['static_library_dependent_classpath_configs'] = { + path: sorted(set(classpath)) + for path, classpath in classpath_entries_by_owning_config.iteritems() + } + + if is_apk_or_module_target or options.type in ('group', 'java_library', + 'junit_binary'): + deps_info['jni']['all_source'] = sorted(set(all_java_sources)) + + system_jars = [c['jar_path'] for c in system_library_deps] + system_interface_jars = [c['interface_jar_path'] for c in system_library_deps] + if system_library_deps: + 
config['android'] = {} + config['android']['sdk_interface_jars'] = system_interface_jars + config['android']['sdk_jars'] = system_jars + + if options.type in ('android_apk', 'dist_aar', + 'dist_jar', 'android_app_bundle_module', 'android_app_bundle'): + for c in all_library_deps: + all_configs.extend(p for p in c.get('proguard_configs', [])) + extra_jars.extend(p for p in c.get('extra_classpath_jars', [])) + for c in all_group_deps: + extra_jars.extend(p for p in c.get('extra_classpath_jars', [])) + if options.type == 'android_app_bundle': + for c in deps.Direct('android_app_bundle_module'): + all_configs.extend(p for p in c.get('proguard_configs', [])) + if options.type == 'android_app_bundle': + for d in deps.Direct('android_app_bundle_module'): + extra_jars.extend( + c for c in d.get('proguard_classpath_jars', []) + if c not in extra_jars) + + if options.type == 'android_app_bundle': + deps_proguard_enabled = [] + deps_proguard_disabled = [] + for d in deps.Direct('android_app_bundle_module'): + if not d['java_runtime_classpath']: + # We don't care about modules that have no Java code for proguarding. + continue + if d['proguard_enabled']: + deps_proguard_enabled.append(d['name']) + else: + deps_proguard_disabled.append(d['name']) + if deps_proguard_enabled and deps_proguard_disabled: + raise Exception('Deps %s have proguard enabled while deps %s have ' + 'proguard disabled' % (deps_proguard_enabled, + deps_proguard_disabled)) + else: + deps_info['proguard_enabled'] = bool(options.proguard_enabled) + if options.proguard_mapping_path: + deps_info['proguard_mapping_path'] = options.proguard_mapping_path + + # The java code for an instrumentation test apk is assembled differently for + # ProGuard vs. non-ProGuard. + # + # Without ProGuard: Each library's jar is dexed separately and then combined + # into a single classes.dex. A test apk will include all dex files not already + # present in the apk-under-test. 
At runtime all test code lives in the test + # apk, and the program code lives in the apk-under-test. + # + # With ProGuard: Each library's .jar file is fed into ProGuard, which outputs + # a single .jar, which is then dexed into a classes.dex. A test apk includes + # all jar files from the program and the tests because having them separate + # doesn't work with ProGuard's whole-program optimizations. Although the + # apk-under-test still has all of its code in its classes.dex, none of it is + # used at runtime because the copy of it within the test apk takes precidence. + if options.type == 'android_apk' and options.tested_apk_config: + if tested_apk_config['proguard_enabled']: + assert options.proguard_enabled, ('proguard must be enabled for ' + 'instrumentation apks if it\'s enabled for the tested apk.') + # Mutating lists, so no need to explicitly re-assign to dict. + all_configs.extend(p for p in tested_apk_config['proguard_all_configs']) + extra_jars.extend(p for p in tested_apk_config['proguard_classpath_jars']) + tested_apk_config = GetDepConfig(options.tested_apk_config) + deps_info['proguard_under_test_mapping'] = ( + tested_apk_config['proguard_mapping_path']) + elif options.proguard_enabled: + # Not sure why you'd want to proguard the test apk when the under-test apk + # is not proguarded, but it's easy enough to support. + deps_info['proguard_under_test_mapping'] = '' + + expected_tested_package = tested_apk_config['package_name'] + AndroidManifest(options.android_manifest).CheckInstrumentationElements( + expected_tested_package) + + # Add all tested classes to the test's classpath to ensure that the test's + # java code is a superset of the tested apk's java code + java_full_classpath.extend( + p for p in tested_apk_config['java_runtime_classpath'] + if p not in java_full_classpath) + # Include in the classpath classes that are added directly to the apk under + # test (those that are not a part of a java_library). 
+ javac_classpath.append(tested_apk_config['unprocessed_jar_path']) + javac_full_classpath.append(tested_apk_config['unprocessed_jar_path']) + javac_interface_classpath.append(tested_apk_config['interface_jar_path']) + javac_full_interface_classpath.append( + tested_apk_config['interface_jar_path']) + javac_full_interface_classpath.extend( + p for p in tested_apk_config['javac_full_interface_classpath'] + if p not in javac_full_interface_classpath) + javac_full_classpath.extend( + p for p in tested_apk_config['javac_full_classpath'] + if p not in javac_full_classpath) + + # Exclude dex files from the test apk that exist within the apk under test. + # TODO(agrieve): When proguard is enabled, this filtering logic happens + # within proguard_util.py. Move the logic for the proguard case into + # here as well. + tested_apk_library_deps = tested_apk_deps.All('java_library') + tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps] + deps_dex_files = [ + p for p in deps_dex_files if not p in tested_apk_deps_dex_files] + + if options.type in ('android_apk', 'dist_aar', 'dist_jar', + 'android_app_bundle_module', 'android_app_bundle'): + deps_info['proguard_all_configs'] = sorted(set(all_configs)) + deps_info['proguard_classpath_jars'] = sorted(set(extra_jars)) + + # Dependencies for the final dex file of an apk. + if is_apk_or_module_target: + config['final_dex'] = {} + dex_config = config['final_dex'] + dex_config['dependency_dex_files'] = deps_dex_files + dex_config['path'] = options.final_dex_path + + if is_java_target: + config['javac']['classpath'] = javac_classpath + config['javac']['interface_classpath'] = javac_interface_classpath + # Direct() will be of type 'java_annotation_processor'. 
+ config['javac']['processor_classpath'] = [ + c['jar_path'] for c in processor_deps.Direct() if c.get('jar_path')] + [ + c['jar_path'] for c in processor_deps.All('java_library')] + config['javac']['processor_classes'] = [ + c['main_class'] for c in processor_deps.Direct()] + deps_info['javac_full_classpath'] = javac_full_classpath + deps_info['javac_full_interface_classpath'] = javac_full_interface_classpath + elif options.type == 'android_app_bundle': + # bundles require javac_full_classpath to create .aab.jar.info. + javac_full_classpath = set() + for d in deps.Direct('android_app_bundle_module'): + javac_full_classpath.update(p for p in d['javac_full_classpath']) + javac_full_classpath.add(d['jar_path']) + deps_info['javac_full_classpath'] = sorted(javac_full_classpath) + + if options.type in ('android_apk', 'dist_jar', 'java_binary', 'junit_binary', + 'android_app_bundle_module', 'android_app_bundle'): + deps_info['java_runtime_classpath'] = java_full_classpath + + if options.type in ('android_apk', 'dist_jar'): + all_interface_jars = [] + if options.jar_path: + all_interface_jars.append(options.interface_jar_path) + all_interface_jars.extend(c['interface_jar_path'] for c in all_library_deps) + + config['dist_jar'] = { + 'all_interface_jars': all_interface_jars, + } + + if is_apk_or_module_target: + manifest = AndroidManifest(options.android_manifest) + deps_info['package_name'] = manifest.GetPackageName() + if not options.tested_apk_config and manifest.GetInstrumentationElements(): + # This must then have instrumentation only for itself. 
+ manifest.CheckInstrumentationElements(manifest.GetPackageName()) + + library_paths = [] + java_libraries_list = None + if options.shared_libraries_runtime_deps: + library_paths = _ExtractSharedLibsFromRuntimeDeps( + options.shared_libraries_runtime_deps) + java_libraries_list = _CreateJavaLibrariesList(library_paths) + all_inputs.append(options.shared_libraries_runtime_deps) + + secondary_abi_library_paths = [] + if options.secondary_abi_shared_libraries_runtime_deps: + secondary_abi_library_paths = _ExtractSharedLibsFromRuntimeDeps( + options.secondary_abi_shared_libraries_runtime_deps) + all_inputs.append(options.secondary_abi_shared_libraries_runtime_deps) + + secondary_abi_library_paths.extend( + build_utils.ParseGnList(options.secondary_native_libs)) + + native_library_placeholder_paths = build_utils.ParseGnList( + options.native_lib_placeholders) + + secondary_native_library_placeholder_paths = build_utils.ParseGnList( + options.secondary_native_lib_placeholders) + + extra_shared_libraries = build_utils.ParseGnList(options.native_libs) + + config['native'] = { + 'libraries': + library_paths, + 'native_library_placeholders': + native_library_placeholder_paths, + 'secondary_abi_libraries': + secondary_abi_library_paths, + 'secondary_native_library_placeholders': + secondary_native_library_placeholder_paths, + 'java_libraries_list': + java_libraries_list, + 'uncompress_shared_libraries': + options.uncompress_shared_libraries, + 'extra_shared_libraries': + extra_shared_libraries, + } + config['assets'], config['uncompressed_assets'], locale_paks = ( + _MergeAssets(deps.All('android_assets'))) + config['compressed_locales_java_list'] = _CreateJavaLocaleListFromAssets( + config['assets'], locale_paks) + config['uncompressed_locales_java_list'] = _CreateJavaLocaleListFromAssets( + config['uncompressed_assets'], locale_paks) + + config['extra_android_manifests'] = filter(None, ( + d.get('android_manifest') for d in all_resources_deps)) + + # Collect java resources 
+ java_resources_jars = [d['java_resources_jar'] for d in all_library_deps + if 'java_resources_jar' in d] + if options.tested_apk_config: + tested_apk_resource_jars = [d['java_resources_jar'] + for d in tested_apk_library_deps + if 'java_resources_jar' in d] + java_resources_jars = [jar for jar in java_resources_jars + if jar not in tested_apk_resource_jars] + config['java_resources_jars'] = java_resources_jars + + if options.java_resources_jar_path: + deps_info['java_resources_jar'] = options.java_resources_jar_path + + build_utils.WriteJson(config, options.build_config, only_if_changed=True) + + if options.depfile: + build_utils.WriteDepfile(options.depfile, options.build_config, all_inputs, + add_pydeps=False) # pydeps listed in GN. + + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/deps/v8/build/android/gyp/write_build_config.pydeps b/deps/v8/build/android/gyp/write_build_config.pydeps new file mode 100644 index 0000000000..e317c47cae --- /dev/null +++ b/deps/v8/build/android/gyp/write_build_config.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_build_config.pydeps build/android/gyp/write_build_config.py +../../gn_helpers.py +util/__init__.py +util/build_utils.py +util/md5_check.py +write_build_config.py diff --git a/deps/v8/build/android/gyp/write_ordered_libraries.py b/deps/v8/build/android/gyp/write_ordered_libraries.py new file mode 100755 index 0000000000..61da64ce26 --- /dev/null +++ b/deps/v8/build/android/gyp/write_ordered_libraries.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python +# +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Writes dependency ordered list of native libraries. + +The list excludes any Android system libraries, as those are not bundled with +the APK. 
+ +This list of libraries is used for several steps of building an APK. +In the component build, the --input-libraries only needs to be the top-level +library (i.e. libcontent_shell_content_view). This will then use readelf to +inspect the shared libraries and determine the full list of (non-system) +libraries that should be included in the APK. +""" + +# TODO(cjhopman): See if we can expose the list of library dependencies from +# gyp, rather than calculating it ourselves. +# http://crbug.com/225558 + +import optparse +import os +import re +import sys + +from util import build_utils + +_readelf = None + +_library_re = re.compile( + '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]') + +_library_path_map = {} + + +def SetReadelfPath(path): + global _readelf + _readelf = path + + +def CallReadElf(library_or_executable): + assert _readelf is not None + readelf_cmd = [_readelf, '-d', library_or_executable] + return build_utils.CheckOutput(readelf_cmd) + + +def GetDependencies(library_or_executable): + elf = CallReadElf(library_or_executable) + deps = [] + for l in _library_re.findall(elf): + p = _library_path_map.get(l) + if p is not None: + deps.append(p) + return deps + + +def GetSortedTransitiveDependencies(libraries): + """Returns all transitive library dependencies in dependency order.""" + return build_utils.GetSortedTransitiveDependencies( + libraries, GetDependencies) + + +def main(): + parser = optparse.OptionParser() + build_utils.AddDepfileOption(parser) + + parser.add_option('--readelf', help='Path to the readelf binary.') + parser.add_option('--runtime-deps', + help='A file created for the target using write_runtime_deps.') + parser.add_option('--exclude-shared-libraries', + help='List of shared libraries to exclude from the output.') + parser.add_option('--output', help='Path to the generated .json file.') + + options, _ = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:])) + + SetReadelfPath(options.readelf) + + unsorted_lib_paths = [] + 
exclude_shared_libraries = [] + if options.exclude_shared_libraries: + exclude_shared_libraries = options.exclude_shared_libraries.split(',') + for f in open(options.runtime_deps): + f = f[:-1] + if f.endswith('.so'): + p = f.replace('lib.unstripped/', '') + if os.path.basename(p) in exclude_shared_libraries: + continue + unsorted_lib_paths.append(p) + _library_path_map[os.path.basename(p)] = p + + lib_paths = GetSortedTransitiveDependencies(unsorted_lib_paths) + + libraries = [os.path.basename(l) for l in lib_paths] + + # Convert to "base" library names: e.g. libfoo.so -> foo + java_libraries_list = ( + '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries])) + + out_json = { + 'libraries': libraries, + 'lib_paths': lib_paths, + 'java_libraries_list': java_libraries_list + } + build_utils.WriteJson( + out_json, + options.output, + only_if_changed=True) + + if options.depfile: + build_utils.WriteDepfile( + options.depfile, options.output, lib_paths, add_pydeps=False) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/deps/v8/build/android/gyp/write_ordered_libraries.pydeps b/deps/v8/build/android/gyp/write_ordered_libraries.pydeps new file mode 100644 index 0000000000..c2ed1fee36 --- /dev/null +++ b/deps/v8/build/android/gyp/write_ordered_libraries.pydeps @@ -0,0 +1,7 @@ +# Generated by running: +# build/print_python_deps.py --root build/android/gyp --output build/android/gyp/write_ordered_libraries.pydeps build/android/gyp/write_ordered_libraries.py +../../gn_helpers.py +util/__init__.py +util/build_utils.py +util/md5_check.py +write_ordered_libraries.py diff --git a/deps/v8/build/android/gyp/zip.py b/deps/v8/build/android/gyp/zip.py new file mode 100755 index 0000000000..b9503960fa --- /dev/null +++ b/deps/v8/build/android/gyp/zip.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# +# Copyright 2014 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
"""Archives a set of files."""

import argparse
import os
import sys
import zipfile

from util import build_utils


def main(args):
  """Zips --input-files and/or merges --input-zips into --output.

  Returns None (exit code 0) on success; argparse exits with status 2 on
  bad arguments.
  """
  args = build_utils.ExpandFileArgs(args)
  # BUG FIX: the expanded argument list was previously passed as the first
  # positional parameter of ArgumentParser, which is `prog` (the program
  # name shown in usage/help), not the arguments to parse. Arguments are
  # consumed by parse_args(args) below.
  parser = argparse.ArgumentParser()
  parser.add_argument('--input-files', help='GN-list of files to zip.')
  parser.add_argument(
      '--input-files-base-dir',
      help='Paths in the archive will be relative to this directory')
  parser.add_argument('--input-zips', help='GN-list of zips to merge.')
  parser.add_argument(
      '--input-zips-excluded-globs',
      help='GN-list of globs for paths to exclude.')
  parser.add_argument('--output', required=True, help='Path to output archive.')
  compress_group = parser.add_mutually_exclusive_group()
  compress_group.add_argument(
      '--compress', action='store_true', help='Compress entries')
  compress_group.add_argument(
      '--no-compress',
      action='store_false',
      dest='compress',
      help='Do not compress entries')
  build_utils.AddDepfileOption(parser)
  options = parser.parse_args(args)

  with build_utils.AtomicOutput(options.output) as f:
    with zipfile.ZipFile(f.name, 'w') as out_zip:
      depfile_deps = None

      if options.input_files:
        files = build_utils.ParseGnList(options.input_files)
        build_utils.DoZip(
            files,
            out_zip,
            base_dir=options.input_files_base_dir,
            compress_fn=lambda _: options.compress)

      if options.input_zips:
        files = build_utils.ParseGnList(options.input_zips)
        depfile_deps = files
        path_transform = None
        if options.input_zips_excluded_globs:
          globs = build_utils.ParseGnList(options.input_zips_excluded_globs)
          # Drop (return None for) any entry matching an excluded glob.
          path_transform = (
              lambda p: None if build_utils.MatchesGlob(p, globs) else p)
        build_utils.MergeZips(
            out_zip,
            files,
            path_transform=path_transform,
            compress=options.compress)

  # Depfile used only by dist_jar().
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile, options.output, inputs=depfile_deps, add_pydeps=False)


if __name__ == '__main__':
  # Consistency with the sibling scripts in this directory: propagate the
  # return value as the process exit status (None -> 0).
  sys.exit(main(sys.argv[1:]))